List of usage examples for org.apache.commons.io FileUtils cleanDirectory
public static void cleanDirectory(File directory) throws IOException
From source file:com.taobao.android.TPatchTool.java
/**
 * Builds a TPatch (dynamic-deployment patch package) by diffing the new APK
 * against the base APK, bundle by bundle, and writes the patch plus its
 * metadata JSON files under {@code outPatchDir}.
 *
 * @param outPatchDir        output directory for the patch and its JSON metadata
 * @param createPatchJson    when true, serialize the accumulated patch infos to {@code outPatchJson}
 * @param outPatchJson       destination file for the patch-info JSON (used only when {@code createPatchJson})
 * @param createHistoryPatch when true (and {@code patchHistoryUrl} is non-null),
 *                           also build incremental patches against historical releases
 * @param patchHistoryUrl    service URL used to fetch patch history — assumed remote endpoint; TODO confirm
 * @param productName        product identifier used to look up the last released patch
 * @return the generated .tpatch file
 * @throws Exception on any failure during unzip, diff, or file I/O
 */
public File doPatch(File outPatchDir, boolean createPatchJson, File outPatchJson, boolean createHistoryPatch,
        String patchHistoryUrl, String productName) throws Exception {
    isTpatch = true;
    pName = productName;
    final File diffTxtFile = new File(outPatchDir, "diff.json");
    final File patchInfoFile = new File(outPatchDir, "patchInfo.json");
    final File patchTmpDir = new File(outPatchDir, "tpatch-tmp");
    File mainDiffFolder = new File(patchTmpDir, mainBundleName);
    // FileUtils.cleanDirectory(outPatchDir);
    patchTmpDir.mkdirs();
    FileUtils.cleanDirectory(patchTmpDir);
    mainDiffFolder.mkdirs();
    File lastPatchFile = null;
    lastPatchFile = getLastPatchFile(baseApkBO.getVersionName(), productName, outPatchDir);
    // seed bundleClassMap from the previously released patch so diffs are cumulative
    PatchUtils.getTpatchClassDef(lastPatchFile, bundleClassMap);
    // unpack both APKs side by side for comparison
    File unzipFolder = unzipApk(outPatchDir);
    final File newApkUnzipFolder = new File(unzipFolder, NEW_APK_UNZIP_NAME);
    final File baseApkUnzipFolder = new File(unzipFolder, BASE_APK_UNZIP_NAME);
    // dex diff for the main bundle
    File mianDiffDestDex = new File(mainDiffFolder, DEX_NAME);
    File tmpDexFile = new File(patchTmpDir, mainBundleName + "-dex");
    createBundleDexPatch(newApkUnzipFolder, baseApkUnzipFolder, mianDiffDestDex, tmpDexFile, true);
    // copy changed main-bundle resources when they are to be retained in the patch
    if (isRetainMainBundleRes()) {
        copyMainBundleResources(newApkUnzipFolder, baseApkUnzipFolder, new File(patchTmpDir, mainBundleName));
    }
    ExecutorServicesHelper executorServicesHelper = new ExecutorServicesHelper();
    String taskName = "diffBundleTask";
    // every packaged .so may actually be an embedded bundle (awb); diff them concurrently
    Collection<File> soFiles = FileUtils.listFiles(newApkUnzipFolder, new String[] { "so" }, true);
    if (splitDiffBundle != null) {
        for (Pair<BundleBO, BundleBO> bundle : splitDiffBundle) {
            processBundleFiles(bundle.getSecond().getBundleFile(), bundle.getFirst().getBundleFile(),
                    patchTmpDir);
        }
    }
    for (final File soFile : soFiles) {
        final String relativePath = PathUtils.toRelative(newApkUnzipFolder, soFile.getAbsolutePath());
        // honor the exclusion patterns, if configured
        if (null != notIncludeFiles && pathMatcher.match(notIncludeFiles, relativePath)) {
            continue;
        }
        executorServicesHelper.submitTask(taskName, new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                File baseSoFile = new File(baseApkUnzipFolder, relativePath);
                if (PatchUtils.isBundleFile(soFile)) {
                    // the .so is really a nested bundle: diff it as a bundle
                    processBundleFiles(soFile, baseSoFile, patchTmpDir);
                } else {
                    // plain native lib: copy it into the patch only when modified
                    File destFile = new File(patchTmpDir, mainBundleName + "/" + relativePath);
                    if (isFileModify(soFile, baseSoFile)) {
                        FileUtils.copyFile(soFile, destFile);
                    }
                }
                return true;
            }
        });
    }
    executorServicesHelper.waitTaskCompleted(taskName);
    executorServicesHelper.stop();
    // zip the accumulated per-bundle diffs into the final .tpatch file
    File patchFile = createTPatchFile(outPatchDir, patchTmpDir);
    PatchInfo curPatchInfo = createBasePatchInfo(patchFile);
    BuildPatchInfos buildPatchInfos = new BuildPatchInfos();
    // optionally build incremental patches against historical releases
    if (createHistoryPatch && patchHistoryUrl != null) {
        buildPatchInfos = createIncrementPatchFiles(productName, patchFile, outPatchDir, newApkUnzipFolder,
                curPatchInfo, patchHistoryUrl);
    }
    buildPatchInfos.getPatches().add(curPatchInfo);
    buildPatchInfos.setBaseVersion(baseApkBO.getVersionName());
    buildPatchInfos.setDiffBundleDex(diffBundleDex);
    if (createPatchJson) {
        FileUtils.writeStringToFile(outPatchJson, JSON.toJSONString(buildPatchInfos));
    }
    // FileUtils.deleteDirectory(patchTmpDir);
    // record APK-level diff metadata next to the patch
    apkDiff.setBaseApkVersion(baseApkBO.getVersionName());
    apkDiff.setNewApkVersion(newApkBO.getVersionName());
    apkDiff.setBundleDiffResults(bundleDiffResults);
    apkDiff.setNewApkMd5(MD5Util.getFileMD5String(newApkBO.getApkFile()));
    apkDiff.setFileName(newApkBO.getApkName());
    apkPatchInfos.setBaseApkVersion(baseApkBO.getVersionName());
    apkPatchInfos.setNewApkVersion(newApkBO.getVersionName());
    apkPatchInfos.setBundleDiffResults(patchInfos);
    apkPatchInfos.setFileName(patchFile.getName());
    apkPatchInfos.setNewApkMd5(MD5Util.getFileMD5String(patchFile));
    FileUtils.writeStringToFile(diffTxtFile, JSON.toJSONString(apkDiff));
    FileUtils.writeStringToFile(patchInfoFile, JSON.toJSONString(apkPatchInfos));
    FileUtils.copyFileToDirectory(diffTxtFile, outPatchDir.getParentFile(), true);
    FileUtils.copyFileToDirectory(newApkBO.getApkFile(), outPatchDir.getParentFile(), true);
    // FileUtils.deleteDirectory(unzipFolder);
    return patchFile;
}
From source file:com.edgenius.wiki.search.service.IndexServiceImpl.java
public void cleanIndexes(final IndexRebuildListener listener) { //code move to AdvanceAdminAction.rebuild(), may move back until I find solution for lazy loading for each rebuild*() pageTemplate.closeIndex();//w w w . j ava 2 s.c o m commentTemplate.closeIndex(); spaceTemplate.closeIndex(); userTemplate.closeIndex(); roleTemplate.closeIndex(); pageTagTemplate.closeIndex(); spaceTagTemplate.closeIndex(); attachmentTemplate.closeIndex(); widgetTemplate.closeIndex(); //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Clean all sub directories(first level) under index root, but not delete directory itself. try { File[] list = indexRoot.getFile().listFiles(new FileFilter() { public boolean accept(File pathname) { return pathname.isDirectory() ? true : false; } }); for (File file : list) { try { FileUtils.cleanDirectory(file); } catch (IOException e) { log.error("Unable to clean index root directory:" + indexRoot.getFilename(), e); } } } catch (IOException e1) { log.error("Unable to list index root directory", e1); } pageTemplate.createEmptyIndex(); commentTemplate.createEmptyIndex(); spaceTemplate.createEmptyIndex(); userTemplate.createEmptyIndex(); roleTemplate.createEmptyIndex(); pageTagTemplate.createEmptyIndex(); spaceTagTemplate.createEmptyIndex(); attachmentTemplate.createEmptyIndex(); widgetTemplate.createEmptyIndex(); }
From source file:edu.duke.cabig.c3pr.webservice.integration.C3PREmbeddedTomcatTestBase.java
/** * @throws IOException/*from www. ja va2s .c om*/ */ private void cleanup() { try { restoreKeystoreFileIfNeeded(); stopContainer(); System.out.println("Cleaning up after ourselves: " + catalinaHome.getAbsolutePath() + " and " + tmpDir.getAbsolutePath()); FileUtils.cleanDirectory(catalinaHome); FileUtils.cleanDirectory(tmpDir); } catch (IOException e) { logger.severe(ExceptionUtils.getFullStackTrace(e)); } }
From source file:com.edgenius.core.repository.SimpleRepositoryServiceImpl.java
/**
 * Removes a repository file node: either the whole node with all its version
 * history (when {@code version} parses to 0), or a single version only.
 * The node is locked for the duration of the operation and unlocked in finally.
 *
 * @param ticket   caller's workspace ticket; must carry write permission
 * @param nodeUuid UUID of the node to remove
 * @param version  version number as a string; "0" (or unparsable, which
 *                 NumberUtils.toInt maps to 0) means remove the node entirely
 * @return a detached {@code FileNode} copy of the removed node's base record
 * @throws RepositoryException on missing write permission or any removal failure
 */
public FileNode removeFile(ITicket ticket, String nodeUuid, String version) throws RepositoryException {
    if (!ticket.isAllowWrite()) {
        String error = "Workspace has not write permission " + ticket.getSpacename() + " for node " + nodeUuid;
        log.warn(error);
        throw new RepositoryException("Permission denied: " + error);
    }
    log.info("File will removed from " + ticket.getSpacename() + ". NodeUUID: " + nodeUuid + ". Version: "
            + version);
    //get this node base, so that we can know NodeType and IdentifierUuid
    CrFileNode filenode = crFileNodeDAO.getBaseByNodeUuid(nodeUuid);
    try {
        acquireLock(ticket.getSpacename(), filenode.getIdentifierUuid(), nodeUuid);
        CrWorkspace crW = getCrWorkspace(ticket);
        if (NumberUtils.toInt(version) == 0) {
            //remove history together: delete DB rows first, then the node's directory on disk
            crFileNodeDAO.removeByNodeUuid(nodeUuid);
            File fileDir = new File(FileUtil.getFullPath(homeDir, crW.getSpaceUuid(), filenode.getNodeType(),
                    filenode.getIdentifierUuid(), filenode.getNodeUuid()));
            FileUtils.cleanDirectory(fileDir);
            if (fileDir.delete())
                log.info("All history " + filenode.getNodeUuid() + " is removed");
            else
                log.info("Node " + filenode.getNodeUuid() + "physcial file can not be removed");
        } else {
            //only remove special version
            if (!crFileNodeDAO.removeVersion(nodeUuid, NumberUtils.toInt(version))) {
                log.warn("No version " + version + " exist in Database");
            }
            // NOTE(review): the directory path uses filenode.getVersion() (the base
            // record's version), not the requested `version` — confirm this is intended.
            String verDir = FileUtil.getFullPath(homeDir, crW.getSpaceUuid(), filenode.getNodeType(),
                    filenode.getIdentifierUuid(), filenode.getNodeUuid(),
                    Integer.valueOf(filenode.getVersion()).toString());
            File ver = new File(verDir);
            FileUtils.cleanDirectory(ver);
            if (ver.delete())
                log.info("Version " + version + " is removed");
            else
                log.warn("Version " + version + " physcial file can not be removed");
        }
        log.info("Remove is done");
        return FileNode.copyPersistToNode(filenode);
    } catch (Exception e) {
        log.error("Remove file node failed ", e);
        throw new RepositoryException("Remove file node failed " + e);
    } finally {
        // always release the lock acquired above, even on failure paths
        releaseLock(ticket.getSpacename(), filenode.getIdentifierUuid(), nodeUuid);
    }
}
From source file:com.universal.storage.UniversalS3Storage.java
/** * This method cleans the context of this storage. This method doesn't remove any file from the storage. * The method will clean the tmp folder to release disk usage. *///from w w w. j av a 2 s. c o m public void clean() throws UniversalIOException { try { FileUtils.cleanDirectory(new File(this.settings.getTmp())); } catch (Exception e) { throw new UniversalIOException(e.getMessage()); } }
From source file:com.o2d.pkayjava.editor.proxy.ResolutionManager.java
public void resizeImagesTmpDirToResolution(String packName, File sourceFolder, ResolutionEntryVO resolution, File targetFolder) {// ww w . j a v a2 s. c om ProjectManager projectManager = facade.retrieveProxy(ProjectManager.NAME); float ratio = ResolutionManager.getResolutionRatio(resolution, projectManager.getCurrentProjectInfoVO().originalResolution); if (targetFolder.exists()) { try { FileUtils.cleanDirectory(targetFolder); } catch (IOException e) { e.printStackTrace(); } } // now pack TexturePacker.Settings settings = new TexturePacker.Settings(); settings.flattenPaths = true; settings.maxHeight = getMinSquareNum(resolution.height); settings.maxWidth = getMinSquareNum(resolution.height); settings.filterMag = Texture.TextureFilter.Linear; settings.filterMin = Texture.TextureFilter.Linear; TexturePacker tp = new TexturePacker(settings); for (final File fileEntry : sourceFolder.listFiles()) { if (!fileEntry.isDirectory()) { BufferedImage bufferedImage = ResolutionManager.imageResize(fileEntry, ratio); tp.addImage(bufferedImage, FilenameUtils.removeExtension(fileEntry.getName())); } } tp.pack(targetFolder, packName); }
From source file:com.ah.be.cloudauth.HmCloudAuthCertMgmtImpl.java
private String generateCert(String filePath) throws HmCloudAuthException { try {//from w w w.j av a 2 s. co m if (StringUtils.isBlank(this.certificationContent)) { throw new HmCloudAuthException("generateCert", UpdateCAStatus.HTTPS_CERT_CONTENT_ERR); } File file = new File(filePath); if (file.getParentFile().exists()) { FileUtils.cleanDirectory(file.getParentFile()); } FileUtils.writeStringToFile(file, this.certificationContent); } catch (IOException e) { throw new HmCloudAuthException("generateCert", UpdateCAStatus.HTTPS_CERT_FILE_IO_ERR, e); } return null; }
From source file:com.alibaba.jstorm.daemon.nimbus.ServiceHandler.java
/** * prepare to uploading topology jar, return the file location */// w w w.j a va 2 s .co m @Override public String beginFileUpload() throws TException { String fileLoc = null; try { String path; String key = UUID.randomUUID().toString(); path = StormConfig.masterInbox(conf) + "/" + key; FileUtils.forceMkdir(new File(path)); FileUtils.cleanDirectory(new File(path)); fileLoc = path + "/stormjar-" + key + ".jar"; data.getUploaders().put(fileLoc, Channels.newChannel(new FileOutputStream(fileLoc))); LOG.info("Begin upload file from client to " + fileLoc); return path; } catch (FileNotFoundException e) { LOG.error("File not found: " + fileLoc, e); throw new TException(e); } catch (IOException e) { LOG.error("Upload file error: " + fileLoc, e); throw new TException(e); } }
From source file:edu.isi.karma.research.modeling.ModelLearner_LOD.java
/**
 * Experiment driver: loads ontologies, imports gold-standard semantic models,
 * learns a semantic model for each source using the LOD-pattern graph, and writes
 * precision/recall/time per source to a CSV plus Graphviz visualizations.
 * Hard-coded paths (e.g. the user config directory) mark this as a research script.
 */
public static void main(String[] args) throws Exception {
    ServletContextParameterMap contextParameters = ContextParametersRegistry.getInstance().getDefault();
    contextParameters.setParameterValue(ContextParameter.USER_CONFIG_DIRECTORY, "/Users/mohsen/karma/config");
    OntologyManager ontologyManager = new OntologyManager(contextParameters.getId());
    File ff = new File(Params.ONTOLOGY_DIR);
    File[] files = ff.listFiles();
    if (files == null) {
        logger.error("no ontology to import at " + ff.getAbsolutePath());
        return;
    }
    // import every ontology file with a recognized extension
    for (File f : files) {
        if (f.getName().endsWith(".owl") || f.getName().endsWith(".rdf") || f.getName().endsWith(".n3")
                || f.getName().endsWith(".ttl") || f.getName().endsWith(".xml")) {
            logger.info("Loading ontology file: " + f.getAbsolutePath());
            ontologyManager.doImport(f, "UTF-8");
        }
    }
    ontologyManager.updateCache();
    String outputPath = Params.OUTPUT_DIR;
    String graphPath = Params.GRAPHS_DIR;
    FileUtils.cleanDirectory(new File(graphPath));
    List<SemanticModel> semanticModels = ModelReader.importSemanticModelsFromJsonFiles(Params.MODEL_DIR,
            Params.MODEL_MAIN_FILE_EXT);
    ModelLearner_LOD modelLearner = null;
    // experiment knobs (edited by hand between runs)
    boolean onlyGenerateSemanticTypeStatistics = false;
    boolean onlyUseOntology = false;
    boolean useCorrectType = false;
    int numberOfCandidates = 4;
    boolean onlyEvaluateInternalLinks = false;
    int maxPatternSize = 3;
    if (onlyGenerateSemanticTypeStatistics) {
        getStatistics(semanticModels);
        return;
    }
    // results CSV name encodes the knob settings for this run
    String filePath = Params.RESULTS_DIR + "temp/";
    String filename = "";
    filename += "lod-results";
    filename += useCorrectType ? "-correct" : "-k=" + numberOfCandidates;
    filename += onlyUseOntology ? "-ontology" : "-p" + maxPatternSize;
    filename += onlyEvaluateInternalLinks ? "-internal" : "-all";
    filename += ".csv";
    PrintWriter resultFile = new PrintWriter(new File(filePath + filename));
    resultFile.println("source \t p \t r \t t \n");
    for (int i = 0; i < semanticModels.size(); i++) {
        // for (int i = 0; i <= 10; i++) {
        // int i = 1; {
        int newSourceIndex = i;
        SemanticModel newSource = semanticModels.get(newSourceIndex);
        logger.info("======================================================");
        logger.info(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        System.out.println(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        logger.info("======================================================");
        SemanticModel correctModel = newSource;
        List<ColumnNode> columnNodes = correctModel.getColumnNodes();
        List<Node> steinerNodes = new LinkedList<Node>(columnNodes);
        String graphName = graphPath + "lod" + Params.GRAPH_FILE_EXT;
        // choose the graph source: ontology only, a cached graph file, or a fresh build
        if (onlyUseOntology) {
            modelLearner = new ModelLearner_LOD(new GraphBuilder(ontologyManager, false), steinerNodes);
        } else if (new File(graphName).exists()) {
            // read graph from file
            try {
                logger.info("loading the graph ...");
                DirectedWeightedMultigraph<Node, DefaultLink> graph = GraphUtil.importJson(graphName);
                modelLearner = new ModelLearner_LOD(new GraphBuilderTopK(ontologyManager, graph), steinerNodes);
            } catch (Exception e) {
                e.printStackTrace();
                resultFile.close();
                return;
            }
        } else {
            logger.info("building the graph ...");
            // create and save the graph to file
            // GraphBuilder_Popularity b = new GraphBuilder_Popularity(ontologyManager,
            //         Params.LOD_OBJECT_PROPERIES_FILE,
            //         Params.LOD_DATA_PROPERIES_FILE);
            GraphBuilder_LOD_Pattern b = new GraphBuilder_LOD_Pattern(ontologyManager, Params.PATTERNS_DIR,
                    maxPatternSize);
            modelLearner = new ModelLearner_LOD(b.getGraphBuilder(), steinerNodes);
        }
        long start = System.currentTimeMillis();
        List<SortableSemanticModel> hypothesisList = modelLearner.hypothesize(useCorrectType,
                numberOfCandidates);
        long elapsedTimeMillis = System.currentTimeMillis() - start;
        float elapsedTimeSec = elapsedTimeMillis / 1000F;
        List<SortableSemanticModel> topHypotheses = null;
        if (hypothesisList != null) {
            // for (SortableSemanticModel sss : hypothesisList) {
            //     ModelEvaluation mmm = sss.evaluate(correctModel);
            //     System.out.println(mmm.getPrecision() + ", " + mmm.getRecall());
            // }
            topHypotheses = hypothesisList.size() > 10 ? hypothesisList.subList(0, 10) : hypothesisList;
        }
        Map<String, SemanticModel> models = new TreeMap<String, SemanticModel>();
        ModelEvaluation me;
        models.put("1-correct model", correctModel);
        if (topHypotheses != null)
            for (int k = 0; k < topHypotheses.size(); k++) {
                SortableSemanticModel m = topHypotheses.get(k);
                me = m.evaluate(correctModel, onlyEvaluateInternalLinks, false);
                // label carries the candidate's scores; it doubles as the TreeMap key
                String label = "candidate " + k + "\n" +
                        // (m.getSteinerNodes() == null ? "" : m.getSteinerNodes().getScoreDetailsString()) +
                        "link coherence:"
                        + (m.getLinkCoherence() == null ? "" : m.getLinkCoherence().getCoherenceValue()) + "\n";
                label += (m.getSteinerNodes() == null || m.getSteinerNodes().getCoherence() == null) ? ""
                        : "node coherence:" + m.getSteinerNodes().getCoherence().getCoherenceValue() + "\n";
                label += "confidence:" + m.getConfidenceScore() + "\n";
                label += m.getSteinerNodes() == null ? ""
                        : "mapping score:" + m.getSteinerNodes().getScore() + "\n";
                label += "cost:" + roundDecimals(m.getCost(), 6) + "\n" +
                        // "-distance:" + me.getDistance() +
                        "-precision:" + me.getPrecision() + "-recall:" + me.getRecall();
                models.put(label, m);
                if (k == 0) { // first rank model: record its scores in the CSV
                    System.out.println("precision: " + me.getPrecision() + ", recall: " + me.getRecall()
                            + ", time: " + elapsedTimeSec);
                    logger.info("precision: " + me.getPrecision() + ", recall: " + me.getRecall() + ", time: "
                            + elapsedTimeSec);
                    String s = newSource.getName() + "\t" + me.getPrecision() + "\t" + me.getRecall() + "\t"
                            + elapsedTimeSec;
                    resultFile.println(s);
                }
            }
        String outName = outputPath + newSource.getName() + Params.GRAPHVIS_OUT_DETAILS_FILE_EXT;
        GraphVizUtil.exportSemanticModelsToGraphviz(models, newSource.getName(), outName,
                GraphVizLabelType.LocalId, GraphVizLabelType.LocalUri, true, true);
    }
    resultFile.close();
}
From source file:edu.ku.brc.specify.tools.FormDisplayer.java
/** * // ww w. j av a2 s. c o m */ protected boolean setup() { String pathStr = AppContextMgr.getInstance().getClassObject(Discipline.class) != null ? AppContextMgr.getInstance().getClassObject(Discipline.class).getType() : ""; //$NON-NLS-1$ pathStr += "_" + UIHelper.getOSType().toString() + "_" + (doAll ? "all" : "user");//$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ baseDir = new File(getUserHomeDir() + File.separator + "FormImages"); //$NON-NLS-1$ outputDir = new File(baseDir.getAbsoluteFile() + File.separator + pathStr); if (!baseDir.exists()) { if (!baseDir.mkdir()) { UIRegistry.showError( String.format(getResourceString("FormDisplayer.OUTPUT_ERROR"), baseDir.getAbsoluteFile())); } } if (!outputDir.exists()) { if (!outputDir.mkdir()) { UIRegistry.showError(String.format(getResourceString("FormDisplayer.OUTPUT_ERROR"), outputDir.getAbsoluteFile())); } } else { try { FileUtils.cleanDirectory(outputDir); } catch (Exception ex) { edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount(); edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(FormDisplayer.class, ex); ex.printStackTrace(); } } String dstDirPath = UIRegistry.getDefaultUserHomeDir() + File.separator + "Specify/site"; try { File tmplateFile = checkForTemplateFiles(dstDirPath); mapTemplate = FileUtils.readFileToString(tmplateFile); mapTemplate = StringUtils.replace(mapTemplate, "Database Schema", //$NON-NLS-1$ getResourceString("FormDisplayer.FORMS")); //$NON-NLS-1$ } catch (IOException ex) { ex.printStackTrace(); JOptionPane.showMessageDialog(null, "You are missing the template that is needed to run this tool."); return false; } if (StringUtils.isEmpty(mapTemplate)) { System.err.println("The template file is empty!"); //$NON-NLS-1$ } try { File srcDir = new File(dstDirPath); //$NON-NLS-1$ for (File f : srcDir.listFiles()) { if (!f.getName().startsWith(".")) //$NON-NLS-1$ { File dst = new File(outputDir.getAbsolutePath() + File.separator + f.getName()); //$NON-NLS-1$ if 
(!FilenameUtils.getExtension(f.getName()).toLowerCase().equals("html")) //$NON-NLS-1$ { FileUtils.copyFile(f, dst); } dst = new File(baseDir.getAbsolutePath() + File.separator + f.getName()); //$NON-NLS-1$ if (!FilenameUtils.getExtension(f.getName()).toLowerCase().equals("html")) //$NON-NLS-1$ { FileUtils.copyFile(f, dst); } } } } catch (Exception ex) { ex.printStackTrace(); edu.ku.brc.af.core.UsageTracker.incrHandledUsageCount(); edu.ku.brc.exceptions.ExceptionTracker.getInstance().capture(FormDisplayer.class, ex); ex.printStackTrace(); return false; } return true; }