List of usage examples for org.apache.commons.vfs2 FileObject getURL
URL getURL() throws FileSystemException;
From source file:org.onehippo.forge.content.exim.core.util.ContentFileObjectUtils.java
/**
 * Converts {@code fileObject} to a {@link File} instance and returns it if {@code fileObject}
 * is a local file.
 * @param fileObject {@link FileObject} instance
 * @return a {@link File} equivalent of {@code fileObject} if it is a local file
 * @throws IOException if {@code fileObject} is not a local file or any IOException occurs
 */
public static File toFile(FileObject fileObject) throws IOException {
    try {
        // new File(URI) throws IllegalArgumentException for non-"file:" URIs, so a non-local
        // FileObject must be translated into the IOException documented above instead of
        // letting the unchecked exception escape.
        return new File(fileObject.getURL().toURI());
    } catch (FileSystemException | URISyntaxException | IllegalArgumentException e) {
        throw new IOException(e.toString(), e);
    }
}
From source file:org.onehippo.forge.content.exim.repository.jaxrs.ContentEximExportService.java
/**
 * Exports each binary (gallery/asset) item listed in {@code result} to a JSON file under
 * {@code baseFolder}, mirroring the repository path of the exported variant node.
 * Failures on individual items are recorded and do not abort the loop.
 *
 * @param procLogger process logger for progress/error reporting
 * @param processStatus optional process status holder (may be null) updated with progress
 * @param params execution parameters (path filters, docbase property names, tags, batching)
 * @param exportTask task performing the actual binary export and record bookkeeping
 * @param result accumulates per-item results and counters; its items drive the loop
 * @param batchCount running item counter carried across calls, used for batching/throttling
 * @param baseFolder target VFS folder for the exported JSON files
 * @return the updated {@code batchCount} after processing all items
 * @throws Exception if session access or an unrecoverable step fails outside the per-item try
 */
private int exportBinaries(Logger procLogger, ProcessStatus processStatus, ExecutionParams params,
        DefaultBinaryExportTask exportTask, Result result, int batchCount, FileObject baseFolder)
        throws Exception {
    // URL prefix of the export base folder; stripped from jcr:data values below so the
    // exported JSON contains relative references.
    final String baseFolderUrlPrefix = baseFolder.getURL().toString() + "/";
    final AntPathMatcher pathMatcher = new AntPathMatcher();
    for (ResultItem item : result.getItems()) {
        // A stop-request marker file under the base folder aborts the whole export loop.
        if (isStopRequested(baseFolder)) {
            procLogger.info("Stop requested by file at {}/{}", baseFolder.getName().getPath(),
                    STOP_REQUEST_FILE_REL_PATH);
            break;
        }
        ContentMigrationRecord record = null;
        try {
            String handlePath = item.getPath();
            // Skip items excluded by the configured include/exclude patterns or that are
            // not binary paths, or that no longer exist in the repository.
            if (!isBinaryPathIncluded(pathMatcher, params, handlePath)) {
                continue;
            }
            if (!HippoNodeUtils.isBinaryPath(handlePath)) {
                continue;
            }
            if (!exportTask.getDocumentManager().getSession().nodeExists(handlePath)) {
                continue;
            }
            Node handle = exportTask.getDocumentManager().getSession().getNode(handlePath);
            Node variant = HippoNodeUtils.getFirstVariantNode(handle);
            if (variant == null) {
                continue;
            }
            String variantPath = variant.getPath();
            record = exportTask.beginRecord(variant.getIdentifier(), variantPath);
            ContentNode contentNode = exportTask.exportBinarySetToContentNode(variant);
            record.setProcessed(true);
            // Replace mirror docbase UUID references with node paths so the export is portable.
            ContentNodeUtils.replaceDocbasesByPaths(exportTask.getDocumentManager().getSession(),
                    contentNode, ContentNodeUtils.MIRROR_DOCBASES_XPATH);
            Set<String> docbasePropNames = params.getDocbasePropNames();
            if (CollectionUtils.isNotEmpty(docbasePropNames)) {
                for (String docbasePropName : docbasePropNames) {
                    ContentNodeUtils.replaceDocbasePropertiesByPaths(
                            exportTask.getDocumentManager().getSession(), contentNode,
                            "properties[@itemName='" + docbasePropName + "']");
                }
            }
            ContentNodeUtils.removeUrlPrefixInJcrDataValues(contentNode, baseFolderUrlPrefix);
            applyTagContentProperties(contentNode, params.getBinaryTags());
            // Output file path mirrors the variant's repository path (index notation removed,
            // leading slash stripped) with a ".json" suffix, resolved under baseFolder.
            String relPath = StringUtils
                    .removeStart(ContentPathUtils.removeIndexNotationInNodePath(variantPath), "/");
            FileObject file = baseFolder.resolveFile(relPath + ".json");
            record.setAttribute("file", file.getName().getPath());
            exportTask.writeContentNodeToJsonFile(contentNode, file);
            procLogger.debug("Exported document from {} to {}.", handlePath, file.getName().getPath());
            record.setSucceeded(true);
        } catch (Exception e) {
            // Per-item failure: record the error and continue with the next item.
            procLogger.error("Failed to process record: {}", record, e);
            if (record != null) {
                record.setErrorMessage(e.toString());
            }
        } finally {
            if (record != null) {
                exportTask.endRecord();
                result.incrementTotalBinaryCount();
                if (record.isSucceeded()) {
                    result.incrementSucceededBinaryCount();
                } else {
                    result.incrementFailedBinaryCount();
                }
                if (processStatus != null) {
                    processStatus.setProgress(result.getProgress());
                }
            }
            ++batchCount;
            // Every batchSize items: refresh the JCR session and optionally throttle.
            if (batchCount % params.getBatchSize() == 0) {
                exportTask.getDocumentManager().getSession().refresh(false);
                if (params.getThrottle() > 0) {
                    Thread.sleep(params.getThrottle());
                }
            }
        }
    }
    exportTask.getDocumentManager().getSession().refresh(false);
    return batchCount;
}
From source file:org.onehippo.forge.content.exim.repository.jaxrs.ContentEximExportService.java
/**
 * Exports each document item listed in {@code result} to a JSON file under {@code baseFolder},
 * mirroring the repository path of the exported variant node. The published variant is
 * preferred; the unpublished variant is used as a fallback. Failures on individual items are
 * recorded and do not abort the loop.
 *
 * @param procLogger process logger for progress/error reporting
 * @param processStatus optional process status holder (may be null) updated with progress
 * @param params execution parameters (path filters, docbase property names, tags, batching)
 * @param exportTask task performing the actual variant export and record bookkeeping
 * @param result accumulates per-item results and counters; its items drive the loop
 * @param batchCount running item counter carried across calls, used for batching/throttling
 * @param baseFolder target VFS folder for the exported JSON files
 * @param referredBinaryPaths collects binary paths referenced by exported documents
 * @return the updated {@code batchCount} after processing all items
 * @throws Exception if session access or an unrecoverable step fails outside the per-item try
 */
private int exportDocuments(Logger procLogger, ProcessStatus processStatus, ExecutionParams params,
        WorkflowDocumentVariantExportTask exportTask, Result result, int batchCount, FileObject baseFolder,
        Set<String> referredBinaryPaths) throws Exception {
    // URL prefix of the export base folder; stripped from jcr:data values below so the
    // exported JSON contains relative references.
    final String baseFolderUrlPrefix = baseFolder.getURL().toString() + "/";
    final AntPathMatcher pathMatcher = new AntPathMatcher();
    for (ResultItem item : result.getItems()) {
        // A stop-request marker file under the base folder aborts the whole export loop.
        if (isStopRequested(baseFolder)) {
            procLogger.info("Stop requested by file at {}/{}", baseFolder.getName().getPath(),
                    STOP_REQUEST_FILE_REL_PATH);
            break;
        }
        ContentMigrationRecord record = null;
        try {
            String handlePath = item.getPath();
            // Skip items excluded by the configured patterns, non-document paths, and
            // handles that no longer exist in the repository.
            if (!isDocumentPathIncluded(pathMatcher, params, handlePath)) {
                continue;
            }
            if (!HippoNodeUtils.isDocumentPath(handlePath)) {
                continue;
            }
            if (!exportTask.getDocumentManager().getSession().nodeExists(handlePath)) {
                continue;
            }
            Node handle = exportTask.getDocumentManager().getSession().getNode(handlePath);
            Map<String, Node> variantsMap = HippoNodeUtils.getDocumentVariantsMap(handle);
            // Prefer the published variant; fall back to the unpublished one.
            Node variant = variantsMap.get(HippoStdNodeType.PUBLISHED);
            if (variant == null) {
                variant = variantsMap.get(HippoStdNodeType.UNPUBLISHED);
            }
            if (variant == null) {
                continue;
            }
            String variantPath = variant.getPath();
            record = exportTask.beginRecord(variant.getIdentifier(), variantPath);
            Document document = new Document(variant.getIdentifier());
            ContentNode contentNode = exportTask.exportVariantToContentNode(document);
            record.setProcessed(true);
            // Replace mirror docbase UUIDs with node paths, collecting any referenced
            // binary paths into referredBinaryPaths for a later binary export pass.
            ContentNodeUtils.replaceDocbasesByPaths(exportTask.getDocumentManager().getSession(),
                    contentNode, ContentNodeUtils.MIRROR_DOCBASES_XPATH, referredBinaryPaths);
            Set<String> docbasePropNames = params.getDocbasePropNames();
            if (CollectionUtils.isNotEmpty(docbasePropNames)) {
                for (String docbasePropName : docbasePropNames) {
                    ContentNodeUtils.replaceDocbasePropertiesByPaths(
                            exportTask.getDocumentManager().getSession(), contentNode,
                            "properties[@itemName='" + docbasePropName + "']");
                }
            }
            ContentNodeUtils.removeUrlPrefixInJcrDataValues(contentNode, baseFolderUrlPrefix);
            applyTagContentProperties(contentNode, params.getDocumentTags());
            // Output file path mirrors the variant's repository path (index notation removed,
            // leading slash stripped) with a ".json" suffix, resolved under baseFolder.
            String relPath = StringUtils
                    .removeStart(ContentPathUtils.removeIndexNotationInNodePath(variantPath), "/");
            FileObject file = baseFolder.resolveFile(relPath + ".json");
            record.setAttribute("file", file.getName().getPath());
            exportTask.writeContentNodeToJsonFile(contentNode, file);
            procLogger.debug("Exported document from {} to {}.", handlePath, file.getName().getPath());
            record.setSucceeded(true);
        } catch (Exception e) {
            // Per-item failure: record the error and continue with the next item.
            procLogger.error("Failed to process record: {}", record, e);
            if (record != null) {
                record.setErrorMessage(e.toString());
            }
        } finally {
            if (record != null) {
                exportTask.endRecord();
                result.incrementTotalDocumentCount();
                if (record.isSucceeded()) {
                    result.incrementSucceededDocumentCount();
                } else {
                    result.incrementFailedDocumentCount();
                }
                if (processStatus != null) {
                    processStatus.setProgress(result.getProgress());
                }
            }
            ++batchCount;
            // Every batchSize items: refresh the JCR session and optionally throttle.
            if (batchCount % params.getBatchSize() == 0) {
                exportTask.getDocumentManager().getSession().refresh(false);
                if (params.getThrottle() > 0) {
                    Thread.sleep(params.getThrottle());
                }
            }
        }
    }
    exportTask.getDocumentManager().getSession().refresh(false);
    return batchCount;
}
From source file:org.onehippo.forge.content.exim.repository.jaxrs.ContentEximImportService.java
/**
 * Imports binary (gallery/asset) content from the given JSON files into the repository,
 * creating or updating the target folder hierarchy and binary nodes. Failures on individual
 * files are recorded and do not abort the loop.
 *
 * @param procLogger process logger for progress/error reporting
 * @param processStatus optional process status holder (may be null) updated with progress
 * @param jsonFiles the exported JSON files to import
 * @param params execution parameters (path filters, folder/gallery types, tags, batching)
 * @param baseFolder base VFS folder the JSON files were exported under
 * @param importTask task performing the actual binary import and record bookkeeping
 * @param result accumulates per-file results and counters
 * @param batchCount running file counter carried across calls, used for batching/throttling
 * @return the updated {@code batchCount} after processing all files
 * @throws Exception if session access or an unrecoverable step fails outside the per-file try
 */
private int importBinaries(Logger procLogger, ProcessStatus processStatus, FileObject[] jsonFiles,
        ExecutionParams params, FileObject baseFolder, DefaultBinaryImportTask importTask, Result result,
        int batchCount) throws Exception {
    final String baseFolderUrlPrefix = baseFolder.getURL().toString();
    final AntPathMatcher pathMatcher = new AntPathMatcher();
    for (FileObject file : jsonFiles) {
        // A stop-request marker file under the base folder aborts the whole import loop.
        if (isStopRequested(baseFolder)) {
            procLogger.info("Stop requested by file at {}/{}", baseFolder.getName().getPath(),
                    STOP_REQUEST_FILE_REL_PATH);
            break;
        }
        ContentNode contentNode = importTask.readContentNodeFromJsonFile(file);
        String primaryTypeName = contentNode.getPrimaryType();
        // The original repository path is carried in the exported "jcr:path" property.
        String path = contentNode.getProperty("jcr:path").getValue();
        if (!isBinaryPathIncluded(pathMatcher, params, path)) {
            continue;
        }
        if (!HippoNodeUtils.isBinaryPath(path)) {
            continue;
        }
        ContentMigrationRecord record = null;
        try {
            // Turn relative binary attachment references back into absolute VFS URLs
            // under the base folder so the import task can read the attachment data.
            ContentNodeUtils.prependUrlPrefixInJcrDataValues(contentNode, BINARY_ATTACHMENT_REL_PATH,
                    baseFolderUrlPrefix);
            record = importTask.beginRecord("", path);
            record.setAttribute("file", file.getName().getPath());
            record.setProcessed(true);
            String[] folderPathAndName = ContentPathUtils.splitToFolderPathAndName(path);
            String folderPath = folderPathAndName[0];
            String name = folderPathAndName[1];
            // Gallery and asset folders use different primary/folder/gallery type settings.
            String folderPrimaryType;
            String[] folderTypes;
            String[] galleryTypes;
            if (HippoNodeUtils.isGalleryPath(path)) {
                folderPrimaryType = params.getGalleryFolderPrimaryType();
                folderTypes = params.getGalleryFolderFolderTypes();
                galleryTypes = params.getGalleryFolderGalleryTypes();
            } else {
                folderPrimaryType = params.getAssetFolderPrimaryType();
                folderTypes = params.getAssetFolderFolderTypes();
                galleryTypes = params.getAssetFolderGalleryTypes();
            }
            folderPath = importTask.createOrUpdateBinaryFolder(folderPath, folderPrimaryType, folderTypes,
                    galleryTypes);
            applyTagContentProperties(contentNode, params.getBinaryTags());
            String updatedPath = importTask.createOrUpdateBinaryFromContentNode(contentNode,
                    primaryTypeName, folderPath, name);
            HippoBinaryNodeUtils.extractTextFromBinariesAndSaveHippoTextsUnderHandlePath(
                    importTask.getDocumentManager().getSession(), updatedPath);
            record.setSucceeded(true);
        } catch (Exception e) {
            // Per-file failure: record the error and continue with the next file.
            procLogger.error("Failed to process record: {}", record, e);
            if (record != null) {
                record.setErrorMessage(e.toString());
            }
        } finally {
            if (record != null) {
                importTask.endRecord();
                result.addItem(recordToResultItem(record));
                result.incrementTotalBinaryCount();
                if (record.isSucceeded()) {
                    result.incrementSucceededBinaryCount();
                } else {
                    result.incrementFailedBinaryCount();
                }
                if (processStatus != null) {
                    // the remaining 5% for cleaning paths to convert those to uuids.
                    processStatus.setProgress(0.95 * ((double) batchCount) / ((double) jsonFiles.length));
                }
            }
            ++batchCount;
            // Every batchSize files: persist, refresh the JCR session, optionally throttle.
            if (batchCount % params.getBatchSize() == 0) {
                importTask.getDocumentManager().getSession().save();
                importTask.getDocumentManager().getSession().refresh(false);
                if (params.getThrottle() > 0) {
                    Thread.sleep(params.getThrottle());
                }
            }
        }
    }
    importTask.getDocumentManager().getSession().save();
    importTask.getDocumentManager().getSession().refresh(false);
    return batchCount;
}
From source file:org.onehippo.forge.content.exim.repository.jaxrs.ContentEximImportService.java
/**
 * Imports document content from the given JSON files into the repository, creating or updating
 * document variants and optionally (de)publishing them according to
 * {@code params.getPublishOnImport()}. Failures on individual files are recorded and do not
 * abort the loop.
 *
 * @param procLogger process logger for progress/error reporting
 * @param processStatus optional process status holder (may be null) updated with progress
 * @param jsonFiles the exported JSON files to import
 * @param params execution parameters (path filters, publish-on-import mode, tags, batching)
 * @param baseFolder base VFS folder the JSON files were exported under
 * @param importTask task performing the actual document import and record bookkeeping
 * @param result accumulates per-file results and counters
 * @param batchCount running file counter carried across calls, used for batching/throttling
 * @return the updated {@code batchCount} after processing all files
 * @throws Exception if session access or an unrecoverable step fails outside the per-file try
 */
private int importDocuments(Logger procLogger, ProcessStatus processStatus, FileObject[] jsonFiles,
        ExecutionParams params, FileObject baseFolder, WorkflowDocumentVariantImportTask importTask,
        Result result, int batchCount) throws Exception {
    final String baseFolderUrlPrefix = baseFolder.getURL().toString();
    final AntPathMatcher pathMatcher = new AntPathMatcher();
    for (FileObject file : jsonFiles) {
        // A stop-request marker file under the base folder aborts the whole import loop.
        if (isStopRequested(baseFolder)) {
            procLogger.info("Stop requested by file at {}/{}", baseFolder.getName().getPath(),
                    STOP_REQUEST_FILE_REL_PATH);
            break;
        }
        ContentNode contentNode = importTask.readContentNodeFromJsonFile(file);
        String primaryTypeName = contentNode.getPrimaryType();
        // The original repository path is carried in the exported "jcr:path" property.
        String path = contentNode.getProperty("jcr:path").getValue();
        if (!isDocumentPathIncluded(pathMatcher, params, path)) {
            continue;
        }
        if (!HippoNodeUtils.isDocumentPath(path)) {
            continue;
        }
        ContentMigrationRecord record = null;
        try {
            // Turn relative binary attachment references back into absolute VFS URLs
            // under the base folder so the import task can read the attachment data.
            ContentNodeUtils.prependUrlPrefixInJcrDataValues(contentNode, BINARY_ATTACHMENT_REL_PATH,
                    baseFolderUrlPrefix);
            record = importTask.beginRecord("", path);
            record.setAttribute("file", file.getName().getPath());
            record.setProcessed(true);
            String locale = (contentNode.hasProperty("hippotranslation:locale"))
                    ? contentNode.getProperty("hippotranslation:locale").getValue()
                    : null;
            String localizedName = contentNode.getProperty("jcr:localizedName").getValue();
            applyTagContentProperties(contentNode, params.getDocumentTags());
            String updatedPath = importTask.createOrUpdateDocumentFromVariantContentNode(contentNode,
                    primaryTypeName, path, locale, localizedName);
            // Publish-on-import: ALL publishes everything; LIVE publishes only documents whose
            // hippo:availability property contains "live".
            boolean isToPublish = ExecutionParams.PUBLISH_ON_IMPORT_ALL.equals(params.getPublishOnImport());
            if (!isToPublish && ExecutionParams.PUBLISH_ON_IMPORT_LIVE.equals(params.getPublishOnImport())) {
                isToPublish = ContentNodeUtils.containsStringValueInProperty(contentNode,
                        HippoNodeType.HIPPO_AVAILABILITY, "live");
            }
            if (isToPublish) {
                // Depublish first so re-publishing picks up the freshly imported content.
                importTask.getDocumentManager().depublishDocument(updatedPath);
                importTask.getDocumentManager().publishDocument(updatedPath);
            }
            record.setSucceeded(true);
        } catch (Exception e) {
            // Per-file failure: record the error and continue with the next file.
            procLogger.error("Failed to process record: {}", record, e);
            if (record != null) {
                record.setErrorMessage(e.toString());
            }
        } finally {
            if (record != null) {
                importTask.endRecord();
                result.addItem(recordToResultItem(record));
                result.incrementTotalDocumentCount();
                if (record.isSucceeded()) {
                    result.incrementSucceededDocumentCount();
                } else {
                    result.incrementFailedDocumentCount();
                }
                if (processStatus != null) {
                    // the remaining 5% for cleaning paths to convert those to uuids.
                    processStatus.setProgress(0.95 * ((double) batchCount) / ((double) jsonFiles.length));
                }
            }
            ++batchCount;
            // Every batchSize files: persist, refresh the JCR session, optionally throttle.
            if (batchCount % params.getBatchSize() == 0) {
                importTask.getDocumentManager().getSession().save();
                importTask.getDocumentManager().getSession().refresh(false);
                if (params.getThrottle() > 0) {
                    Thread.sleep(params.getThrottle());
                }
            }
        }
    }
    importTask.getDocumentManager().getSession().save();
    importTask.getDocumentManager().getSession().refresh(false);
    return batchCount;
}
From source file:org.ow2.proactive.scheduler.smartproxy.JobTrackerImpl.java
/**
 * Removes from the proxy knowledge all info related with the given job.
 * This will also delete every folder created by the job in the shared input and output spaces.
 *
 * @param id jobID
 */
public void removeAwaitedJob(String id) {
    AwaitedJob aj = jobDatabase.getAwaitedJob(id);
    if (aj == null) {
        logger.warn("Job " + id + " not in the awaited list");
        return;
    }
    logger.debug("Removing knowledge of job " + id);
    String pullUrl = aj.getPullURL();
    String pushUrl = aj.getPushURL();
    FileObject remotePullFolder = null;
    FileObject remotePushFolder = null;
    try {
        remotePullFolder = resolveFile(pullUrl);
        remotePushFolder = resolveFile(pushUrl);
    } catch (Exception e) {
        // If either URL cannot be resolved, leave the job data untouched.
        logger.error("Could not remove data for job " + id, e);
        return;
    }
    // When task outputs are isolated, the pull URL points at a per-task subfolder; step up
    // one level so the whole job-level folder is considered for deletion.
    if (aj.isIsolateTaskOutputs()) {
        try {
            remotePullFolder = remotePullFolder.getParent();
        } catch (FileSystemException e) {
            logger.error("Could not get the parent of folder " + remotePullFolder, e);
        }
    }
    // A Set ensures a parent folder shared by pull and push is only deleted once.
    Set<FileObject> foldersToDelete = new HashSet<>();
    try {
        foldersToDelete.add(remotePullFolder.getParent());
        if (!remotePullFolder.getParent().equals(remotePushFolder.getParent()))
            foldersToDelete.add(remotePushFolder.getParent());
    } catch (FileSystemException e) {
        logger.warn("Data in folders " + pullUrl + " and " + pushUrl
                + " cannot be deleted due to an unexpected error ", e);
    }
    String url = "NOT YET DEFINED";
    for (FileObject fo : foldersToDelete) {
        try {
            url = fo.getURL().toString();
            // NOTE(review): deletion is skipped when TRACE logging is enabled — presumably to
            // keep the job's files around for debugging; confirm this is intentional.
            if (!logger.isTraceEnabled()) {
                logger.debug("Deleting directory " + url);
                // SELECT_ALL deletes the folder and all descendants; the bare delete() after
                // it is a defensive follow-up for the folder itself.
                fo.delete(Selectors.SELECT_ALL);
                fo.delete();
            }
        } catch (FileSystemException e) {
            logger.warn("Could not delete temporary files at location " + url + " .", e);
        }
    }
    jobDatabase.removeAwaitedJob(id);
    try {
        jobDatabase.commit();
    } catch (IOException e) {
        logger.error("Could not save status file after removing job " + id, e);
    }
}
From source file:org.ow2.proactive.scheduler.smartproxy.SmartProxyImpl.java
/**
 * Transfers the output files of an awaited task from its remote pull folder to a local folder,
 * either asynchronously via a background {@code DataTransferProcessor} (automatic transfer) or
 * synchronously by copying in place.
 *
 * @param awaitedjob the awaited job owning the task
 * @param jobId id of the job (used for messages and tracker bookkeeping)
 * @param t_name name of the task whose output is transferred
 * @param localFolder destination folder on the local file system
 * @throws IllegalArgumentException if the task is unknown for this job or already removed
 * @throws IllegalStateException if the remote or local folder cannot be resolved
 * @throws Exception if the synchronous copy fails
 */
@Override
protected void downloadTaskOutputFiles(AwaitedJob awaitedjob, String jobId, String t_name, String localFolder)
        throws Exception {
    AwaitedTask atask = awaitedjob.getAwaitedTask(t_name);
    if (atask == null) {
        throw new IllegalArgumentException(
                "The task " + t_name + " does not belong to job " + jobId + " or has already been removed");
    }
    // Avoid starting a second transfer for a task that is already transferring.
    if (atask.isTransferring()) {
        log.warn("The task " + t_name + " of job " + jobId + " is already transferring its output");
        return;
    }
    String pull_URL = awaitedjob.getPullURL();
    // With isolated task outputs, each task writes under its own taskId subfolder.
    if (awaitedjob.isIsolateTaskOutputs()) {
        pull_URL = pull_URL.replace(SchedulerConstants.TASKID_DIR_DEFAULT_NAME,
                SchedulerConstants.TASKID_DIR_DEFAULT_NAME + "/" + atask.getTaskId());
    }
    FileObject remotePullFolderFO;
    FileObject localfolderFO;
    try {
        remotePullFolderFO = jobTracker.resolveFile(pull_URL);
        localfolderFO = jobTracker.resolveFile(localFolder);
    } catch (FileSystemException e) {
        log.error("Could not retrieve data for job " + jobId, e);
        throw new IllegalStateException("Could not retrieve data for job " + jobId, e);
    }
    String sourceUrl = remotePullFolderFO.getURL().toString();
    String destUrl = localfolderFO.getURL().toString();
    // Merge the include/exclude patterns of all output selectors into one selector.
    org.objectweb.proactive.extensions.dataspaces.vfs.selector.FileSelector fileSelector = new org.objectweb.proactive.extensions.dataspaces.vfs.selector.FileSelector();
    List<OutputSelector> ouputFileSelectors = atask.getOutputSelectors();
    for (OutputSelector os : ouputFileSelectors) {
        org.objectweb.proactive.extensions.dataspaces.vfs.selector.FileSelector fs = os.getOutputFiles();
        if (!fs.getIncludes().isEmpty()) {
            fileSelector.addIncludes(fs.getIncludes());
        }
        if (!fs.getExcludes().isEmpty()) {
            fileSelector.addExcludes(fs.getExcludes());
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("Looking at files in " + sourceUrl + " with " + fileSelector.getIncludes() + "-"
                + fileSelector.getExcludes());
        // NOTE(review): this polling loop (up to 50 retries x 100 ms) only runs when DEBUG
        // logging is enabled; the transfer below does not wait for matching files otherwise —
        // confirm this diagnostic-only behavior is intended.
        boolean goon = true;
        int cpt = 0;
        FileObject[] fos = null;
        while (goon) {
            fos = remotePullFolderFO.findFiles(fileSelector);
            goon = cpt < 50 && (fos == null || fos.length == 0);
            cpt++;
            if (goon) {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }
        if (fos != null && fos.length > 0) {
            for (FileObject fo : fos) {
                log.debug("Found " + fo.getName());
            }
        } else {
            log.warn("Couldn't find " + fileSelector.getIncludes() + "-" + fileSelector.getExcludes()
                    + " in " + sourceUrl);
        }
    }
    if (awaitedjob.isAutomaticTransfer()) {
        // Asynchronous path: a background task performs the copy; mark transferring first.
        DataTransferProcessor dtp = new DataTransferProcessor(remotePullFolderFO, localfolderFO, jobId,
                t_name, fileSelector);
        jobTracker.setTaskTransferring(jobId, t_name, true);
        threadPool.submit((Runnable) dtp);
    } else {
        // Synchronous path: copy in place, then clear the transferring flag and remove the
        // awaited task regardless of success.
        log.debug("Copying files from " + sourceUrl + " to " + destUrl);
        try {
            localfolderFO.copyFrom(remotePullFolderFO, fileSelector);
        } catch (FileSystemException e) {
            log.error(e);
            throw e;
        } finally {
            jobTracker.setTaskTransferring(jobId, t_name, false);
            jobTracker.removeAwaitedTask(jobId, t_name);
        }
        log.debug("Finished copying files from " + sourceUrl + " to " + destUrl);
        // ok we can remove the task
    }
}
From source file:org.ow2.proactive_grid_cloud_portal.scheduler.SchedulerStateRest.java
/**
 * Pushes a file from the local file system into the given DataSpace.
 *
 * @param sessionId
 *            a valid session id
 * @param spaceName
 *            the name of the DataSpace
 * @param filePath
 *            the path inside the DataSpace where to put the file e.g. "/myfolder"
 * @param multipart
 *            the form data containing : - fileName the name of the file that will be created
 *            on the DataSpace - fileContent the content of the file
 * @return true if the transfer succeeded
 * @see org.ow2.proactive.scheduler.common.SchedulerConstants for spaces names
 **/
@Override
public boolean pushFile(@HeaderParam("sessionid") String sessionId, @PathParam("spaceName") String spaceName,
        @PathParam("filePath") String filePath, MultipartFormDataInput multipart)
        throws IOException, NotConnectedRestException, PermissionRestException {
    checkAccess(sessionId, "pushFile");
    Session session = dataspaceRestApi.checkSessionValidity(sessionId);
    Map<String, List<InputPart>> formDataMap = multipart.getFormDataMap();
    // Both multipart parts are mandatory: "fileName" and "fileContent".
    List<InputPart> fNL = formDataMap.get("fileName");
    if ((fNL == null) || (fNL.size() == 0)) {
        throw new IllegalArgumentException("Illegal multipart argument definition (fileName), received " + fNL);
    }
    String fileName = fNL.get(0).getBody(String.class, null);
    List<InputPart> fCL = formDataMap.get("fileContent");
    if ((fCL == null) || (fCL.size() == 0)) {
        throw new IllegalArgumentException(
                "Illegal multipart argument definition (fileContent), received " + fCL);
    }
    InputStream fileContent = fCL.get(0).getBody(InputStream.class, null);
    if (fileName == null) {
        throw new IllegalArgumentException("Wrong file name : " + fileName);
    }
    filePath = normalizeFilePath(filePath, fileName);
    FileObject destfo = dataspaceRestApi.resolveFile(session, spaceName, filePath);
    URL targetUrl = destfo.getURL();
    logger.info("[pushFile] pushing file to " + targetUrl);
    if (!destfo.isWriteable()) {
        RuntimeException ex = new IllegalArgumentException(
                "File " + filePath + " is not writable in space " + spaceName);
        logger.error(ex);
        throw ex;
    }
    // Overwrite semantics: any pre-existing file at the target path is replaced.
    if (destfo.exists()) {
        destfo.delete();
    }
    // used to create the necessary directories if needed
    destfo.createFile();
    dataspaceRestApi.writeFile(fileContent, destfo, null);
    return true;
}
From source file:org.ow2.proactive_grid_cloud_portal.scheduler.SchedulerStateRest.java
/**
 * Either pulls a file from the given DataSpace to the local file system or lists the content
 * of a directory if the path refers to a directory. In the case the path to a file is given,
 * the content of this file will be returned as an input stream. In the case the path to a
 * directory is given, the input stream returned will be a text stream containing at each line
 * the content of the directory.
 *
 * @param sessionId
 *            a valid session id
 * @param spaceName
 *            the name of the data space involved (GLOBAL or USER)
 * @param filePath
 *            the path to the file or directory whose content must be received
 **/
@Override
public InputStream pullFile(@HeaderParam("sessionid") String sessionId,
        @PathParam("spaceName") String spaceName, @PathParam("filePath") String filePath)
        throws IOException, NotConnectedRestException, PermissionRestException {
    checkAccess(sessionId, "pullFile");
    Session session = dataspaceRestApi.checkSessionValidity(sessionId);
    filePath = normalizeFilePath(filePath, null);
    FileObject sourcefo = dataspaceRestApi.resolveFile(session, spaceName, filePath);
    if (!sourcefo.exists() || !sourcefo.isReadable()) {
        RuntimeException ex = new IllegalArgumentException(
                "File " + filePath + " does not exist or is not readable in space " + spaceName);
        logger.error(ex);
        throw ex;
    }
    if (sourcefo.getType().equals(FileType.FOLDER)) {
        logger.info("[pullFile] reading directory content from " + sourcefo.getURL());
        // if it's a folder we return an InputStream listing its content
        StringBuilder sb = new StringBuilder();
        String nl = System.lineSeparator();
        for (FileObject fo : sourcefo.getChildren()) {
            sb.append(fo.getName().getBaseName() + nl);
        }
        // NOTE(review): IOUtils.toInputStream without a charset uses the platform default
        // encoding — confirm this matches what callers of this endpoint expect.
        return IOUtils.toInputStream(sb.toString());
    } else if (sourcefo.getType().equals(FileType.FILE)) {
        logger.info("[pullFile] reading file content from " + sourcefo.getURL());
        return sourcefo.getContent().getInputStream();
    } else {
        // Neither a plain file nor a folder (e.g. imaginary/unknown type).
        RuntimeException ex = new IllegalArgumentException(
                "File " + filePath + " has an unsupported type " + sourcefo.getType());
        logger.error(ex);
        throw ex;
    }
}
From source file:org.ow2.proactive_grid_cloud_portal.scheduler.SchedulerStateRest.java
/** * Deletes a file or recursively delete a directory from the given DataSpace * /* ww w . j a va 2 s . co m*/ * @param sessionId * a valid session id * @param spaceName * the name of the data space involved (GLOBAL or USER) * @param filePath * the path to the file or directory which must be deleted **/ @Override public boolean deleteFile(@HeaderParam("sessionid") String sessionId, @PathParam("spaceName") String spaceName, @PathParam("filePath") String filePath) throws IOException, NotConnectedRestException, PermissionRestException { checkAccess(sessionId, "deleteFile"); Session session = dataspaceRestApi.checkSessionValidity(sessionId); filePath = normalizeFilePath(filePath, null); FileObject sourcefo = dataspaceRestApi.resolveFile(session, spaceName, filePath); if (!sourcefo.exists() || !sourcefo.isWriteable()) { RuntimeException ex = new IllegalArgumentException( "File or Folder " + filePath + " does not exist or is not writable in space " + spaceName); logger.error(ex); throw ex; } if (sourcefo.getType().equals(FileType.FILE)) { logger.info("[deleteFile] deleting file " + sourcefo.getURL()); sourcefo.delete(); } else if (sourcefo.getType().equals(FileType.FOLDER)) { logger.info("[deleteFile] deleting folder (and all its descendants) " + sourcefo.getURL()); sourcefo.delete(Selectors.SELECT_ALL); } else { RuntimeException ex = new IllegalArgumentException( "File " + filePath + " has an unsupported type " + sourcefo.getType()); logger.error(ex); throw ex; } return true; }