List of usage examples for the org.apache.commons.vfs2 FileObject.resolveFile method.
FileObject resolveFile(String path) throws FileSystemException;
From source file: org.onehippo.forge.content.exim.repository.jaxrs.ContentEximExportService.java
/**
 * Exports the documents listed in {@code result} as JSON files under {@code baseFolder}.
 * <p>
 * For each result item, the handle path is filtered (inclusion patterns, document-path check,
 * node existence), the published variant is exported — falling back to the unpublished variant
 * when no published one exists — docbase UUIDs are replaced by paths, and the content node is
 * written to {@code <baseFolder>/<variantPath>.json}. Processing stops early if a stop-request
 * file appears under the base folder.
 *
 * @param processStatus        process status to update with progress; may be null
 * @param params               execution parameters (path filters, docbase property names,
 *                             batch size, throttle, document tags)
 * @param batchCount           running count of processed records carried across invocations
 * @param referredBinaryPaths  collects paths of binaries referenced by exported documents
 * @return the updated running batch count
 * @throws Exception on unrecoverable repository or I/O failures; per-document failures are
 *                   caught, logged, and recorded instead of propagating
 */
private int exportDocuments(Logger procLogger, ProcessStatus processStatus, ExecutionParams params,
        WorkflowDocumentVariantExportTask exportTask, Result result, int batchCount, FileObject baseFolder,
        Set<String> referredBinaryPaths) throws Exception {
    final String baseFolderUrlPrefix = baseFolder.getURL().toString() + "/";
    final AntPathMatcher pathMatcher = new AntPathMatcher();

    for (ResultItem item : result.getItems()) {
        // Cooperative cancellation: a marker file under the base folder requests a stop.
        if (isStopRequested(baseFolder)) {
            procLogger.info("Stop requested by file at {}/{}", baseFolder.getName().getPath(),
                    STOP_REQUEST_FILE_REL_PATH);
            break;
        }

        ContentMigrationRecord record = null;

        try {
            String handlePath = item.getPath();

            // Skip paths excluded by the configured include patterns.
            if (!isDocumentPathIncluded(pathMatcher, params, handlePath)) {
                continue;
            }
            // Skip paths that are not document handle paths.
            if (!HippoNodeUtils.isDocumentPath(handlePath)) {
                continue;
            }
            // Skip paths that no longer exist in the session.
            if (!exportTask.getDocumentManager().getSession().nodeExists(handlePath)) {
                continue;
            }

            Node handle = exportTask.getDocumentManager().getSession().getNode(handlePath);
            Map<String, Node> variantsMap = HippoNodeUtils.getDocumentVariantsMap(handle);

            // Prefer the published variant; fall back to unpublished.
            Node variant = variantsMap.get(HippoStdNodeType.PUBLISHED);
            if (variant == null) {
                variant = variantsMap.get(HippoStdNodeType.UNPUBLISHED);
            }
            if (variant == null) {
                continue;
            }

            String variantPath = variant.getPath();
            record = exportTask.beginRecord(variant.getIdentifier(), variantPath);
            Document document = new Document(variant.getIdentifier());
            ContentNode contentNode = exportTask.exportVariantToContentNode(document);
            record.setProcessed(true);

            // Replace mirror docbase UUIDs with node paths; referenced binary paths are
            // accumulated into referredBinaryPaths for a later export phase.
            ContentNodeUtils.replaceDocbasesByPaths(exportTask.getDocumentManager().getSession(), contentNode,
                    ContentNodeUtils.MIRROR_DOCBASES_XPATH, referredBinaryPaths);

            // Also convert any explicitly configured docbase string properties.
            Set<String> docbasePropNames = params.getDocbasePropNames();
            if (CollectionUtils.isNotEmpty(docbasePropNames)) {
                for (String docbasePropName : docbasePropNames) {
                    ContentNodeUtils.replaceDocbasePropertiesByPaths(
                            exportTask.getDocumentManager().getSession(), contentNode,
                            "properties[@itemName='" + docbasePropName + "']");
                }
            }

            // Strip the base-folder URL prefix from jcr:data values so output is relocatable.
            ContentNodeUtils.removeUrlPrefixInJcrDataValues(contentNode, baseFolderUrlPrefix);
            applyTagContentProperties(contentNode, params.getDocumentTags());

            // Output file mirrors the variant's JCR path (index notation removed), as .json.
            String relPath = StringUtils
                    .removeStart(ContentPathUtils.removeIndexNotationInNodePath(variantPath), "/");
            FileObject file = baseFolder.resolveFile(relPath + ".json");
            record.setAttribute("file", file.getName().getPath());
            exportTask.writeContentNodeToJsonFile(contentNode, file);
            procLogger.debug("Exported document from {} to {}.", handlePath, file.getName().getPath());
            record.setSucceeded(true);
        } catch (Exception e) {
            // Per-document failures are recorded and logged; the export continues.
            procLogger.error("Failed to process record: {}", record, e);
            if (record != null) {
                record.setErrorMessage(e.toString());
            }
        } finally {
            if (record != null) {
                exportTask.endRecord();
                result.incrementTotalDocumentCount();
                if (record.isSucceeded()) {
                    result.incrementSucceededDocumentCount();
                } else {
                    result.incrementFailedDocumentCount();
                }
                if (processStatus != null) {
                    processStatus.setProgress(result.getProgress());
                }
            }
            // Periodically discard pending session changes and optionally throttle,
            // so long exports do not accumulate transient state.
            ++batchCount;
            if (batchCount % params.getBatchSize() == 0) {
                exportTask.getDocumentManager().getSession().refresh(false);
                if (params.getThrottle() > 0) {
                    Thread.sleep(params.getThrottle());
                }
            }
        }
    }

    exportTask.getDocumentManager().getSession().refresh(false);
    return batchCount;
}
From source file: org.pentaho.di.core.hadoop.HadoopConfigurationBootstrapTest.java
/**
 * Verifies that the configurations directory is resolved relative to the plugin
 * directory using the path configured in the plugin properties.
 */
@Test
public void resolveHadoopConfigurationsDirectory() throws Exception {
    final FileObject pluginDir = VFS.getManager().resolveFile("ram://");
    prop.setProperty(HadoopConfigurationBootstrap.PROPERTY_HADOOP_CONFIGURATIONS_PATH,
            "hadoop-configs-go-here");

    final HadoopConfigurationBootstrap bootstrap = spy(new HadoopConfigurationBootstrap());
    doReturn(pluginDir).when(bootstrap).locatePluginDirectory();
    doReturn(prop).when(bootstrap).getPluginProperties();

    final FileObject resolved = bootstrap.resolveHadoopConfigurationsDirectory();

    assertNotNull(resolved);
    assertEquals(pluginDir.resolveFile("hadoop-configs-go-here").getURL(), resolved.getURL());
}
From source file: org.pentaho.di.core.hadoop.HadoopConfigurationBootstrapTest.java
/**
 * Verifies that the bootstrap hands back the provider produced by
 * initializeHadoopConfigurationProvider when the active configuration is valid.
 */
@Test
public void getHadoopConfigurationProvider() throws Exception {
    final FileObject pluginDir = VFS.getManager().resolveFile("ram://");
    final String configsPath = "hadoop-configs-go-here";
    pluginDir.resolveFile(configsPath).createFolder();

    final HadoopConfiguration config =
            new HadoopConfiguration(pluginDir, "test", "test", new MockHadoopShim());
    final HadoopConfigurationProvider provider =
            new MockHadoopConfigurationProvider(Arrays.asList(config), "test");

    prop.setProperty(HadoopConfigurationBootstrap.PROPERTY_HADOOP_CONFIGURATIONS_PATH, configsPath);
    prop.setProperty(HadoopConfigurationBootstrap.PROPERTY_ACTIVE_HADOOP_CONFIGURATION, "test");

    final HadoopConfigurationBootstrap bootstrap = spy(new HadoopConfigurationBootstrap());
    bootstrap.setPrompter(mock(HadoopConfigurationPrompter.class));
    doReturn(pluginDir).when(bootstrap).locatePluginDirectory();
    doReturn(prop).when(bootstrap).getPluginProperties();
    doReturn(prop).when(bootstrap).getMergedPmrAndPluginProperties();
    doReturn(provider).when(bootstrap).initializeHadoopConfigurationProvider(any(FileObject.class));

    assertEquals(provider, bootstrap.getProvider());
}
From source file: org.pentaho.di.core.hadoop.HadoopConfigurationBootstrapTest.java
@Test public void getHadoopConfigurationProvider_active_invalid() throws Exception { final FileObject ramRoot = VFS.getManager().resolveFile("ram://"); final String CONFIGS_PATH = "hadoop-configs-go-here"; ramRoot.resolveFile(CONFIGS_PATH).createFolder(); HadoopConfiguration c = new HadoopConfiguration(ramRoot, "test", "test", new MockHadoopShim()); HadoopConfigurationProvider provider = new MockHadoopConfigurationProvider(Arrays.asList(c), "invalid"); prop.setProperty(HadoopConfigurationBootstrap.PROPERTY_HADOOP_CONFIGURATIONS_PATH, CONFIGS_PATH); prop.setProperty(HadoopConfigurationBootstrap.PROPERTY_ACTIVE_HADOOP_CONFIGURATION, "invalid"); HadoopConfigurationBootstrap b = spy(new HadoopConfigurationBootstrap()); doReturn(ramRoot).when(b).locatePluginDirectory(); doReturn(prop).when(b).getPluginProperties(); doReturn(prop).when(b).getMergedPmrAndPluginProperties(); doReturn(provider).when(b).initializeHadoopConfigurationProvider(any(FileObject.class)); try {//from w ww .j a v a 2s . c o m b.getProvider(); fail("Expected exception"); } catch (ConfigurationException ex) { assertEquals("Invalid active Hadoop configuration: \"invalid\".", ex.getMessage()); } }
From source file: org.pentaho.di.core.hadoop.HadoopConfigurationBootstrapTest.java
/**
 * Verifies that an unexpected exception from the provider's
 * getActiveConfiguration() is surfaced as a ConfigurationException
 * reporting the active configuration as invalid.
 */
@Test
public void getHadoopConfigurationProvider_getActiveException() throws Exception {
    final FileObject pluginDir = VFS.getManager().resolveFile("ram://");
    final String configsPath = "hadoop-configs-go-here";
    pluginDir.resolveFile(configsPath).createFolder();

    prop.setProperty(HadoopConfigurationBootstrap.PROPERTY_HADOOP_CONFIGURATIONS_PATH, configsPath);
    prop.setProperty(HadoopConfigurationBootstrap.PROPERTY_ACTIVE_HADOOP_CONFIGURATION, "test");

    final HadoopConfiguration config =
            new HadoopConfiguration(pluginDir, "test", "test", new MockHadoopShim());
    try {
        // Provider whose active-configuration lookup blows up unexpectedly.
        final HadoopConfigurationProvider provider =
                new MockHadoopConfigurationProvider(Arrays.asList(config), "test") {
                    @Override
                    public HadoopConfiguration getActiveConfiguration() throws ConfigurationException {
                        throw new NullPointerException();
                    }
                };

        final HadoopConfigurationBootstrap bootstrap = spy(new HadoopConfigurationBootstrap());
        doReturn(pluginDir).when(bootstrap).locatePluginDirectory();
        doReturn(prop).when(bootstrap).getPluginProperties();
        doReturn(prop).when(bootstrap).getMergedPmrAndPluginProperties();
        doReturn(provider).when(bootstrap).initializeHadoopConfigurationProvider(any(FileObject.class));

        bootstrap.getProvider();
        fail("Expected exception");
    } catch (ConfigurationException ex) {
        assertEquals("Invalid active Hadoop configuration: \"test\".", ex.getMessage());
    }
}
From source file: org.pentaho.di.trans.steps.pentahoreporting.urlrepository.FileObjectContentLocation.java
/** * Returns the content entity with the given name. If the entity does not exist, an Exception will be raised. * * @param name the name of the entity to be retrieved. * @return the content entity for this name, never null. * @throws ContentIOException if an repository error occured. *///from w w w.j a va2 s . com public ContentEntity getEntry(final String name) throws ContentIOException { try { if (RepositoryUtilities.isInvalidPathName(name)) { throw new IllegalArgumentException("The name given is not valid."); } final FileObject file = getBackend(); final FileObject child = file.resolveFile(name); if (child.exists() == false) { throw new ContentIOException("Not found:" + child); } if (child.isFolder()) { return new FileObjectContentLocation(this, child); } else if (child.isFile()) { return new FileObjectContentItem(this, child); } else { throw new ContentIOException("Not File nor directory."); } } catch (FileSystemException e) { throw new RuntimeException(e); } }
From source file: org.pentaho.di.trans.steps.pentahoreporting.urlrepository.FileObjectContentLocation.java
/** * Creates a new data item in the current location. This method must never return null. This method will fail if an * entity with the same name exists in this location. * * @param name the name of the new entity. * @return the newly created entity, never null. * @throws ContentCreationException if the item could not be created. *//*from w ww .j a v a 2 s . c o m*/ public ContentItem createItem(final String name) throws ContentCreationException { if (RepositoryUtilities.isInvalidPathName(name)) { throw new IllegalArgumentException("The name given is not valid."); } try { final FileObject file = getBackend(); final FileObject child = file.resolveFile(name); if (child.exists()) { if (child.getContent().getSize() == 0) { // probably one of the temp files created by the pentaho-system return new FileObjectContentItem(this, child); } throw new ContentCreationException("File already exists: " + child); } try { child.createFile(); return new FileObjectContentItem(this, child); } catch (IOException e) { throw new ContentCreationException("IOError while create", e); } } catch (FileSystemException e) { throw new RuntimeException(e); } }
From source file: org.pentaho.di.trans.steps.pentahoreporting.urlrepository.FileObjectContentLocation.java
/**
 * Creates a new content location in the current location. This method must never return null.
 * This method will fail if an entity with the same name exists in this location.
 *
 * @param name the name of the new entity.
 * @return the newly created entity, never null.
 * @throws ContentCreationException if the location could not be created.
 */
public ContentLocation createLocation(final String name) throws ContentCreationException {
    if (RepositoryUtilities.isInvalidPathName(name)) {
        throw new IllegalArgumentException("The name given is not valid.");
    }
    try {
        final FileObject file = getBackend();
        final FileObject child = file.resolveFile(name);
        if (child.exists()) {
            throw new ContentCreationException("File already exists.");
        }
        // Fix: a content *location* is a directory. The previous createFile() call produced a
        // regular file, which getEntry() would later classify as a ContentItem, not a location.
        child.createFolder();
        try {
            return new FileObjectContentLocation(this, child);
        } catch (ContentIOException e) {
            throw new ContentCreationException("Failed to create the content-location", e);
        }
    } catch (FileSystemException e) {
        // Fix: report VFS failures through the declared checked exception (with cause)
        // instead of an undeclared RuntimeException.
        throw new ContentCreationException("Failed to create location: " + name, e);
    }
}
From source file: org.pentaho.di.trans.steps.pentahoreporting.urlrepository.FileObjectContentLocation.java
/**
 * Checks whether a content entity with the given name exists in this content location.
 * Invalid filenames are reported as non-existent rather than raising an exception.
 *
 * @param name the name of the entity to check.
 * @return true if an entity exists with this name, false otherwise.
 */
public boolean exists(final String name) {
    // Invalid path names never correspond to an entity.
    if (RepositoryUtilities.isInvalidPathName(name)) {
        return false;
    }
    try {
        return getBackend().resolveFile(name).exists();
    } catch (FileSystemException e) {
        throw new RuntimeException(e);
    }
}
From source file: org.pentaho.hadoop.shim.common.DistributedCacheTestUtil.java
/**
 * Creates a test folder under bin/test containing a couple of jar files, a nested
 * text file, a libraries zip placeholder, and a test Hadoop configuration.
 *
 * @param rootFolderName name of the folder to create under bin/test
 * @return the root FileObject of the created test folder
 */
static FileObject createTestFolderWithContent(String rootFolderName) throws Exception {
    final String rootName = "bin/test/" + rootFolderName;
    final FileObject root = KettleVFS.getFileObject(rootName);

    // Empty jar placeholders at the top level.
    for (final String jarName : new String[] { "jar1.jar", "jar2.jar" }) {
        root.resolveFile(jarName).createFile();
    }
    // A nested file (createFile also creates the intermediate folder).
    root.resolveFile("folder").resolveFile("file.txt").createFile();
    root.resolveFile("pentaho-mapreduce-libraries.zip").createFile();

    createTestHadoopConfiguration(rootName);
    return root;
}