List of usage examples for org.springframework.core.io.Resource#getURI
URI getURI() throws IOException;
From source file:org.springframework.cloud.config.monitor.FileMonitorConfiguration.java
private Set<Path> getFileRepo() { if (this.scmRepositories != null) { String repositoryUri = null; Set<Path> paths = new LinkedHashSet<>(); try {//ww w . j a v a 2 s. c o m for (AbstractScmEnvironmentRepository repository : scmRepositories) { repositoryUri = repository.getUri(); Resource resource = this.resourceLoader.getResource(repositoryUri); if (resource instanceof FileSystemResource) { paths.add(Paths.get(resource.getURI())); } } return paths; } catch (IOException e) { log.error("Cannot resolve URI for path: " + repositoryUri); } } if (this.nativeEnvironmentRepository != null) { Set<Path> paths = new LinkedHashSet<>(); for (String path : this.nativeEnvironmentRepository.getSearchLocations()) { Resource resource = this.resourceLoader.getResource(path); if (resource.exists()) { try { paths.add(Paths.get(resource.getURI())); } catch (Exception e) { log.error("Cannot resolve URI for path: " + path); } } } return paths; } return null; }
From source file:org.springframework.cloud.config.server.support.AbstractScmAccessor.java
/**
 * Resolves the directories under {@code dir} that match the (possibly wildcarded)
 * {@code value} pattern, returning their URIs as strings.
 * <p>
 * Resolution is best-effort: any {@link IOException} during pattern matching or
 * file access is deliberately ignored and whatever was collected so far is
 * returned (possibly an empty list).
 *
 * @param dir   the base directory to search under
 * @param value the Ant-style pattern to match below {@code dir}
 * @return URIs of matching directories; never {@code null}
 */
private List<String> matchingDirectories(File dir, String value) {
    List<String> output = new ArrayList<String>();
    try {
        PathMatchingResourcePatternResolver resolver =
                new PathMatchingResourcePatternResolver(this.resourceLoader);
        String path = new File(dir, value).toURI().toString();
        for (Resource resource : resolver.getResources(path)) {
            if (resource.getFile().isDirectory()) {
                output.add(resource.getURI().toString());
            }
        }
    }
    catch (IOException ignored) {
        // Best-effort: an unresolvable pattern simply contributes no matches.
    }
    return output;
}
From source file:org.springframework.cloud.deployer.spi.yarn.YarnAppDeployer.java
private boolean isHdfsResource(Resource resource) { try {//from www.j av a 2s . c o m return resource != null && resource.getURI().getScheme().equals("hdfs"); } catch (IOException e) { return false; } }
From source file:org.springframework.cloud.deployer.spi.yarn.YarnAppDeployer.java
private String getHdfsArtifactPath(Resource resource) { String path = null;//w w w. java2s . c o m try { path = "/" + FilenameUtils.getPath(resource.getURI().getPath()); } catch (IOException e) { } return path; }
From source file:org.springframework.cloud.stream.app.tensorflow.util.ModelExtractor.java
/**
 * Loads the raw bytes of a model from the given resource, transparently handling
 * compression and archiving based on the resource's file name extension.
 * <p>
 * Four cases are supported: compressor + archive (e.g. .tar.gz), compressor only,
 * archive only, and a plain file. When the resource is an archive, the URI
 * fragment (the part after '#') selects the entry to extract.
 *
 * @param modelResource the model resource; must not be {@code null}
 * @return the model bytes
 * @throws IllegalStateException wrapping any failure, with the resource
 *         description included for diagnosis
 */
public byte[] getModel(Resource modelResource) {
    Assert.notNull(modelResource, "Not null model resource is required!");
    // Both streams are closed in reverse order by try-with-resources.
    try (InputStream is = modelResource.getInputStream(); InputStream bi = new BufferedInputStream(is)) {
        // [0] = archive format name (or empty), [1] = compressor name (or empty),
        // both detected from the file name's extension(s).
        String[] archiveCompressor = detectArchiveAndCompressor(modelResource.getFilename());
        String archive = archiveCompressor[0];
        String compressor = archiveCompressor[1];
        // Fragment names the archive entry to extract, e.g. model.tar.gz#saved_model.pb
        String fragment = modelResource.getURI().getFragment();
        if (StringUtils.hasText(compressor)) {
            try (CompressorInputStream cis = new CompressorStreamFactory()
                    .createCompressorInputStream(compressor, bi)) {
                if (StringUtils.hasText(archive)) {
                    try (ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(archive,
                            cis)) {
                        // Compressor with Archive
                        return findInArchiveStream(fragment, ais);
                    }
                }
                else {
                    // Compressor only
                    return StreamUtils.copyToByteArray(cis);
                }
            }
        }
        else if (StringUtils.hasText(archive)) {
            // Archive only
            try (ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(archive, bi)) {
                return findInArchiveStream(fragment, ais);
            }
        }
        else {
            // No compressor nor Archive
            return StreamUtils.copyToByteArray(bi);
        }
    }
    catch (Exception e) {
        throw new IllegalStateException("Failed to extract a model from: " + modelResource.getDescription(), e);
    }
}
From source file:org.springframework.context.expression.ApplicationContextExpressionTests.java
@Test public void resourceInjection() throws IOException { System.setProperty("logfile", "do_not_delete_me.txt"); try (AnnotationConfigApplicationContext ac = new AnnotationConfigApplicationContext( ResourceInjectionBean.class)) { ResourceInjectionBean resourceInjectionBean = ac.getBean(ResourceInjectionBean.class); Resource resource = new ClassPathResource("do_not_delete_me.txt"); assertEquals(resource, resourceInjectionBean.resource); assertEquals(resource.getURL(), resourceInjectionBean.url); assertEquals(resource.getURI(), resourceInjectionBean.uri); assertEquals(resource.getFile(), resourceInjectionBean.file); assertArrayEquals(FileCopyUtils.copyToByteArray(resource.getInputStream()), FileCopyUtils.copyToByteArray(resourceInjectionBean.inputStream)); assertEquals(FileCopyUtils.copyToString(new EncodedResource(resource).getReader()), FileCopyUtils.copyToString(resourceInjectionBean.reader)); } finally {//from w w w.ja va 2 s . c o m System.getProperties().remove("logfile"); } }
From source file:org.springframework.data.hadoop.fs.DistributedCacheFactoryBean.java
/**
 * Converts the given resources into distributed-cache entries of the given type
 * and registers them, or clears the entries when {@code resources} is null.
 *
 * @param cp        the cache entry type to assign to every resource
 * @param resources the resources to register; null clears the entries
 * @throws IllegalArgumentException if a resource's URI cannot be resolved
 */
private void setEntries(EntryType cp, Collection<Resource> resources) {
    if (resources == null) {
        setEntries(null);
        return;
    }
    Collection<CacheEntry> converted = new ArrayList<CacheEntry>(resources.size());
    for (Resource candidate : resources) {
        try {
            converted.add(new CacheEntry(cp, candidate.getURI().toString()));
        }
        catch (IOException ex) {
            throw new IllegalArgumentException("Cannot resolve resource " + candidate, ex);
        }
    }
    setEntries(converted);
}
From source file:org.springframework.data.hadoop.fs.HdfsResourceLoader.java
/**
 * Find all resources in the hdfs file system that match the given location
 * pattern via the Ant-style PathMatcher.
 *
 * @param rootDirResource the root directory as Resource
 * @param subPattern the sub pattern to match (below the root directory)
 * @return the Set of matching Resource instances
 * @throws IOException in case of I/O errors
 */
protected Set<Resource> doFindPathMatchingFileResources(Resource rootDirResource, String subPattern)
        throws IOException {
    Path rootDir;
    try {
        if (rootDirResource instanceof HdfsResource) {
            // Native HDFS resources already carry their path.
            rootDir = ((HdfsResource) rootDirResource).getPath();
        }
        else {
            rootDir = new Path(rootDirResource.getURI().toString());
        }
    }
    catch (IOException ex) {
        if (log.isWarnEnabled()) {
            log.warn("Cannot search for matching files underneath " + rootDirResource
                    + " because it does not correspond to a directory in the file system", ex);
        }
        return Collections.emptySet();
    }
    return doFindMatchingFileSystemResources(rootDir, subPattern);
}
From source file:org.springframework.integration.xml.transformer.XsltPayloadTransformer.java
/**
 * Compensate for the fact that a Resource <i>may</i> not be a File or even
 * addressable through a URI. If it is, we want the created StreamSource to
 * read other resources relative to the provided one. If it isn't, it loads
 * from the default path.
 */
private static StreamSource createStreamSourceOnResource(Resource xslResource) throws IOException {
    try {
        // NOTE: the try intentionally covers both getURI() and getInputStream();
        // an IOException from either triggers the systemId-less fallback below.
        String systemId = xslResource.getURI().toString();
        return new StreamSource(xslResource.getInputStream(), systemId);
    } catch (IOException e) {
        // Resource is not URI-addressable: use the default system id.
        return new StreamSource(xslResource.getInputStream());
    }
}
From source file:org.springframework.security.ldap.server.ApacheDsSSLContainer.java
protected void importLdifs() throws Exception { // Import any ldif files Resource[] ldifs = ldifResources; // Note that we can't just import using the ServerContext returned // from starting Apache DS, apparently because of the long-running issue // DIRSERVER-169. // We need a standard context. // DirContext dirContext = contextSource.getReadWriteContext(); if (ldifs == null || ldifs.length == 0) { return;// w ww . ja va 2 s . co m } for (Resource resource : ldifs) { String ldifFile; try { ldifFile = resource.getFile().getAbsolutePath(); } catch (IOException e) { ldifFile = resource.getURI().toString(); } logger.info("Loading LDIF file: " + ldifFile); new LdifFileLoader(service.getAdminSession(), new File(ldifFile), null, getClass().getClassLoader()) .execute(); } }