Example usage for org.springframework.core.io Resource getFile

List of usage examples for org.springframework.core.io Resource getFile

Introduction

In this page you can find the example usage for org.springframework.core.io Resource getFile.

Prototype

File getFile() throws IOException;

Source Link

Document

Return a File handle for this resource.

Usage

From source file:com.epam.catgenome.manager.GffManagerTest.java

@Test
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void testGetNextFeature() throws IOException, InterruptedException, FeatureIndexException,
        NoSuchAlgorithmException, GeneReadingException {
    // Register the sorted GTF test file against the test reference.
    Resource resource = context.getResource(GENES_SORTED_GTF_PATH);

    FeatureIndexedFileRegistrationRequest registrationRequest = new FeatureIndexedFileRegistrationRequest();
    registrationRequest.setReferenceId(referenceId);
    registrationRequest.setPath(resource.getFile().getAbsolutePath());

    GeneFile geneFile = gffManager.registerGeneFile(registrationRequest);
    Assert.assertNotNull(geneFile);
    Assert.assertNotNull(geneFile.getId());

    // Load all genes on the test chromosome at full detail.
    Track<Gene> geneTrack = new Track<>();
    geneTrack.setId(geneFile.getId());
    geneTrack.setStartIndex(1);
    geneTrack.setEndIndex(TEST_END_INDEX);
    geneTrack.setChromosome(testChromosome);
    geneTrack.setScaleFactor(FULL_QUERY_SCALE_FACTOR);

    Track<Gene> loadedTrack = gffManager.loadGenes(geneTrack, false);
    Assert.assertNotNull(loadedTrack);
    Assert.assertFalse(loadedTrack.getBlocks().isEmpty());

    // Collect every "exon" feature from the gene -> transcript -> item hierarchy.
    List<Gene> exons = loadedTrack.getBlocks().stream()
            .filter(gene -> gene.getItems() != null)
            .flatMap(gene -> gene.getItems().stream())
            .filter(transcript -> transcript.getItems() != null)
            .flatMap(transcript -> transcript.getItems().stream())
            .filter(item -> "exon".equals(item.getFeature()))
            .collect(Collectors.toCollection(ArrayList::new));

    Assert.assertFalse(exons.isEmpty());

    // Pick two adjacent exons (by start index) from the middle of the list.
    exons.sort((a, b) -> a.getStartIndex().compareTo(b.getStartIndex()));
    int middle = exons.size() / 2;
    Gene currentExon = exons.get(middle);
    Gene followingExon = exons.get(middle + 1);

    // "Next feature" from the end of the current exon must be the following exon.
    double startTime = Utils.getSystemTimeMilliseconds();
    Gene nextFeature = gffManager.getNextOrPreviousFeature(currentExon.getEndIndex(), geneFile.getId(),
            testChromosome.getId(), true);
    double endTime = Utils.getSystemTimeMilliseconds();
    logger.info("next feature took {} ms", endTime - startTime);
    Assert.assertNotNull(nextFeature);
    Assert.assertEquals(followingExon.getStartIndex(), nextFeature.getStartIndex());
    Assert.assertEquals(followingExon.getEndIndex(), nextFeature.getEndIndex());

    // "Previous feature" from the start of the following exon must be the current exon.
    startTime = Utils.getSystemTimeMilliseconds();
    Gene previousFeature = gffManager.getNextOrPreviousFeature(followingExon.getStartIndex(), geneFile.getId(),
            testChromosome.getId(), false);
    endTime = Utils.getSystemTimeMilliseconds();
    logger.info("prev feature took {} ms", endTime - startTime);

    Assert.assertNotNull(previousFeature);
    Assert.assertEquals(currentExon.getStartIndex(), previousFeature.getStartIndex());
    Assert.assertEquals(currentExon.getEndIndex(), previousFeature.getEndIndex());
}

From source file:com.epam.catgenome.manager.GffManagerTest.java

private boolean testRegister(String path) throws IOException, InterruptedException, FeatureIndexException,
        NoSuchAlgorithmException, HistogramReadingException, GeneReadingException {
    // Register the gene file found at the given resource path.
    Resource resource = context.getResource(path);

    FeatureIndexedFileRegistrationRequest registrationRequest = new FeatureIndexedFileRegistrationRequest();
    registrationRequest.setReferenceId(referenceId);
    registrationRequest.setPath(resource.getFile().getAbsolutePath());

    GeneFile geneFile = gffManager.registerGeneFile(registrationRequest);
    Assert.assertNotNull(geneFile);
    Assert.assertNotNull(geneFile.getId());

    // A histogram must be available for the freshly registered file.
    Track<Wig> histogramTrack = new Track<>();
    histogramTrack.setId(geneFile.getId());
    histogramTrack.setChromosome(testChromosome);
    histogramTrack.setScaleFactor(1.0);

    gffManager.loadHistogram(histogramTrack);
    Assert.assertFalse(histogramTrack.getBlocks().isEmpty());

    // Full-detail load: genes together with their child features.
    Track<Gene> detailedTrack = new Track<>();
    detailedTrack.setId(geneFile.getId());
    detailedTrack.setStartIndex(1);
    detailedTrack.setEndIndex(TEST_END_INDEX);
    detailedTrack.setChromosome(testChromosome);
    detailedTrack.setScaleFactor(FULL_QUERY_SCALE_FACTOR);

    double started = Utils.getSystemTimeMilliseconds();
    Track<Gene> detailedResult = gffManager.loadGenes(detailedTrack, false);
    double finished = Utils.getSystemTimeMilliseconds();
    logger.info("genes loading : {} ms", finished - started);
    Assert.assertNotNull(detailedResult);
    Assert.assertFalse(detailedResult.getBlocks().isEmpty());
    logger.info("{} genes", detailedResult.getBlocks().size());

    // Every transcript must carry an exon count and an amino acid length, and
    // the exon count must match the actual number of child "exon" features.
    for (Gene gene : detailedResult.getBlocks()) {
        if (gene.getItems() == null) {
            continue;
        }
        Assert.assertTrue(gene.getItems().stream().filter(GeneUtils::isTranscript)
                .allMatch(transcript -> transcript.getExonsCount() != null
                        && transcript.getAminoacidLength() != null
                        && transcript.getExonsCount() == transcript.getItems().stream()
                                .filter(item -> "exon".equalsIgnoreCase(item.getFeature())).count()));
    }

    // Low-detail load: only mapped genes, without child items.
    Track<Gene> condensedTrack = new Track<>();
    condensedTrack.setId(geneFile.getId());
    condensedTrack.setStartIndex(1);
    condensedTrack.setEndIndex(TEST_END_INDEX);
    condensedTrack.setChromosome(testChromosome);
    condensedTrack.setScaleFactor(SMALL_SCALE_FACTOR);

    started = Utils.getSystemTimeMilliseconds();
    Track<Gene> condensedResult = gffManager.loadGenes(condensedTrack, false);
    finished = Utils.getSystemTimeMilliseconds();
    logger.info("genes loading large scale: {} ms", finished - started);

    Assert.assertEquals(condensedResult.getBlocks().size(),
            detailedResult.getBlocks().stream().filter(Gene::isMapped).count());
    Assert.assertFalse(condensedResult.getBlocks().isEmpty());
    Assert.assertTrue(condensedResult.getBlocks().stream().allMatch(g -> g.getItems() == null));

    // Unregistering must remove the file record, its data items and its directory.
    gffManager.unregisterGeneFile(geneFile.getId());

    boolean failed = false;
    try {
        geneFileManager.loadGeneFile(geneFile.getId());
    } catch (IllegalArgumentException e) {
        // Expected: the file no longer exists.
        failed = true;
    }
    Assert.assertTrue(failed);
    List<BiologicalDataItem> dataItems = biologicalDataItemDao.loadBiologicalDataItemsByIds(
            Arrays.asList(geneFile.getBioDataItemId(), geneFile.getIndex().getId()));
    Assert.assertTrue(dataItems.isEmpty());

    File dir = new File(baseDirPath + "/42/genes/" + geneFile.getId());
    Assert.assertFalse(dir.exists());

    return true;
}

From source file:com.epam.catgenome.manager.GffManagerTest.java

@Test
@Transactional(propagation = Propagation.REQUIRES_NEW, rollbackFor = Exception.class)
public void testRegisterGffFail()
        throws IOException, FeatureIndexException, InterruptedException, NoSuchAlgorithmException {
    // Registration of an unsorted GTF must be rejected with
    // TribbleException.MalformedFeatureFile.
    Resource resource = context.getResource("classpath:templates/Felis_catus.Felis_catus_6.2.81.gtf");

    FeatureIndexedFileRegistrationRequest registrationRequest = new FeatureIndexedFileRegistrationRequest();
    registrationRequest.setReferenceId(referenceId);
    registrationRequest.setPath(resource.getFile().getAbsolutePath());

    boolean failed = true;
    try {
        gffManager.registerGeneFile(registrationRequest);
    } catch (TribbleException.MalformedFeatureFile e) {
        // Expected path: the file is not sorted.
        failed = false;
    }

    Assert.assertFalse("Not failed on unsorted file", failed);

    // Disabled scenario: registration with a fake index should fail as well.
    /*Resource fakeIndex = context.getResource("classpath:templates/fake_gtf_index.tbi");
    request.setIndexPath(fakeIndex.getFile().getAbsolutePath());

    failed = true;
    try {
    gffManager.registerGeneFile(request);
    } catch (Exception e) {
    failed = false;
    }

    Assert.assertFalse("Not failed on unsorted file", failed);*/
}

From source file:com.epam.catgenome.manager.GffManagerTest.java

@Test
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void testUnmappedGenes() throws InterruptedException, NoSuchAlgorithmException, FeatureIndexException,
        IOException, GeneReadingException, HistogramReadingException {
    // Create and register a dedicated reference containing a single chromosome
    // "chr1", so that the GTF's features cannot be mapped onto it.
    Chromosome testChr1 = EntityHelper.createNewChromosome();
    testChr1.setName("chr1");
    testChr1.setSize(TEST_CHROMOSOME_SIZE);
    Reference testRef = EntityHelper.createNewReference(testChr1, referenceGenomeManager.createReferenceId());

    referenceGenomeManager.register(testRef);
    // FIX: use the id of the reference registered above. The original read the
    // id from the unrelated 'testReference' field, so the gene file was
    // registered against the wrong reference while the tracks below query
    // testChr1, which belongs to 'testRef'.
    Long testRefId = testRef.getId();

    Resource resource = context.getResource("classpath:templates/mrna.sorted.chunk.gtf");

    FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
    request.setReferenceId(testRefId);
    request.setPath(resource.getFile().getAbsolutePath());

    GeneFile geneFile = gffManager.registerGeneFile(request);
    Assert.assertNotNull(geneFile);
    Assert.assertNotNull(geneFile.getId());

    // Full-scale load: every returned gene must be unmapped.
    Track<Gene> track = new Track<>();
    track.setId(geneFile.getId());
    track.setStartIndex(1);
    track.setEndIndex(TEST_UNMAPPED_END_INDEX);
    track.setChromosome(testChr1);
    track.setScaleFactor(FULL_QUERY_SCALE_FACTOR);

    double time1 = Utils.getSystemTimeMilliseconds();
    Track<Gene> featureList = gffManager.loadGenes(track, false);
    double time2 = Utils.getSystemTimeMilliseconds();
    logger.info("genes loading : {} ms", time2 - time1);
    Assert.assertNotNull(featureList);
    Assert.assertFalse(featureList.getBlocks().isEmpty());
    Assert.assertTrue(featureList.getBlocks().stream().noneMatch(Gene::isMapped));

    // Small scale factor: unmapped genes get grouped, but the total feature
    // count across the groups must stay the same.
    Track<Gene> smallScaleFactorTrack = new Track<>();
    smallScaleFactorTrack.setId(geneFile.getId());
    smallScaleFactorTrack.setStartIndex(1);
    smallScaleFactorTrack.setEndIndex(TEST_UNMAPPED_END_INDEX);
    smallScaleFactorTrack.setChromosome(testChr1);
    smallScaleFactorTrack.setScaleFactor(SMALL_SCALE_FACTOR);

    time1 = Utils.getSystemTimeMilliseconds();
    featureList = gffManager.loadGenes(smallScaleFactorTrack, false);
    time2 = Utils.getSystemTimeMilliseconds();
    logger.info("genes large scale loading : {} ms", time2 - time1);
    Assert.assertNotNull(featureList);
    Assert.assertFalse(featureList.getBlocks().isEmpty());
    Assert.assertTrue(featureList.getBlocks().stream().noneMatch(Gene::isMapped));
    int groupedGenesCount = featureList.getBlocks().stream()
            .mapToInt(Gene::getFeatureCount).sum();
    logger.debug("{} features total", groupedGenesCount);
    Assert.assertEquals(TEST_FEATURE_COUNT, groupedGenesCount);

    // The histogram must be available for the unmapped file as well.
    Track<Wig> histogram = new Track<>();
    histogram.setId(geneFile.getId());
    histogram.setChromosome(testChr1);
    histogram.setScaleFactor(1.0);

    gffManager.loadHistogram(histogram);
    Assert.assertFalse(histogram.getBlocks().isEmpty());
}

From source file:de.ingrid.admin.Config.java

/**
 * Persists the given plug description into the override configuration
 * properties file: the existing properties are loaded first, plug description
 * values are merged in under the "plugdescription." prefix (skipping keys in
 * IGNORE_LIST), and the result is written back with keys sorted.
 *
 * Failures are logged and not propagated — configuration persistence is
 * best-effort by design.
 *
 * @param pd the plug description command object whose values are persisted
 */
@SuppressWarnings("rawtypes")
public void writePlugdescriptionToProperties(PlugdescriptionCommandObject pd) {
    try {
        Resource override = getOverrideConfigResource();
        File overrideFile = override.getFile();

        // Properties subclass whose keys() enumerates in sorted order, so the
        // stored file is stable and diff-friendly.
        Properties props = new Properties() {
            private static final long serialVersionUID = 6956076060462348684L;

            @Override
            public synchronized Enumeration<Object> keys() {
                return Collections.enumeration(new TreeSet<Object>(super.keySet()));
            }
        };

        // FIX: try-with-resources — the original stream leaked whenever an
        // exception was thrown before the explicit close() near the end.
        try (InputStream is = new FileInputStream(overrideFile)) {
            props.load(is);
        }

        for (Iterator<Object> it = pd.keySet().iterator(); it.hasNext();) {
            String key = (String) it.next();

            // do not write properties from plug description we do not want
            if (IGNORE_LIST.contains(key))
                continue;

            Object valObj = pd.get(key);
            if (valObj instanceof String) {
                props.setProperty("plugdescription." + key, (String) valObj);
            } else if (valObj instanceof List) {
                props.setProperty("plugdescription." + key, convertListToString((List) valObj));
            } else if (valObj instanceof Integer) {
                // The admin GUI port maps to a top-level jetty property, not
                // to the plugdescription namespace.
                if ("IPLUG_ADMIN_GUI_PORT".equals(key)) {
                    props.setProperty("jetty.port", String.valueOf(valObj));
                } else {
                    props.setProperty("plugdescription." + key, String.valueOf(valObj));
                }
            } else if (valObj instanceof File) {
                props.setProperty("plugdescription." + key, ((File) valObj).getPath());
            } else {
                if (valObj != null) {
                    props.setProperty("plugdescription." + key, valObj.toString());
                } else {
                    log.warn("value of plugdescription field was NULL: " + key);
                }
            }
        }

        // always write working dir as relative path if it was set as such
        String workDir = pd.getRealWorkingDir();
        if (workDir == null) {
            workDir = pd.getWorkinDirectory() == null ? "." : pd.getWorkinDirectory().getPath();
        }
        props.setProperty("plugdescription.workingDirectory", workDir);

        props.setProperty("plugdescription.queryExtensions",
                convertQueryExtensionsToString(this.queryExtensions));

        props.setProperty("index.searchInTypes", StringUtils.join(this.indexSearchInTypes, ','));

        setDatatypes(props);

        // Let an externally provided configuration contribute or override values.
        IConfig externalConfig = JettyStarter.getInstance().getExternalConfig();
        if (externalConfig != null) {
            externalConfig.setPropertiesFromPlugdescription(props, pd);
            externalConfig.addPlugdescriptionValues(pd);
        }

        try (OutputStream os = new FileOutputStream(overrideFile)) {
            if (log.isDebugEnabled()) {
                log.debug("writing configuration to: " + overrideFile.getAbsolutePath());
            }
            props.store(os, "Override configuration written by the application");
        }
    } catch (Exception e) {
        log.error("Error writing properties:", e);
    }
}

From source file:com.epam.catgenome.manager.GffManagerTest.java

@Test
@Transactional(propagation = Propagation.REQUIRES_NEW, rollbackFor = Exception.class)
public void testLoadGenesTranscript() throws IOException, InterruptedException, FeatureIndexException,
        NoSuchAlgorithmException, ExternalDbUnavailableException {
    MockitoAnnotations.initMocks(this);
    String fetchRes1 = readFile("ensembl_id_ENSG00000177663.json");
    String fetchRes2 = readFile("uniprot_id_ENST00000319363.xml");
    String fetchRes3 = readFile("uniprot_id_ENST00000319363.xml");
    Mockito.when(httpDataManager.fetchData(Mockito.any(), Mockito.any(ParameterNameValue[].class)))
            .thenReturn(fetchRes1).thenReturn(fetchRes2).thenReturn(fetchRes3);

    Chromosome otherChromosome = EntityHelper.createNewChromosome("22");
    otherChromosome.setSize(TEST_CHROMOSOME_SIZE);
    Reference otherReference = EntityHelper.createNewReference(otherChromosome,
            referenceGenomeManager.createReferenceId());

    referenceGenomeManager.register(otherReference);
    Long otherReferenceId = otherReference.getId();

    Resource resource = context.getResource("classpath:templates/Homo_sapiens.GRCh38.83.sorted.chr21-22.gtf");

    FeatureIndexedFileRegistrationRequest request = new FeatureIndexedFileRegistrationRequest();
    request.setReferenceId(otherReferenceId);
    request.setPath(resource.getFile().getAbsolutePath());

    GeneFile geneFile = gffManager.registerGeneFile(request);
    Assert.assertNotNull(geneFile);//from ww  w  .j  av  a 2 s.  c o  m
    Assert.assertNotNull(geneFile.getId());

    Track<Gene> track = new Track<>();
    track.setId(geneFile.getId());
    track.setStartIndex(START_INDEX_ASTRA);
    track.setEndIndex(END_INDEX_ASTRA);
    track.setChromosome(otherChromosome);
    track.setScaleFactor(FULL_QUERY_SCALE_FACTOR);
    try {
        Track<GeneTranscript> featureList = gffManager.loadGenesTranscript(track, null, null);
        Assert.assertNotNull(featureList);
        Assert.assertFalse(featureList.getBlocks().isEmpty());
        Gene testGene = featureList.getBlocks().get(0);
        Assert.assertNotNull(testGene);
        Assert.assertFalse(testGene.getTranscripts().isEmpty());
        Transcript testTranscript = testGene.getTranscripts().get(1);

        Assert.assertTrue(testTranscript.getBioType().equals(PROTEIN_CODING));

        Assert.assertFalse(testTranscript.getDomain().isEmpty());
        Assert.assertFalse(testTranscript.getExon().isEmpty());
        Assert.assertFalse(testTranscript.getSecondaryStructure().isEmpty());
        Assert.assertFalse(testTranscript.getPdb().isEmpty());
    } catch (GeneReadingException e) {
        logger.info("database unavailable");
    }
}

From source file:org.opennms.ng.dao.support.PropertiesGraphDao.java

/**
 * Scans the include directory configured for the given prefab graph type and
 * loads every "*.properties" file found there; afterwards records the scan
 * timestamp on the type (even when no include directory is configured).
 *
 * @param type the prefab graph type whose include directory is scanned
 * @throws IOException if the include directory resource cannot be resolved to a file
 */
private void scanIncludeDirectory(PrefabGraphTypeDao type) throws IOException {
    Resource includeDirectoryResource = type.getIncludeDirectoryResource();

    if (includeDirectoryResource != null) {
        File includeDirectory = includeDirectoryResource.getFile();
        // Include all the files in the directory, knowing that the
        // format is slightly different (no report name required in
        // each property name, and report.id is expected)
        FilenameFilter propertyFilesFilter = new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return (name.endsWith(".properties"));
            }
        };
        File[] propertyFiles = includeDirectory.listFiles(propertyFilesFilter);

        // FIX: listFiles() returns null when the path is not a directory or an
        // I/O error occurs; the original dereferenced it unconditionally (NPE).
        if (propertyFiles != null) {
            for (File file : propertyFiles) {
                loadIncludedFile(type, file);
            }
        }
    }
    type.setLastIncludeScan(System.currentTimeMillis());
}

From source file:architecture.ee.web.spring.controller.SecureWebMgmtDataController.java

/**
 * Template/*from   w w  w. jav  a2 s. co m*/
 */

/**
 * Lists template files for a web site, either from the template source root
 * (empty path) or from the given sub-path, optionally from the customized
 * template location when customization is enabled.
 *
 * @param siteId     id of the web site; 0 resolves the site from the request
 * @param path       optional sub-path under the template source location
 * @param customized whether to list customized templates (honored only when
 *                   template customization is enabled)
 * @param request    the current web request, used to resolve the site when siteId is 0
 * @return file infos for the listed directory; empty on I/O failure
 * @throws NotFoundException if the web site cannot be resolved
 */
@RequestMapping(value = "/mgmt/template/list.json", method = { RequestMethod.POST, RequestMethod.GET })
@ResponseBody
public List<FileInfo> getTemplateList(
        @RequestParam(value = "siteId", defaultValue = "0", required = false) Long siteId,
        @RequestParam(value = "path", defaultValue = "", required = false) String path,
        @RequestParam(value = "customized", defaultValue = "false", required = false) boolean customized,
        NativeWebRequest request) throws NotFoundException {
    // Resolve the site first; getWebSiteById/getWebSite may throw NotFoundException.
    WebSite webSite;
    if (siteId > 0)
        webSite = webSiteManager.getWebSiteById(siteId);
    else
        webSite = WebSiteUtils.getWebSite(request.getNativeRequest(HttpServletRequest.class));

    boolean customizedToUse = customized && isTemplateCustomizedEnabled();

    Resource root = resourceLoader.getResource(getTemplateSrouceLocation(customizedToUse));
    List<FileInfo> list = new ArrayList<FileInfo>();
    try {
        File file = root.getFile();
        // FIX: the sub-path lookup used the raw 'customized' flag instead of
        // 'customizedToUse', bypassing the isTemplateCustomizedEnabled() guard
        // applied to the root lookup above.
        File targetDir = StringUtils.isEmpty(path) ? file
                : resourceLoader.getResource(getTemplateSrouceLocation(customizedToUse) + path).getFile();
        // FIX: listFiles() returns null for non-directories or on I/O error;
        // the original iterated it unconditionally (NPE).
        File[] children = targetDir.listFiles();
        if (children != null) {
            for (File f : children) {
                list.add(new FileInfo(file, f, customizedToUse));
            }
        }
    } catch (IOException e) {
        log.error(e);
    }
    return list;
}

From source file:org.red5.server.stream.NoSyncServerStream.java

/**
 * {@inheritDoc}
 *
 * Prepares recording of this stream to an FLV file resolved under the
 * stream's scope. In "record" mode an existing file with the same name is
 * deleted first; in "append" mode the file is created as though "record" was
 * passed when it does not exist yet. A FileConsumer is then subscribed to
 * the record pipe with the chosen mode.
 */
public void saveAs(String name, boolean isAppend)
        throws IOException, ResourceNotFoundException, ResourceExistException {
    try {
        IScope scope = getScope();
        // Resolve the filename generator service configured for this scope
        // (falls back to the default generator).
        IStreamFilenameGenerator generator = (IStreamFilenameGenerator) ScopeUtils.getScopeService(scope,
                IStreamFilenameGenerator.class, DefaultStreamFilenameGenerator.class);

        String filename = generator.generateFilename(scope, name, ".flv", GenerationType.RECORD);
        Resource res = scope.getContext().getResource(filename);
        if (!isAppend) {
            if (res.exists()) {
                // Per livedoc of FCS/FMS:
                // When "live" or "record" is used,
                // any previously recorded stream with the same stream
                // URI is deleted.
                if (!res.getFile().delete())
                    throw new IOException("file could not be deleted");
            }
        } else {
            if (!res.exists()) {
                // Per livedoc of FCS/FMS:
                // If a recorded stream at the same URI does not already
                // exist,
                // "append" creates the stream as though "record" was
                // passed.
                isAppend = false;
            }
        }

        if (!res.exists()) {
            // Make sure the destination directory exists
            try {
                // res.getFile() may throw IOException here; directory
                // creation is best-effort and only logged on failure.
                String path = res.getFile().getAbsolutePath();
                int slashPos = path.lastIndexOf(File.separator);
                if (slashPos != -1) {
                    path = path.substring(0, slashPos);
                }
                File tmp = new File(path);
                if (!tmp.isDirectory()) {
                    tmp.mkdirs();
                }
            } catch (IOException err) {
                log.error("Could not create destination directory.", err);
            }
            // Re-resolve the resource now that the directory should exist.
            res = scope.getResource(filename);
        }

        if (!res.exists()) {
            if (!res.getFile().canWrite()) {
                log.warn("File cannot be written to " + res.getFile().getCanonicalPath());
            }
            res.getFile().createNewFile();
        }
        // Consumer that writes the stream data to the resolved file; mode
        // ("append"/"record") is passed via the subscription parameters.
        FileConsumer fc = new FileConsumer(scope, res.getFile());
        Map<String, Object> paramMap = new HashMap<String, Object>();
        if (isAppend) {
            paramMap.put("mode", "append");
        } else {
            paramMap.put("mode", "record");
        }
        // Lazily create the in-memory pipe connecting provider and consumer.
        if (null == recordPipe) {
            recordPipe = new InMemoryPushPushPipe();
        }
        recordPipe.subscribe(fc, paramMap);
        recordingFilename = filename;
    } catch (IOException e) {
        // NOTE(review): the method declares IOException but swallows it here
        // with a warning — presumably intentional best-effort; confirm callers
        // do not rely on the exception propagating.
        log.warn("Save as exception", e);
    }
}

From source file:it.doqui.index.ecmengine.business.personalization.hibernate.RoutingLocalSessionFactoryBean.java

/**
 * Builds a routing SessionFactory: a single Hibernate Configuration is
 * assembled from the bean's settings (data source, JTA transaction manager,
 * LOB handler, type/filter definitions, mappings, cache strategies, event
 * listeners), then one SessionFactory per registered repository is created
 * and wrapped in a SessionFactoryProxy keyed by repository id.
 *
 * Thread-local config-time holders (data source / TM / LOB handler) are set
 * before configuration and reset in the finally block, mirroring Spring's
 * LocalSessionFactoryBean contract.
 *
 * @return the SessionFactoryProxy routing to per-repository factories
 * @throws Exception if configuration or factory creation fails
 */
protected SessionFactory buildSessionFactory() throws Exception {
    logger.debug("[RoutingLocalSessionFactoryBean::buildSessionFactory] BEGIN");
    SessionFactory sf = null;

    // Create Configuration instance.
    Configuration config = newConfiguration();

    DataSource currentDataSource = getCurrentDataSource();
    logger.debug("[RoutingLocalSessionFactoryBean::buildSessionFactory] " + "Repository '"
            + RepositoryManager.getCurrentRepository() + "' -- Got currentDataSource: " + currentDataSource);

    if (currentDataSource == null) {
        throw new IllegalStateException("Null DataSource!");
    }

    // Make given DataSource available for SessionFactory configuration.
    logger.debug("[RoutingLocalSessionFactoryBean::buildSessionFactory] " + "Thread '"
            + Thread.currentThread().getName() + "' -- Setting DataSource for current thread: "
            + currentDataSource);
    CONFIG_TIME_DS_HOLDER.set(currentDataSource);

    if (this.jtaTransactionManager != null) {
        // Make Spring-provided JTA TransactionManager available.
        CONFIG_TIME_TM_HOLDER.set(this.jtaTransactionManager);
    }

    if (this.lobHandler != null) {
        // Make given LobHandler available for SessionFactory configuration.
        // Do early because mapping resource might refer to custom types.
        CONFIG_TIME_LOB_HANDLER_HOLDER.set(this.lobHandler);
    }

    try {
        // Set connection release mode "on_close" as default.
        // This was the case for Hibernate 3.0; Hibernate 3.1 changed
        // it to "auto" (i.e. "after_statement" or "after_transaction").
        // However, for Spring's resource management (in particular for
        // HibernateTransactionManager), "on_close" is the better default.
        config.setProperty(Environment.RELEASE_CONNECTIONS, ConnectionReleaseMode.ON_CLOSE.toString());

        if (!isExposeTransactionAwareSessionFactory()) {
            // Not exposing a SessionFactory proxy with transaction-aware
            // getCurrentSession() method -> set Hibernate 3.1 CurrentSessionContext
            // implementation instead, providing the Spring-managed Session that way.
            // Can be overridden by a custom value for corresponding Hibernate property.
            config.setProperty(Environment.CURRENT_SESSION_CONTEXT_CLASS,
                    "org.springframework.orm.hibernate3.SpringSessionContext");
        }

        if (this.entityInterceptor != null) {
            // Set given entity interceptor at SessionFactory level.
            config.setInterceptor(this.entityInterceptor);
        }

        if (this.namingStrategy != null) {
            // Pass given naming strategy to Hibernate Configuration.
            config.setNamingStrategy(this.namingStrategy);
        }

        if (this.typeDefinitions != null) {
            // Register specified Hibernate type definitions.
            Mappings mappings = config.createMappings();
            for (int i = 0; i < this.typeDefinitions.length; i++) {
                TypeDefinitionBean typeDef = this.typeDefinitions[i];
                mappings.addTypeDef(typeDef.getTypeName(), typeDef.getTypeClass(), typeDef.getParameters());
            }
        }

        if (this.filterDefinitions != null) {
            // Register specified Hibernate FilterDefinitions.
            for (int i = 0; i < this.filterDefinitions.length; i++) {
                config.addFilterDefinition(this.filterDefinitions[i]);
            }
        }

        if (this.configLocations != null) {
            for (int i = 0; i < this.configLocations.length; i++) {
                // Load Hibernate configuration from given location.
                config.configure(this.configLocations[i].getURL());
            }
        }

        if (this.hibernateProperties != null) {
            // Add given Hibernate properties to Configuration.
            config.addProperties(this.hibernateProperties);
        }

        if (currentDataSource != null) {
            boolean actuallyTransactionAware = (this.useTransactionAwareDataSource
                    || currentDataSource instanceof TransactionAwareDataSourceProxy);
            // Set Spring-provided DataSource as Hibernate ConnectionProvider.
            config.setProperty(Environment.CONNECTION_PROVIDER,
                    actuallyTransactionAware ? TransactionAwareDataSourceConnectionProvider.class.getName()
                            : RoutingLocalDataSourceConnectionProvider.class.getName());
        }

        if (this.jtaTransactionManager != null) {
            // Set Spring-provided JTA TransactionManager as Hibernate property.
            config.setProperty(Environment.TRANSACTION_MANAGER_STRATEGY,
                    LocalTransactionManagerLookup.class.getName());
        }

        if (this.mappingLocations != null) {
            // Register given Hibernate mapping definitions, contained in resource files.
            for (int i = 0; i < this.mappingLocations.length; i++) {
                config.addInputStream(this.mappingLocations[i].getInputStream());
            }
        }

        if (this.cacheableMappingLocations != null) {
            // Register given cacheable Hibernate mapping definitions, read from the file system.
            for (int i = 0; i < this.cacheableMappingLocations.length; i++) {
                config.addCacheableFile(this.cacheableMappingLocations[i].getFile());
            }
        }

        if (this.mappingJarLocations != null) {
            // Register given Hibernate mapping definitions, contained in jar files.
            for (int i = 0; i < this.mappingJarLocations.length; i++) {
                Resource resource = this.mappingJarLocations[i];
                config.addJar(resource.getFile());
            }
        }

        if (this.mappingDirectoryLocations != null) {
            // Register all Hibernate mapping definitions in the given directories.
            for (int i = 0; i < this.mappingDirectoryLocations.length; i++) {
                File file = this.mappingDirectoryLocations[i].getFile();
                if (!file.isDirectory()) {
                    throw new IllegalArgumentException("Mapping directory location ["
                            + this.mappingDirectoryLocations[i] + "] does not denote a directory");
                }
                config.addDirectory(file);
            }
        }

        if (this.entityCacheStrategies != null) {
            // Register cache strategies for mapped entities.
            // Property values are "strategy" or "strategy,region".
            for (Enumeration<?> classNames = this.entityCacheStrategies.propertyNames(); classNames
                    .hasMoreElements(); /* */) {
                String className = (String) classNames.nextElement();
                String[] strategyAndRegion = StringUtils
                        .commaDelimitedListToStringArray(this.entityCacheStrategies.getProperty(className));
                if (strategyAndRegion.length > 1) {
                    config.setCacheConcurrencyStrategy(className, strategyAndRegion[0], strategyAndRegion[1]);
                } else if (strategyAndRegion.length > 0) {
                    config.setCacheConcurrencyStrategy(className, strategyAndRegion[0]);
                }
            }
        }

        if (this.collectionCacheStrategies != null) {
            // Register cache strategies for mapped collections.
            // Property values are "strategy" or "strategy,region".
            for (Enumeration<?> collRoles = this.collectionCacheStrategies.propertyNames(); collRoles
                    .hasMoreElements(); /* */) {
                String collRole = (String) collRoles.nextElement();
                String[] strategyAndRegion = StringUtils
                        .commaDelimitedListToStringArray(this.collectionCacheStrategies.getProperty(collRole));
                if (strategyAndRegion.length > 1) {
                    config.setCollectionCacheConcurrencyStrategy(collRole, strategyAndRegion[0],
                            strategyAndRegion[1]);
                } else if (strategyAndRegion.length > 0) {
                    config.setCollectionCacheConcurrencyStrategy(collRole, strategyAndRegion[0]);
                }
            }
        }

        if (this.eventListeners != null) {
            // Register specified Hibernate event listeners. A Collection value
            // registers multiple listeners for the same listener type.
            for (Map.Entry<?, ?> entry : this.eventListeners.entrySet()) {
                Assert.isTrue(entry.getKey() instanceof String,
                        "Event listener key needs to be of type String");
                String listenerType = (String) entry.getKey();
                Object listenerObject = entry.getValue();

                if (listenerObject instanceof Collection) {
                    Collection<?> listeners = (Collection<?>) listenerObject;
                    EventListeners listenerRegistry = config.getEventListeners();
                    Object[] listenerArray = (Object[]) Array
                            .newInstance(listenerRegistry.getListenerClassFor(listenerType), listeners.size());
                    listenerArray = listeners.toArray(listenerArray);
                    config.setListeners(listenerType, listenerArray);
                } else {
                    config.setListener(listenerType, listenerObject);
                }
            }
        }

        // Perform custom post-processing in subclasses.
        postProcessConfiguration(config);

        // Build SessionFactory instance.
        logger.debug(
                "[RoutingLocalSessionFactoryBean::buildSessionFactory] Building new Hibernate SessionFactory.");
        this.configuration = config;

        // One SessionFactory per repository, all behind a routing proxy keyed
        // by repository id; the default repository is the proxy's default key.
        SessionFactoryProxy sessionFactoryProxy = new SessionFactoryProxy(
                repositoryManager.getDefaultRepository().getId());
        for (Repository repository : repositoryManager.getRepositories()) {
            logger.debug("[RoutingLocalSessionFactoryBean::buildSessionFactory] " + "Repository '"
                    + repository.getId() + "' -- Building SessionFactory...");

            // newSessionFactory reads the "current repository" thread-local,
            // so it is switched per iteration and restored afterwards.
            RepositoryManager.setCurrentRepository(repository.getId());
            sessionFactoryProxy.addSessionFactory(repository.getId(), newSessionFactory(config));
        }
        RepositoryManager.setCurrentRepository(repositoryManager.getDefaultRepository().getId());
        sf = sessionFactoryProxy;
    } finally {
        if (currentDataSource != null) {
            // Reset DataSource holder.
            CONFIG_TIME_DS_HOLDER.set(null);
        }

        if (this.jtaTransactionManager != null) {
            // Reset TransactionManager holder.
            CONFIG_TIME_TM_HOLDER.set(null);
        }

        if (this.lobHandler != null) {
            // Reset LobHandler holder.
            CONFIG_TIME_LOB_HANDLER_HOLDER.set(null);
        }
    }

    // Execute schema update if requested.
    if (this.schemaUpdate) {
        updateDatabaseSchema();
    }

    return sf;
}