Usage examples for org.apache.commons.io.FileUtils#copyFileToDirectory
Signature: public static void copyFileToDirectory(File srcFile, File destDir) throws IOException
From source file:org.geoserver.config.GeoServerDataDirectory.java
/**
 * Copies a file into the global style configuration directory.
 * <p>
 * If the style directory does not exist it will be created.
 * </p>
 *
 * @param f the file to copy into the global style directory
 * @throws IOException if the copy fails
 * @deprecated use {@link #copyToStyleDir(File, StyleInfo)}
 */
@Deprecated
public void copyToStyleDir(File f) throws IOException {
    FileUtils.copyFileToDirectory(f, styleDir(true, (WorkspaceInfo) null));
}
From source file:org.geoserver.config.GeoServerDataDirectory.java
/**
 * Copies a file into the style configuration directory associated with the given style,
 * creating that directory if it is missing.
 *
 * @param f the file to copy
 * @param s the style whose configuration directory receives the copy
 * @throws IOException if the copy fails
 */
public void copyToStyleDir(File f, StyleInfo s) throws IOException {
    // styleDir(true, ...) creates the target directory on demand before the copy
    File targetDirectory = styleDir(true, s);
    FileUtils.copyFileToDirectory(f, targetDirectory);
}
From source file:org.geotools.coverage.io.netcdf.NetCDFMosaicReaderTest.java
@Test public void testHarvestAddTime() throws IOException { // prepare a "mosaic" with just one NetCDF File nc1 = TestData.file(this, "polyphemus_20130301_test.nc"); File mosaic = new File(TestData.file(this, "."), "nc_harvest1"); if (mosaic.exists()) { FileUtils.deleteDirectory(mosaic); }//from w ww . ja v a 2s .co m assertTrue(mosaic.mkdirs()); FileUtils.copyFileToDirectory(nc1, mosaic); // The indexer String indexer = "TimeAttribute=time\n" + "Schema=the_geom:Polygon,location:String,imageindex:Integer,time:java.util.Date\n"; FileUtils.writeStringToFile(new File(mosaic, "indexer.properties"), indexer); // the datastore.properties file is also mandatory... File dsp = TestData.file(this, "datastore.properties"); FileUtils.copyFileToDirectory(dsp, mosaic); // have the reader harvest it ImageMosaicFormat format = new ImageMosaicFormat(); ImageMosaicReader reader = format.getReader(mosaic); SimpleFeatureIterator it = null; assertNotNull(reader); try { String[] names = reader.getGridCoverageNames(); assertEquals(1, names.length); assertEquals("O3", names[0]); // check we have the two granules we expect GranuleSource source = reader.getGranules("O3", true); FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2(); Query q = new Query(Query.ALL); q.setSortBy(new SortBy[] { ff.sort("time", SortOrder.ASCENDING) }); SimpleFeatureCollection granules = source.getGranules(q); assertEquals(2, granules.size()); it = granules.features(); assertTrue(it.hasNext()); SimpleFeature f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-03-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); assertTrue(it.hasNext()); f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(1, f.getAttribute("imageindex")); assertEquals("2013-03-01T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); 
it.close(); // now add another netcdf and harvest it File nc2 = TestData.file(this, "polyphemus_20130302_test.nc"); FileUtils.copyFileToDirectory(nc2, mosaic); File fileToHarvest = new File(mosaic, "polyphemus_20130302_test.nc"); List<HarvestedSource> harvestSummary = reader.harvest(null, fileToHarvest, null); assertEquals(1, harvestSummary.size()); HarvestedSource hf = harvestSummary.get(0); assertEquals("polyphemus_20130302_test.nc", ((File) hf.getSource()).getName()); assertTrue(hf.success()); assertEquals(1, reader.getGridCoverageNames().length); // check that we have four times now granules = source.getGranules(q); assertEquals(4, granules.size()); it = granules.features(); f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-03-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); assertTrue(it.hasNext()); f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(1, f.getAttribute("imageindex")); assertEquals("2013-03-01T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); f = it.next(); assertEquals("polyphemus_20130302_test.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-03-02T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); assertTrue(it.hasNext()); f = it.next(); assertEquals("polyphemus_20130302_test.nc", f.getAttribute("location")); assertEquals(1, f.getAttribute("imageindex")); assertEquals("2013-03-02T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); it.close(); } finally { if (it != null) { it.close(); } reader.dispose(); } }
From source file:org.geotools.coverage.io.netcdf.NetCDFMosaicReaderTest.java
@Test public void testReHarvest() throws Exception { // prepare a "mosaic" with just one NetCDF File nc1 = TestData.file(this, "polyphemus_20130301_test.nc"); File mosaic = new File(TestData.file(this, "."), "nc_harvest4"); if (mosaic.exists()) { FileUtils.deleteDirectory(mosaic); }/* w ww.j av a 2s. c om*/ assertTrue(mosaic.mkdirs()); FileUtils.copyFileToDirectory(nc1, mosaic); // The indexer String indexer = "TimeAttribute=time\n" + "Schema=the_geom:Polygon,location:String,imageindex:Integer,time:java.util.Date\n"; FileUtils.writeStringToFile(new File(mosaic, "indexer.properties"), indexer); // the datastore.properties file is also mandatory... File dsp = TestData.file(this, "datastore.properties"); FileUtils.copyFileToDirectory(dsp, mosaic); // have the reader harvest it ImageMosaicFormat format = new ImageMosaicFormat(); ImageMosaicReader reader = format.getReader(mosaic); SimpleFeatureIterator it = null; assertNotNull(reader); try { String[] names = reader.getGridCoverageNames(); assertEquals(1, names.length); assertEquals("O3", names[0]); // check we have the two granules we expect GranuleSource source = reader.getGranules("O3", true); FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2(); Query q = new Query(Query.ALL); q.setSortBy(new SortBy[] { ff.sort("time", SortOrder.ASCENDING) }); SimpleFeatureCollection granules = source.getGranules(q); assertEquals(2, granules.size()); it = granules.features(); assertTrue(it.hasNext()); SimpleFeature f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-03-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); assertTrue(it.hasNext()); f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(1, f.getAttribute("imageindex")); assertEquals("2013-03-01T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); it.close(); // 
close the reader and re-open it reader.dispose(); reader = format.getReader(mosaic); source = reader.getGranules("O3", true); // wait a bit, we have to make sure the old indexes are recognized as old Thread.sleep(1000); // now replace the netcdf file with a more up to date version of the same File nc2 = TestData.file(this, "polyphemus_20130301_test_more_times.nc"); File target = new File(mosaic, "polyphemus_20130301_test.nc"); FileUtils.copyFile(nc2, target, false); File fileToHarvest = new File(mosaic, "polyphemus_20130301_test.nc"); List<HarvestedSource> harvestSummary = reader.harvest(null, fileToHarvest, null); assertEquals(1, harvestSummary.size()); HarvestedSource hf = harvestSummary.get(0); assertEquals("polyphemus_20130301_test.nc", ((File) hf.getSource()).getName()); assertTrue(hf.success()); assertEquals(1, reader.getGridCoverageNames().length); // check that we have four times now source = reader.getGranules("O3", true); granules = source.getGranules(q); assertEquals(4, granules.size()); it = granules.features(); f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-03-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); assertTrue(it.hasNext()); f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(1, f.getAttribute("imageindex")); assertEquals("2013-03-01T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(2, f.getAttribute("imageindex")); assertEquals("2013-03-01T02:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); assertTrue(it.hasNext()); f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(3, f.getAttribute("imageindex")); assertEquals("2013-03-01T03:00:00.000Z", 
ConvertersHack.convert(f.getAttribute("time"), String.class)); it.close(); } finally { if (it != null) { it.close(); } reader.dispose(); } }
From source file:org.geotools.coverage.io.netcdf.NetCDFMosaicReaderTest.java
@Test public void testHarvestHDF5Data() throws IOException { File nc1 = TestData.file(this, "2DLatLonCoverage.nc"); File nc2 = TestData.file(this, "2DLatLonCoverage2.nc"); File mosaic = new File(TestData.file(this, "."), "simpleMosaic"); if (mosaic.exists()) { FileUtils.deleteDirectory(mosaic); }/*from w w w. j a v a 2s . co m*/ assertTrue(mosaic.mkdirs()); FileUtils.copyFileToDirectory(nc1, mosaic); FileUtils.copyFileToDirectory(nc2, mosaic); // the datastore.properties file is also mandatory... File dsp = TestData.file(this, "datastore.properties"); FileUtils.copyFileToDirectory(dsp, mosaic); File xml = TestData.file(this, "hdf5Coverage2D.xml"); FileUtils.copyFileToDirectory(xml, mosaic); // The indexer String indexer = "TimeAttribute=time\n" + "Schema=the_geom:Polygon,location:String,imageindex:Integer,time:java.util.Date\n"; // + "PropertyCollectors=TimestampFileNameExtractorSPI[timeregex](time)\n"; indexer += Prop.AUXILIARY_FILE + "=" + "hdf5Coverage2D.xml"; FileUtils.writeStringToFile(new File(mosaic, "indexer.properties"), indexer); //simply test if the mosaic can be read without exceptions ImageMosaicFormat format = new ImageMosaicFormat(); ImageMosaicReader reader = format.getReader(mosaic); reader.read("L1_V2", null); }
From source file:org.geotools.coverage.io.netcdf.NetCDFMosaicReaderTest.java
@Test public void testHarvestAddVariable() throws IOException { // prepare a "mosaic" with just one NetCDF File nc1 = TestData.file(this, "polyphemus_20130301_test.nc"); File mosaic = new File(TestData.file(this, "."), "nc_harvest2"); if (mosaic.exists()) { FileUtils.deleteDirectory(mosaic); }// w ww .j a va 2s .c o m assertTrue(mosaic.mkdirs()); FileUtils.copyFileToDirectory(nc1, mosaic); // The indexer String indexer = "TimeAttribute=time\n" + "Schema=the_geom:Polygon,location:String,imageindex:Integer,time:java.util.Date\n"; FileUtils.writeStringToFile(new File(mosaic, "indexer.properties"), indexer); // the datastore.properties file is also mandatory... File dsp = TestData.file(this, "datastore.properties"); FileUtils.copyFileToDirectory(dsp, mosaic); // have the reader harvest it ImageMosaicFormat format = new ImageMosaicFormat(); ImageMosaicReader reader = format.getReader(mosaic); SimpleFeatureIterator it = null; assertNotNull(reader); try { String[] names = reader.getGridCoverageNames(); assertEquals(1, names.length); assertEquals("O3", names[0]); // check we have the two granules we expect GranuleSource source = reader.getGranules("O3", true); FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2(); Query q = new Query(Query.ALL); q.setSortBy(new SortBy[] { ff.sort("time", SortOrder.ASCENDING) }); SimpleFeatureCollection granules = source.getGranules(q); assertEquals(2, granules.size()); it = granules.features(); assertTrue(it.hasNext()); SimpleFeature f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-03-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); assertTrue(it.hasNext()); f = it.next(); assertEquals("polyphemus_20130301_test.nc", f.getAttribute("location")); assertEquals(1, f.getAttribute("imageindex")); assertEquals("2013-03-01T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); 
it.close(); // now add another netcdf and harvest it File nc2 = TestData.file(this, "polyphemus_20130301_NO2.nc"); FileUtils.copyFileToDirectory(nc2, mosaic); File fileToHarvest = new File(mosaic, "polyphemus_20130301_NO2.nc"); List<HarvestedSource> harvestSummary = reader.harvest(null, fileToHarvest, null); assertEquals(1, harvestSummary.size()); HarvestedSource hf = harvestSummary.get(0); assertEquals("polyphemus_20130301_NO2.nc", ((File) hf.getSource()).getName()); assertTrue(hf.success()); // check we have two coverages now names = reader.getGridCoverageNames(); Arrays.sort(names); assertEquals(2, names.length); assertEquals("NO2", names[0]); assertEquals("O3", names[1]); // test the newly ingested granules, which are in a separate coverage q.setTypeName("NO2"); granules = source.getGranules(q); assertEquals(2, granules.size()); it = granules.features(); f = it.next(); assertEquals("polyphemus_20130301_NO2.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-03-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); assertTrue(it.hasNext()); f = it.next(); assertEquals("polyphemus_20130301_NO2.nc", f.getAttribute("location")); assertEquals(1, f.getAttribute("imageindex")); assertEquals("2013-03-01T01:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); it.close(); } finally { if (it != null) { it.close(); } reader.dispose(); } }
From source file:org.geotools.coverage.io.netcdf.NetCDFMosaicReaderTest.java
@Test public void testHarvest3Gome() throws IOException { // prepare a "mosaic" with just one NetCDF File nc1 = TestData.file(this, "20130101.METOPA.GOME2.NO2.DUMMY.nc"); File mosaic = new File(TestData.file(this, "."), "nc_harvest"); if (mosaic.exists()) { FileUtils.deleteDirectory(mosaic); }//from w w w .j a v a2 s . co m assertTrue(mosaic.mkdirs()); FileUtils.copyFileToDirectory(nc1, mosaic); File xml = TestData.file(this, ".DUMMY.GOME2.NO2.PGL/GOME2.NO2.xml"); FileUtils.copyFileToDirectory(xml, mosaic); // The indexer String indexer = "TimeAttribute=time\n" + "Schema=the_geom:Polygon,location:String,imageindex:Integer,time:java.util.Date\n" + "PropertyCollectors=TimestampFileNameExtractorSPI[timeregex](time)\n"; indexer += Prop.AUXILIARY_FILE + "=" + "GOME2.NO2.xml"; FileUtils.writeStringToFile(new File(mosaic, "indexer.properties"), indexer); String timeregex = "regex=[0-9]{8}"; FileUtils.writeStringToFile(new File(mosaic, "timeregex.properties"), timeregex); // the datastore.properties file is also mandatory... 
File dsp = TestData.file(this, "datastore.properties"); FileUtils.copyFileToDirectory(dsp, mosaic); // have the reader harvest it ImageMosaicFormat format = new ImageMosaicFormat(); ImageMosaicReader reader = format.getReader(mosaic); SimpleFeatureIterator it = null; assertNotNull(reader); try { String[] names = reader.getGridCoverageNames(); assertEquals(1, names.length); assertEquals("NO2", names[0]); // check we have the two granules we expect GranuleSource source = reader.getGranules("NO2", true); FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2(); Query q = new Query(Query.ALL); q.setSortBy(new SortBy[] { ff.sort("time", SortOrder.DESCENDING) }); SimpleFeatureCollection granules = source.getGranules(q); assertEquals(1, granules.size()); it = granules.features(); assertTrue(it.hasNext()); SimpleFeature f = it.next(); assertEquals("20130101.METOPA.GOME2.NO2.DUMMY.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-01-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); it.close(); // now add another netcdf and harvest it File nc2 = TestData.file(this, "20130116.METOPA.GOME2.NO2.DUMMY.nc"); FileUtils.copyFileToDirectory(nc2, mosaic); File fileToHarvest = new File(mosaic, "20130116.METOPA.GOME2.NO2.DUMMY.nc"); List<HarvestedSource> harvestSummary = reader.harvest("NO2", fileToHarvest, null); assertEquals(1, harvestSummary.size()); granules = source.getGranules(q); assertEquals(2, granules.size()); HarvestedSource hf = harvestSummary.get(0); assertEquals("20130116.METOPA.GOME2.NO2.DUMMY.nc", ((File) hf.getSource()).getName()); assertTrue(hf.success()); assertEquals(1, reader.getGridCoverageNames().length); File nc3 = TestData.file(this, "20130108.METOPA.GOME2.NO2.DUMMY.nc"); FileUtils.copyFileToDirectory(nc3, mosaic); fileToHarvest = new File(mosaic, "20130108.METOPA.GOME2.NO2.DUMMY.nc"); harvestSummary = reader.harvest("NO2", fileToHarvest, null); assertEquals(1, 
harvestSummary.size()); hf = harvestSummary.get(0); assertEquals("20130108.METOPA.GOME2.NO2.DUMMY.nc", ((File) hf.getSource()).getName()); assertTrue(hf.success()); assertEquals(1, reader.getGridCoverageNames().length); // check that we have 2 times now granules = source.getGranules(q); assertEquals(3, granules.size()); it = granules.features(); f = it.next(); assertEquals("20130116.METOPA.GOME2.NO2.DUMMY.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-01-16T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); assertTrue(it.hasNext()); f = it.next(); assertEquals("20130108.METOPA.GOME2.NO2.DUMMY.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-01-08T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); f = it.next(); assertEquals("20130101.METOPA.GOME2.NO2.DUMMY.nc", f.getAttribute("location")); assertEquals(0, f.getAttribute("imageindex")); assertEquals("2013-01-01T00:00:00.000Z", ConvertersHack.convert(f.getAttribute("time"), String.class)); it.close(); } finally { if (it != null) { it.close(); } reader.dispose(); } }
From source file:org.geotools.coverage.io.netcdf.NetCDFMosaicReaderTest.java
@Test public void testReadCoverageGome() throws IOException { // prepare a "mosaic" with just one NetCDF File nc1 = TestData.file(this, "20130101.METOPA.GOME2.NO2.DUMMY.nc"); File mosaic = new File(TestData.file(this, "."), "nc_harvest3"); if (mosaic.exists()) { FileUtils.deleteDirectory(mosaic); }/*from w ww .j a v a 2 s. co m*/ assertTrue(mosaic.mkdirs()); FileUtils.copyFileToDirectory(nc1, mosaic); File xml = TestData.file(this, ".DUMMY.GOME2.NO2.PGL/GOME2.NO2.xml"); FileUtils.copyFileToDirectory(xml, mosaic); // The indexer String indexer = "TimeAttribute=time\n" + "Schema=the_geom:Polygon,location:String,imageindex:Integer,time:java.util.Date\n" + "PropertyCollectors=TimestampFileNameExtractorSPI[timeregex](time)\n"; indexer += Prop.AUXILIARY_FILE + "=" + "GOME2.NO2.xml"; FileUtils.writeStringToFile(new File(mosaic, "indexer.properties"), indexer); String timeregex = "regex=[0-9]{8}"; FileUtils.writeStringToFile(new File(mosaic, "timeregex.properties"), timeregex); // the datastore.properties file is also mandatory... 
File dsp = TestData.file(this, "datastore.properties"); FileUtils.copyFileToDirectory(dsp, mosaic); // have the reader harvest it ImageMosaicFormat format = new ImageMosaicFormat(); ImageMosaicReader reader = format.getReader(mosaic); GridCoverage2D coverage = null; assertNotNull(reader); try { String[] names = reader.getGridCoverageNames(); assertEquals(1, names.length); assertEquals("NO2", names[0]); GranuleSource source = reader.getGranules("NO2", true); SimpleFeatureCollection granules = source.getGranules(Query.ALL); assertEquals(1, granules.size()); assertTrue(CRS.equalsIgnoreMetadata(DefaultGeographicCRS.WGS84, reader.getCoordinateReferenceSystem())); GeneralEnvelope envelope = reader.getOriginalEnvelope("NO2"); assertEquals(-360, envelope.getMinimum(0), 0d); assertEquals(360, envelope.getMaximum(0), 0d); assertEquals(-180, envelope.getMinimum(1), 0d); assertEquals(180, envelope.getMaximum(1), 0d); // check we can read a coverage out of it coverage = reader.read(null); reader.dispose(); // Checking we can read again from the coverage once it has been configured. reader = format.getReader(mosaic); coverage = reader.read(null); assertNotNull(coverage); } finally { if (coverage != null) { ImageUtilities.disposePlanarImageChain((PlanarImage) coverage.getRenderedImage()); coverage.dispose(true); } reader.dispose(); } }
From source file:org.geotools.coverage.io.netcdf.NetCDFMosaicReaderTest.java
@Test public void testDeleteCoverageGome() throws IOException { // prepare a "mosaic" with just one NetCDF File nc1 = TestData.file(this, "O3-NO2.nc"); File mosaic = new File(TestData.file(this, "."), "nc_deleteCoverage"); if (mosaic.exists()) { FileUtils.deleteDirectory(mosaic); }//from w w w. j a v a2 s. co m assertTrue(mosaic.mkdirs()); FileUtils.copyFileToDirectory(nc1, mosaic); File xml = TestData.file(this, ".O3-NO2/O3-NO2.xml"); FileUtils.copyFileToDirectory(xml, mosaic); // The indexer String indexer = "TimeAttribute=time\n" + "Schema=the_geom:Polygon,location:String,imageindex:Integer,time:java.util.Date\n"; indexer += Prop.AUXILIARY_FILE + "=" + "O3-NO2.xml"; FileUtils.writeStringToFile(new File(mosaic, "indexer.properties"), indexer); // the datastore.properties file is also mandatory... File dsp = TestData.file(this, "datastore.properties"); FileUtils.copyFileToDirectory(dsp, mosaic); // have the reader harvest it ImageMosaicFormat format = new ImageMosaicFormat(); ImageMosaicReader reader = format.getReader(mosaic); GridCoverage2D coverage = null; assertNotNull(reader); try { assertEquals(2, reader.getGridCoverageNames().length); File[] files = mosaic.listFiles(); assertEquals(15, files.length); reader.dispose(); reader = format.getReader(mosaic); reader.delete(false); files = mosaic.listFiles(); assertEquals(2, files.length); } finally { if (coverage != null) { ImageUtilities.disposePlanarImageChain((PlanarImage) coverage.getRenderedImage()); coverage.dispose(true); } reader.dispose(); } }
From source file:org.geotools.coverage.io.netcdf.NetCDFMosaicReaderTest.java
@Test public void testReadCoverageGome2Names() throws IOException { // prepare a "mosaic" with just one NetCDF File nc1 = TestData.file(this, "20130101.METOPA.GOME2.NO2.DUMMY.nc"); File mosaic = new File(TestData.file(this, "."), "nc_gome2"); if (mosaic.exists()) { FileUtils.deleteDirectory(mosaic); }//from w w w . j a va2 s . c om assertTrue(mosaic.mkdirs()); FileUtils.copyFileToDirectory(nc1, mosaic); nc1 = TestData.file(this, "20130101.METOPA.GOME2.BrO.DUMMY.nc"); FileUtils.copyFileToDirectory(nc1, mosaic); File xml = TestData.file(this, "DUMMYGOME2.xml"); FileUtils.copyFileToDirectory(xml, mosaic); // The indexer String indexer = "TimeAttribute=time\n" + "Schema=the_geom:Polygon,location:String,imageindex:Integer,time:java.util.Date\n" + "PropertyCollectors=TimestampFileNameExtractorSPI[timeregex](time)\n"; indexer += Prop.AUXILIARY_FILE + "=" + "DUMMYGOME2.xml"; FileUtils.writeStringToFile(new File(mosaic, "indexer.properties"), indexer); String timeregex = "regex=[0-9]{8}"; FileUtils.writeStringToFile(new File(mosaic, "timeregex.properties"), timeregex); // the datastore.properties file is also mandatory... 
File dsp = TestData.file(this, "datastore.properties"); FileUtils.copyFileToDirectory(dsp, mosaic); // have the reader harvest it ImageMosaicFormat format = new ImageMosaicFormat(); ImageMosaicReader reader = format.getReader(mosaic); GridCoverage2D coverage = null; assertNotNull(reader); try { String[] names = reader.getGridCoverageNames(); assertEquals(2, names.length); assertEquals("NO2", names[0]); assertEquals("BrO", names[1]); GranuleSource source = reader.getGranules("NO2", true); SimpleFeatureCollection granules = source.getGranules(Query.ALL); assertEquals(1, granules.size()); assertTrue(CRS.equalsIgnoreMetadata(DefaultGeographicCRS.WGS84, reader.getCoordinateReferenceSystem("NO2"))); GeneralEnvelope envelope = reader.getOriginalEnvelope("NO2"); assertEquals(-360, envelope.getMinimum(0), 0d); assertEquals(360, envelope.getMaximum(0), 0d); assertEquals(-180, envelope.getMinimum(1), 0d); assertEquals(180, envelope.getMaximum(1), 0d); // check we can read a coverage out of it coverage = reader.read("NO2", null); reader.dispose(); // Checking we can read again from the coverage (using a different name this time) once it has been configured. reader = format.getReader(mosaic); coverage = reader.read("BrO", null); assertNotNull(coverage); } finally { if (coverage != null) { ImageUtilities.disposePlanarImageChain((PlanarImage) coverage.getRenderedImage()); coverage.dispose(true); } reader.dispose(); } }