// NOTE(review): truncated fragment — the enclosing method header and several closing
// braces are outside this view, so the code is left byte-identical; comments only.
// Gathers the dataset's data variables (excluding coordinate axes when the underlying
// file is a NetcdfDataset), then writes them to a CF point file and returns the count.
List<DsgFeatureCollection> featureCollectionList = pfDataset.getPointFeatureCollectionList();
for ( DsgFeatureCollection featureCollection : featureCollectionList) {
  // NOTE(review): this condition looks inverted — it throws "There is no
  // PointFeatureCollection" exactly when the collection IS a PointFeatureCollection.
  // Presumably it should be !(featureCollection instanceof PointFeatureCollection); confirm.
  if (featureCollection instanceof PointFeatureCollection)
    throw new IOException("There is no PointFeatureCollection in "+pfDataset.getLocation());
  ucar.nc2.NetcdfFile ncfile = pfDataset.getNetcdfFile();
  if ((ncfile == null) || !(ncfile instanceof NetcdfDataset)) {
    // No coordinate-system info available: take every data variable as-is.
    dataVars.addAll(pfDataset.getDataVariables());
  } else {
    NetcdfDataset ncd = (NetcdfDataset) ncfile;
    // Keep only variables that are not coordinate axes.
    for (VariableSimpleIF vs : pfDataset.getDataVariables()) {
      if (ncd.findCoordinateAxis(vs.getShortName()) == null)
        dataVars.add(vs);
      // NOTE(review): braces below this point are unbalanced in this fragment;
      // the writer setup and return presumably sit outside these loops.
      writer = new WriterCFPointDataset(out, pfDataset.getGlobalAttributes(), altUnits);
      // -1 record count: header written without knowing the total up front.
      writer.writeHeader( dataVars, -1);
      System.out.printf("Write %d records from %s to %s took %d msecs %n", count, pfDataset.getLocation(),fileOut,took);
      return count;
/**
 * Releases resources held by this collection: closes the point-feature iterator,
 * finalizes computed bounds, and closes the currently open dataset. Idempotent —
 * subsequent calls are no-ops.
 *
 * @throws RuntimeException wrapping any IOException raised while closing the dataset
 */
public void finish() {
  if (finished)
    return;

  if (pfIter != null) {
    pfIter.finish();
  }
  finishCalcBounds();

  if (currentDataset != null) {
    try {
      currentDataset.close();
      if (CompositeDatasetFactory.debug) {
        System.out.printf("CompositeStationCollectionFlattened close dataset: %s%n", currentDataset.getLocation());
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  finished = true;
}
/**
 * Builds a geospatial-coverage record from a point dataset's bounding box.
 *
 * @param fd the point dataset to inspect
 * @return coverage derived from the dataset's lat/lon bounding box, or null when
 *         the dataset has no bounding box
 */
public ThreddsMetadata.GeospatialCoverage extractGeospatial(FeatureDatasetPoint fd) {
  LatLonRect boundingBox = fd.getBoundingBox();
  // Zero vertical extent: point datasets carry no Z range here.
  return (boundingBox == null) ? null : new ThreddsMetadata.GeospatialCoverage(boundingBox, null, 0.0, 0.0);
}
/** * Constructs a FlattenedDatasetPointCollection. * * @param fdPoint a point dataset. * @throws IllegalArgumentException if any of the feature collections in the dataset are not of type * {@code PointFeatureCollection} or {@code NestedPointFeatureCollection}. */ public FlattenedDatasetPointCollection(FeatureDatasetPoint fdPoint) throws IllegalArgumentException { super(fdPoint.getLocation(), CalendarDateUnit.unixDateUnit, null); // Default dateUnit and altUnits. this.fdPoint = fdPoint; List<DsgFeatureCollection> featCols = fdPoint.getPointFeatureCollectionList(); if (!featCols.isEmpty()) { DsgFeatureCollection firstFeatCol = featCols.get(0); // Replace this.dateUnit, this.altUnits, and this.extras with "typical" values from firstFeatCol. // We can't be certain that those values are representative of ALL collections in the dataset, but it's // a decent bet because in practice, firstFeatCol is so often the ONLY collection. copyFieldsFrom(firstFeatCol); } }
private void readMetadata() { // must open a prototype in order to get the data variable TimedCollection.Dataset td = pointCollections.getPrototype(); if (td == null) throw new RuntimeException("No datasets in the collection"); Formatter errlog = new Formatter(); try (FeatureDatasetPoint openDataset = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(FeatureType.POINT, td.getLocation(), null, errlog)) { if (openDataset != null) { dataVariables = openDataset.getDataVariables(); globalAttributes = openDataset.getGlobalAttributes(); } } catch (IOException ioe) { throw new RuntimeException(ioe); } }
/**
 * Smoke test: opens a CDM remote feature endpoint and prints the dataset and its
 * first feature collection.
 *
 * @param args optional; args[0] overrides the default endpoint URL
 * @throws IOException if the remote dataset cannot be opened
 */
public static void main(String args[]) throws IOException {
  // Generalized: endpoint may be supplied on the command line; falls back to the
  // original hard-coded local TDS instance for backward compatibility.
  String endpoint = (args.length > 0)
      ? args[0]
      : "http://localhost:8080/thredds/cdmrfeature/idd/metar/ncdecodedLocalHome";

  FeatureDatasetPoint fd = (FeatureDatasetPoint) CdmrFeatureDataset.factory(FeatureType.ANY, endpoint);
  FeatureCollection fc = fd.getPointFeatureCollectionList().get(0);
  System.out.printf("Result= %s %n %s %n", fd, fc);

  /* StationTimeSeriesFeatureCollection sfc = (StationTimeSeriesFeatureCollection) fc;
  PointFeatureIterator pfIter = sfc.get(-1);
  try {
    while (pfIter.hasNext()) {
      PointFeature pf = pfIter.next();
      System.out.println("pf= " + pf);
    }
  } finally {
    pfIter.finish();
  } */
}
/**
 * Builds the station index by opening a prototype dataset from the collection,
 * wrapping each of its stations in a CompositeStationFeature, and caching the
 * prototype's data variables and global attributes.
 *
 * @return the populated StationHelper
 * @throws IOException if the prototype dataset cannot be read
 * @throws RuntimeException if the collection contains no datasets
 * @throws IllegalStateException if the prototype cannot be opened as a FeatureDatasetPoint
 */
@Override
protected StationHelper createStationHelper() throws IOException {
  TimedCollection.Dataset prototype = dataCollection.getPrototype();
  if (prototype == null) {
    throw new RuntimeException("No datasets in the collection");
  }

  Formatter errlog = new Formatter();
  try (FeatureDatasetPoint proto = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(
      FeatureType.STATION, prototype.getLocation(), null, errlog)) {
    if (proto == null) {
      throw new IllegalStateException("Cant open FeatureDatasetPoint " + prototype.getLocation());
    }

    List<DsgFeatureCollection> fcList = proto.getPointFeatureCollectionList();
    StationTimeSeriesCollectionImpl openCollection = (StationTimeSeriesCollectionImpl) fcList.get(0);

    StationHelper helper = new StationHelper();
    for (StationFeature stn : openCollection.getStationFeatures()) {
      helper.addStation(
          new CompositeStationFeature(stn, timeUnit, altUnits, stn.getFeatureData(), this.dataCollection));
    }

    dataVariables = proto.getDataVariables();
    globalAttributes = proto.getGlobalAttributes();
    return helper;
  }
}
// NOTE(review): truncated fragment — no enclosing method visible and the braces are
// unbalanced (a `} catch` appears without its `try`, and the for-loop body is garbled),
// so the code is left byte-identical; comments only.
// Builds XML metadata for a feature dataset: type, per-variable elements, bounding box,
// and time span.
rootElem.setAttribute("location", path);
Element elem = new Element("featureDataset");
FeatureType ft = fdp.getFeatureType();
elem.setAttribute("type", ft.toString().toLowerCase());
// Derive the per-feature-type document URL from the dataset.xml path.
String url = path.replace("dataset.xml", ft.toString().toLowerCase() + ".xml");
List<? extends VariableSimpleIF> vars = fdp.getDataVariables();
Collections.sort(vars);
for (VariableSimpleIF v : vars) {
  // NOTE(review): calcBounds inside the per-variable loop looks out of place, and the
  // matching `try` for the catch below is missing from this view — confirm in full file.
  fdp.calcBounds();
} catch (IOException e) {
  log.warn("Unable to compute bounds for dataset " + fdp.getTitle(), e);
  LatLonRect bb = fdp.getBoundingBox();
  if (bb != null)
    rootElem.addContent(writeBoundingBox(bb));
  CalendarDateRange dateRange = fdp.getCalendarDateRange();
  if (dateRange != null) {
    Element drElem = new Element("TimeSpan"); // from KML
@Test // the z coordinate doesnt fit into the structures, but must be transferred to the rewritten dataset public void testPointZCoord() throws Exception { String file = TestDir.cdmLocalTestDataDir + "point/pointUnlimited.nc"; Formatter buf = new Formatter(); try (FeatureDatasetPoint fdpoint = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.POINT, file, null, buf)) { Assert.assertNotNull(fdpoint); List<DsgFeatureCollection> collectionList = fdpoint.getPointFeatureCollectionList(); assert (collectionList.size() == 1) : "Can't handle point data with multiple collections"; DsgFeatureCollection fc = collectionList.get(0); assert fc instanceof PointFeatureCollection; NetcdfFile ncfile = fdpoint.getNetcdfFile(); Assert.assertNotNull(ncfile); Assert.assertNotNull("cant find variable 'z' in netcdf file", ncfile.findVariable("z")); FeatureDatasetPoint rewrite = rewriteDataset(fdpoint, "nc3", new CFPointWriterConfig(NetcdfFileWriter.Version.netcdf3)); collectionList = rewrite.getPointFeatureCollectionList(); fc = collectionList.get(0); assert fc instanceof PointFeatureCollection; ncfile = rewrite.getNetcdfFile(); Assert.assertNotNull(ncfile); Assert.assertNotNull("cant find variable 'z' in rewritten netcdf file", ncfile.findVariable("z")); rewrite.close(); } }
/**
 * Populates the station index from a prototype dataset of the collection: opens the
 * prototype, wraps its stations as CompositeStationFeatures, and caches its data
 * variables. The prototype is always closed before returning.
 *
 * @throws RuntimeException if the collection is empty, the prototype cannot be
 *         opened, or reading it fails (cause attached)
 */
@Override
protected void initStationHelper() {
  TimedCollection.Dataset td = dataCollection.getPrototype();
  if (td == null)
    throw new RuntimeException("No datasets in the collection");

  Formatter errlog = new Formatter();
  FeatureDatasetPoint openDataset = null;
  try {
    openDataset = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(
        FeatureType.STATION, td.getLocation(), null, errlog);
    // FIX: the original dereferenced openDataset without a null check, producing a bare
    // NPE when the open failed; fail with a meaningful message instead (matches the
    // sibling createStationHelper implementation).
    if (openDataset == null)
      throw new IllegalStateException("Cant open FeatureDatasetPoint " + td.getLocation() + "; " + errlog);

    List<FeatureCollection> fcList = openDataset.getPointFeatureCollectionList();
    StationTimeSeriesCollectionImpl openCollection = (StationTimeSeriesCollectionImpl) fcList.get(0);
    List<Station> stns = openCollection.getStations();

    stationHelper = new StationHelper();
    for (Station s : stns)
      stationHelper.addStation(new CompositeStationFeature(s, null, this.dataCollection));

    dataVariables = openDataset.getDataVariables();

  } catch (Exception ioe) {
    throw new RuntimeException(td.getLocation(), ioe);

  } finally {
    try {
      if (openDataset != null)
        openDataset.close();
    } catch (Throwable ignored) {
      // best-effort close of the prototype; a close failure must not mask the result
    }
  }
}
// NOTE(review): truncated fragment — no enclosing method visible and the per-variable
// for-loop body is never closed in this view, so the code is left byte-identical;
// comments only. Builds XML metadata: feature type/url element, bounding box, time span.
rootElem.setAttribute("location", path);
Element elem = new Element("featureDataset");
elem.setAttribute("type", fdp.getFeatureType().toString().toLowerCase());
elem.setAttribute("url", path + "/" + fdp.getFeatureType().toString().toLowerCase());
rootElem.addContent(elem);
List<? extends VariableSimpleIF> vars = fdp.getDataVariables();
// NOTE(review): sorting a wildcard list requires the elements to be Comparable with a
// consistent ordering — confirm VariableSimpleIF implements Comparable.
Collections.sort(vars);
for (VariableSimpleIF v : vars) {
  // NOTE(review): the statements below appear to belong OUTSIDE this loop
  // (bounding box / time range are dataset-level) — the loop body is cut off here.
  LatLonRect bb = fdp.getBoundingBox();
  if (bb != null)
    rootElem.addContent(writeBoundingBox(bb));
  CalendarDateRange dateRange = fdp.getCalendarDateRange();
  if (dateRange != null) {
    Element drElem = new Element("TimeSpan"); // from KML
private void readMetadata() { // must open a prototype in order to get the data variable TimedCollection.Dataset td = pointCollections.getPrototype(); if (td == null) throw new RuntimeException("No datasets in the collection"); Formatter errlog = new Formatter(); FeatureDatasetPoint openDataset = null; try { openDataset = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(FeatureType.POINT, td.getLocation(), null, errlog); if (openDataset != null) { dataVariables = openDataset.getDataVariables(); globalAttributes = openDataset.getGlobalAttributes(); } } catch (IOException ioe) { throw new RuntimeException(ioe); } finally { try { if (openDataset != null) openDataset.close(); } catch (Throwable t) { } } }
// NOTE(review): truncated fragment — no enclosing method visible and the final for-loop
// is never closed in this view, so the code is left byte-identical; comments only.
// Builds XML metadata for a feature dataset: type element, derived per-type URL, and a
// sorted list of data variables.
rootElem.setAttribute("location", path);
Element elem = new Element("featureDataset");
FeatureType ft = fdp.getFeatureType();
elem.setAttribute("type", ft.toString().toLowerCase());
// Derive the per-feature-type document URL from the dataset.xml path.
String url = path.replace("dataset.xml", ft.toString().toLowerCase() + ".xml");
List<DsgFeatureCollection> list = fdp.getPointFeatureCollectionList();
DsgFeatureCollection fc = list.get(0); // LOOK maybe should pass in the dsg?
List<? extends VariableSimpleIF> vars = fdp.getDataVariables();
Collections.sort(vars);
for (VariableSimpleIF v : vars) {
// NOTE(review): truncated fragment — the surrounding method and the `if` guarding the
// first throw are outside this view; code left byte-identical, comments only.
// Validates a prototype dataset opened from a collection spec: it must be a
// FeatureDatasetPoint with at least one feature collection.
throw new FileNotFoundException("Collection dataset is not a FeatureDatasetPoint; spec=" + dcm);

// ANY_POINT is a wildcard request: resolve it to the prototype's concrete feature type.
if (wantFeatureType == FeatureType.ANY_POINT)
  wantFeatureType = proto.getFeatureType();

List<DsgFeatureCollection> fcList = proto.getPointFeatureCollectionList();
if (fcList.size() == 0) {
  throw new FileNotFoundException("FeatureCollectionList is empty; spec=" + dcm);
/**
 * Checks that a single-station dataset exposes exactly one data variable, named "data".
 */
@Test
public void testDataVars() throws Exception {
  String file = TestDir.cdmLocalTestDataDir + "point/stationSingle.ncml";
  Formatter buf = new Formatter();
  try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(
      ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
    List<VariableSimpleIF> dataVars = pods.getDataVariables();
    for (VariableSimpleIF dv : dataVars) {
      System.out.printf(" %s%n", dv);
    }
    assert (dataVars.size() == 1) : "Should only be one data var";

    VariableSimpleIF onlyVar = dataVars.get(0);
    assert onlyVar.getShortName().equalsIgnoreCase("data");
  }
}
public List<VariableSimpleIF> getDataVariables() { if (dataVariables == null) { // must open a prototype in order to get the data variable TimedCollection.Dataset td = pointCollections.getPrototype(); if (td == null) throw new RuntimeException("No datasets in the collection"); Formatter errlog = new Formatter(); FeatureDatasetPoint openDataset = null; try { openDataset = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(FeatureType.POINT, td.getLocation(), null, errlog); if (openDataset != null) dataVariables = openDataset.getDataVariables(); } catch (IOException ioe) { throw new RuntimeException(ioe); } finally { try { if (openDataset != null) openDataset.close(); } catch (Throwable t) { } } } return dataVariables; }
/**
 * Compares two point datasets: asserts the feature types match, then compares their
 * data-variable names (ignoring "profileId"), printing the result. When the names
 * differ and failOnDataVarsDifferent is set, the comparison fails hard.
 */
static void compare(FeatureDatasetPoint org, FeatureDatasetPoint copy) {
  assert org.getFeatureType() == copy.getFeatureType();

  Formatter f = new Formatter();
  boolean namesMatch = CompareNetcdf2.compareLists(
      getNames(org.getDataVariables(), Lists.newArrayList("profileId")),
      getNames(copy.getDataVariables(), Lists.newArrayList("profileId")),
      f);

  if (namesMatch) {
    System.out.printf("Data Vars OK%n");
    return;
  }

  System.out.printf("Data Vars NOT OK%n %s%n", f);
  if (failOnDataVarsDifferent)
    assert false;
}
@Test public void testAltUnits() throws Exception { // Ignore this test if NetCDF-4 isn't present. Assume.assumeTrue("NetCDF-4 C library not present.", Nc4Iosp.isClibraryPresent()); String file = TestDir.cdmLocalTestDataDir + "point/stationRaggedContig.ncml"; Formatter buf = new Formatter(); try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) { List<DsgFeatureCollection> collectionList = pods.getPointFeatureCollectionList(); assert (collectionList.size() == 1) : "Can't handle point data with multiple collections"; DsgFeatureCollection fc1 = collectionList.get(0); assert fc1.getAltUnits() != null : "no Alt Units"; assert fc1.getAltUnits().equalsIgnoreCase("m") : "Alt Units should be 'm'"; FeatureDatasetPoint rewrite = rewriteDataset(pods, "nc4", new CFPointWriterConfig(NetcdfFileWriter.Version.netcdf4)); collectionList = rewrite.getPointFeatureCollectionList(); DsgFeatureCollection fc2 = collectionList.get(0); assert fc2 instanceof PointFeatureCC; assert fc2.getAltUnits() != null : "no Alt Units"; assert fc2.getAltUnits().equalsIgnoreCase("m") : "Alt Units should be 'm'"; rewrite.close(); } }
/**
 * Releases resources held by this collection: closes the point-feature iterator,
 * finalizes computed bounds, and closes the currently open dataset. Idempotent —
 * subsequent calls are no-ops.
 *
 * @throws RuntimeException wrapping any IOException raised while closing the dataset
 */
public void finish() {
  if (finished)
    return;

  if (pfIter != null) {
    pfIter.finish();
  }
  finishCalcBounds();

  if (currentDataset != null) {
    try {
      currentDataset.close();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  finished = true;
}
/**
 * Writes the table configuration of a standard point dataset as XML to the given
 * formatter. Non-standard datasets, datasets without a TableConfigurer, and write
 * errors are reported as messages on the formatter rather than thrown.
 *
 * @param pfd the point dataset whose table configuration is written
 * @param f sink for the XML output and for diagnostic messages
 */
public static void writeConfigXML(FeatureDatasetPoint pfd, java.util.Formatter f) {
  if (!(pfd instanceof PointDatasetStandardFactory.PointDatasetStandard)) {
    f.format("%s not instance of PointDatasetStandard%n", pfd.getLocation());
    return;
  }

  PointDatasetStandardFactory.PointDatasetStandard standard =
      (PointDatasetStandardFactory.PointDatasetStandard) pfd;
  TableAnalyzer analyzer = standard.getTableAnalyzer();
  TableConfig tableConfig = analyzer.getTableConfig();
  TableConfigurer configurer = analyzer.getTableConfigurer();
  if (configurer == null) {
    f.format("%s has no TableConfig%n", pfd.getLocation());
    return;
  }

  try {
    new PointConfigXML().writeConfigXML(tableConfig, configurer.getClass().getName(), f);
  } catch (IOException e) {
    f.format("%s error writing=%s%n", pfd.getLocation(), e.getMessage());
  }
}