public void execute(JobExecutionContext context) throws JobExecutionException { try { CollectionManager manager = (CollectionManager) context.getJobDetail().getJobDataMap().get(DCM_NAME); org.slf4j.Logger loggerfc = (org.slf4j.Logger) context.getJobDetail().getJobDataMap().get(LOGGER); String groupName = context.getTrigger().getKey().getGroup(); if (groupName.equals("nocheck")) { loggerfc.info("UpdateCollection {} nocheck", manager.getCollectionName()); manager.updateNocheck(); // update(CollectionManager.Force.nocheck) } else { loggerfc.debug("UpdateCollection {} scan(true)", manager.getCollectionName()); manager.scan(true); } } catch (Throwable e) { logger.error("UpdateCollectionJob.execute failed", e); } } }
/**
 * Manage collections of files that we can assign date ranges to.
 *
 * @param manager the collection manager
 * @param errlog put error messages here
 * @see CollectionSpecParser
 * @throws java.io.IOException on read error
 */
public TimedCollection(CollectionManager manager, Formatter errlog) throws IOException {
  this.manager = manager;

  // make sure the inventory (sorted by path) is current, then build the dataset list
  manager.scanIfNeeded();
  update();

  if (!debug) return;
  System.out.printf("Datasets in collection=%s%n", manager.getCollectionName());
  for (TimedCollection.Dataset ds : datasets)
    System.out.printf(" %s %n", ds);
  System.out.printf("%n");
}
/**
 * Complete construction: register this object for collection-change events,
 * then schedule any recurring background tasks configured for this collection.
 */
protected void finishConstruction() {
  dcm.addEventListener(this); // now wired for events
  CollectionUpdater.INSTANCE.scheduleTasks(config, dcm); // see if any background tasks are needed
}
/**
 * Quartz job: reset the proto dataset for the collection stored in the job data map.
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
  try {
    org.quartz.JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    CollectionManager dcm = (CollectionManager) dataMap.get(DCM_NAME);
    org.slf4j.Logger fcLogger = (org.slf4j.Logger) dataMap.get(LOGGER);

    if (dcm == null) {
      fcLogger.error("Update resetProto failed: no manager object on {}", context);
      return;
    }

    logger.info("ResetProto for {}", dcm.getCollectionName());
    dcm.resetProto();
  } catch (Throwable e) {
    logger.error("ChangeProtoJob.execute failed", e);
  }
}
}
/**
 * Rescan the collection and rebuild the dataset if needed.
 * On the first call (no dataset yet) a scan is always performed.
 *
 * @param force if true, rescan even when the manager reports no scan is needed
 * @throws RuntimeException wrapping any failure during scan/update
 */
public void checkNeeded(boolean force) {
  synchronized (lock) {
    // first call: nothing built yet, always scan and build
    if (fmrcDataset == null) {
      try {
        manager.scan(true);
        update();
        return;
      } catch (Throwable t) {
        // pass the throwable so the stack trace is logged, not just the message
        logger.error("{}: rescan failed", config.spec, t);
        throw new RuntimeException(t);
      }
    }

    if (!force && !manager.isScanNeeded()) return;

    try {
      if (!manager.scan(true)) return; // no changes found
      update();
    } catch (Throwable t) {
      logger.error("{}: rescan failed", config.spec, t);
      throw new RuntimeException(t);
    }
  }
}
@Test public void testScan() throws IOException { // count scanned files Formatter f = new Formatter(System.out); CollectionManager dcm = MFileCollectionManager.open("testScan", TestDir.cdmUnitTestDir + "agg/narr/narr-a_221_#yyyyMMdd_HHmm#.*grb$", null, f); dcm.scan(true); List<MFile> fileList = (List<MFile>) Misc.getList(dcm.getFilesSorted()); assert fileList.size() == 3 : dcm; // check date extractor int count = 0; String[] result = new String[] {"2000-01-18T12:00:00", "2000-01-19T00:00:00", "2000-01-20T12:00:00"}; for (MFile mfile : dcm.getFilesSorted()) { CalendarDate de = dcm.extractDate(mfile); System.out.printf(" %s == %s%n", mfile.getPath(), de); assert de.toString().startsWith(result[count]); count++; } }
/** Verify that an olderThan filter ("10 sec") excludes a freshly-touched file from the scan. */
@Test
@Ignore("tests fail on jenkins due to file permisssions")
public void testScanOlderThan() throws IOException, InterruptedException {
  Formatter errlog = new Formatter(System.out);
  CollectionManager dcm = MFileCollectionManager.open("testScanOlderThan",
      TestDir.cdmUnitTestDir + "agg/updating/.*nc$", null, errlog);
  dcm.scan(true);
  List<MFile> files = (List<MFile>) Misc.getList(dcm.getFilesSorted());
  assert files.size() == 3 : dcm;

  // refresh the modification time of one file, then rescan with the olderThan filter active
  assert touch(TestDir.cdmUnitTestDir + "agg/updating/extra.nc");

  dcm = MFileCollectionManager.open("testScanOlderThan",
      TestDir.cdmUnitTestDir + "agg/updating/.*nc$", "10 sec", errlog);
  dcm.scan(true);
  files = (List<MFile>) Misc.getList(dcm.getFilesSorted());
  assert files.size() == 2 : dcm;
}
public void update() { datasets = new ArrayList<TimedCollection.Dataset>(); for (MFile f : manager.getFiles()) datasets.add(new Dataset(f)); if (manager.hasDateExtractor()) { if (datasets.size() == 1) { Dataset ds = (Dataset) datasets.get(0); if (ds.start != null) dateRange = CalendarDateRange.of(ds.start, ds.start); // LOOK ?? } else if (datasets.size() > 1) { for (int i = 0; i < datasets.size() - 1; i++) { Dataset d1 = (Dataset) datasets.get(i); Dataset d2 = (Dataset) datasets.get(i + 1); d1.setDateRange(CalendarDateRange.of(d1.start, d2.start)); if (i == datasets.size() - 2) // last one d2.setDateRange(new CalendarDateRange(d2.start, d1.getDateRange().getDurationInSecs())); } Dataset first = (Dataset) datasets.get(0); Dataset last = (Dataset) datasets.get(datasets.size() - 1); dateRange = CalendarDateRange.of(first.getDateRange().getStart(), last.getDateRange().getEnd()); } } }
for (MFile f : manager.getFiles()) { if (logger.isDebugEnabled()) logger.debug("Fmrc: "+config.spec+": file="+f.getPath()); return new FmrcInv("fmrc:"+manager.getCollectionName(), fmrList, config.fmrcConfig.regularize);
/**
 * A single file in the collection.
 *
 * @param f the file; its path becomes this dataset's location, and its run date
 *          is extracted by the manager (presumably null when no date extractor applies -- TODO confirm)
 */
Dataset(MFile f) {
  this.location = f.getPath();
  this.start = manager.extractRunDate(f);
}
return makeCatalogFiles(catURI, localState, dcm.getFilenames(), true);
String jobName = manager.getCollectionName(); map.put(LOGGER, org.slf4j.LoggerFactory.getLogger("fc."+manager.getCollectionName())); JobDetail updateJob = JobBuilder.newJob(UpdateCollectionJob.class) .withIdentity(jobName, "UpdateCollection") org.quartz.JobDataMap pmap = new org.quartz.JobDataMap(); pmap.put(DCM_NAME, manager); map.put(LOGGER, org.slf4j.LoggerFactory.getLogger("fc."+manager.getCollectionName())); JobDetail protoJob = JobBuilder.newJob(ChangeProtoJob.class) .withIdentity(jobName, "UpdateProto")
/** Release resources held by the underlying collection manager. */
public void close() {
  manager.close();
}
GridDatasetInv inv = new GridDatasetInv(gds, cm.extractRunDate(mfile)); String xmlString = inv.writeXML( new Date(mfile.getLastModified())); ((CollectionManagerAbstract)cm).putMetadata(mfile, "fmrInv.xml", xmlString.getBytes(CDM.utf8Charset));
/**
 * Make a catalog for this point feature collection.
 *
 * @param match   the part of the request path that matched; null/empty means the top catalog
 * @param orgPath the original request path (not used here)
 * @param catURI  base URI for the resulting catalog
 * @return the catalog, or null on error or when nothing matches
 */
@Override
public InvCatalogImpl makeCatalog(String match, String orgPath, URI catURI) {
  // parameterized logging: avoids string concatenation when debug is disabled
  logger.debug("FcPoint make catalog for {} {}", match, catURI);

  State localState;
  try {
    localState = checkState();
  } catch (IOException e) {
    logger.error("Error in checkState", e);
    return null;
  }

  try {
    if ((match == null) || match.isEmpty()) {
      // top-level catalog for the whole collection
      InvCatalogImpl main = makeCatalogTop(catURI, localState);
      main.addService(collectionService);
      main.getDataset().getLocalMetadataInheritable().setServiceName(collectionService.getName());
      main.finish();
      return main;
    } else if (match.startsWith(FILES) && wantDatasets.contains(FeatureCollectionConfig.PointDatasetType.Files)) {
      // catalog of the individual files
      return makeCatalogFiles(catURI, localState, dcm.getFilenames(), true);
    }
  } catch (Exception e) {
    logger.error("Error making catalog for " + path, e);
  }
  return null;
}
static public FeatureDataset factory(String location, FeatureType wantFeatureType, CollectionManager dcm, Formatter errlog) throws IOException { TimedCollection collection = new TimedCollection(dcm, errlog); if (collection.getDatasets().size() == 0) { throw new FileNotFoundException("Collection is empty; spec="+dcm); } if (wantFeatureType == FeatureType.ANY_POINT) { TimedCollection.Dataset d = collection.getPrototype(); FeatureDatasetPoint proto = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(FeatureType.ANY_POINT, d.getLocation(), null, errlog); wantFeatureType = proto.getFeatureType(); proto.close(); // LOOK - try to use } //LatLonRect bb = null; FeatureCollection fc = null; switch (wantFeatureType) { case POINT: CompositePointCollection pfc = new CompositePointCollection(dcm.getCollectionName(), collection); //bb = pfc.getBoundingBox(); fc = pfc; break; case STATION: CompositeStationCollection sfc = new CompositeStationCollection(dcm.getCollectionName(), collection, null, null); //bb = sfc.getBoundingBox(); fc = sfc; break; default: return null; } return new CompositePointDataset(location, wantFeatureType, fc, collection, null); }
/**
 * Manage collections of files that we can assign date ranges to
 *
 * @param manager the collection manager
 * @param errlog put error messages here
 * @see CollectionSpecParser
 * @throws java.io.IOException on read error
 */
public TimedCollection(MCollection manager, Formatter errlog) throws IOException {
  this.manager = manager;

  // get the inventory, sorted by path
  // scanIfNeeded() exists only on the legacy CollectionManager interface, hence the guard
  if (manager instanceof CollectionManager) {
    ((CollectionManager) manager).scanIfNeeded();
  }
  update();

  if (debug) {
    System.out.printf("Datasets in collection=%s%n", manager.getCollectionName());
    for (TimedCollection.Dataset d : datasets) {
      System.out.printf(" %s %n", d);
    }
    System.out.printf("%n");
  }
}
/**
 * Return the current State, rebuilding it first if the collection has changed.
 * Copy-on-write under the lock: callers holding a previously returned State are unaffected.
 *
 * @return the up-to-date State
 * @throws IOException on read error during rescan/rebuild
 */
@Override
protected State checkState() throws IOException {
  synchronized (lock) {
    if (state == null) {
      firstInit(); // one-time initialization on first access
    } else if (!dcm.scanIfNeeded()) { // perform new scan if needed, return false if no change
      return state; // unchanged -- reuse the cached State
    }

    // copy on write
    State localState = new State(state);
    makeDatasets(localState);

    // LOOK whats really needed is just the time range metadata updated
    update(CollectionManager.Force.test);

    if (null != fd) {
      localState.vars = MetadataExtractor.extractVariables(fd);
      localState.coverage = MetadataExtractor.extractGeospatial(fd);
      localState.dateRange = MetadataExtractor.extractCalendarDateRange(fd);
    }

    // publish the rebuilt state while still holding the lock
    state = localState;
    return state;
  }
}