/**
 * Rescan the directory when the scan interval has elapsed, extending the dataset if new files appear.
 * Note that this just calls sync(), so structural metadata may be modified (!!)
 *
 * @return true if directory was rescanned and dataset may have been updated
 * @throws IOException on io error
 */
public synchronized boolean syncExtend() throws IOException {
  if (!datasetManager.isScanNeeded())
    return false;
  return _sync();
}
/**
 * Rebuild this aggregation: rescan for the current set of files, close the old
 * nested datasets, rebuild the dataset list, and reconstruct the aggregated
 * NetcdfDataset. Statement order matters here.
 *
 * @param cancelTask allow the operation to be cancelled; may be null
 * @throws IOException on read error
 */
public void finish(CancelTask cancelTask) throws IOException {
  datasetManager.scan(true); // Make the list of Datasets, by scanning if needed.
  cacheDirty = true; // cached aggregation info must be recomputed/persisted after a rescan
  closeDatasets(); // release the previously-open nested datasets before rebuilding
  makeDatasets(cancelTask);
  //ucar.unidata.io.RandomAccessFile.setDebugAccess( true);
  buildNetcdfDataset(cancelTask);
  //ucar.unidata.io.RandomAccessFile.setDebugAccess( false);
}
/** * Close all resources (files, sockets, etc) associated with this dataset. * If the underlying file was acquired, it will be released, otherwise closed. */ @Override public synchronized void close() throws java.io.IOException { if (agg != null) agg.persistWrite(); // LOOK maybe only on real close ?? if (cache != null) { //unlocked = true; cache.release(this); } else { if (agg != null) agg.close(); agg = null; if (orgFile != null) orgFile.close(); orgFile = null; } }
/**
 * Rebuild this aggregation: rescan for the current set of files, rebuild the
 * dataset list, and reconstruct the aggregated NetcdfDataset.
 *
 * @param cancelTask allow the operation to be cancelled; may be null
 * @throws IOException on read error
 */
public void finish(CancelTask cancelTask) throws IOException {
  datasetManager.scan(true); // Make the list of Datasets, by scanning if needed.
  cacheDirty = true; // cached aggregation info must be recomputed/persisted after a rescan
  // NOTE(review): other finish() implementations in this codebase call closeDatasets()
  // before makeDatasets() — confirm whether previously-open nested datasets leak here.
  makeDatasets(cancelTask);
  //ucar.unidata.io.RandomAccessFile.setDebugAccess( true);
  buildNetcdfDataset(cancelTask);
  //ucar.unidata.io.RandomAccessFile.setDebugAccess( false);
}
// NOTE(review): interior fragment of an NcML <aggregation> parsing method — the enclosing
// signature is not visible here. Presumably each add* call belongs to a different element
// branch (<netcdf>, <scan>, <collection>) — confirm against the full method.
agg.addExplicitDataset(cacheName, realLocation, id, ncoords, coordValueS, sectionSpec, reader);
agg.addDatasetScan(cdElement, dirLocation, suffix, regexpPatternString, dateFormatMark, enhanceMode, subdirs, olderS);
agg.addCollection(collElem.getAttributeValue("spec"), collElem.getAttributeValue("olderThan"));
// Any <remove> child of the aggregation element also forces merge processing.
if (!needMerge)
  needMerge = aggElem.getChildren("remove", ncNS).size() > 0;
if (needMerge)
  agg.setModifications(aggElem);
/**
 * Read a section of the proxied variable's data by acquiring the underlying
 * file, locating the corresponding variable, and reading from it.
 *
 * @param cancelTask may be null; checked once before the read
 * @return the requested data section, or null if cancelled
 */
public Array reallyRead(Variable mainV, Section section, CancelTask cancelTask) throws IOException, InvalidRangeException {
  NetcdfFile ncd = null;
  try {
    ncd = dataset.acquireFile(cancelTask);
    Variable target = findVariable(ncd, mainV);
    boolean cancelled = (cancelTask != null) && cancelTask.isCancel();
    return cancelled ? null : target.read(section);
  } finally {
    // Release (or close) the acquired file whether or not the read succeeded.
    dataset.close(ncd);
  }
}
}
/** Append detailed debugging information about this aggregation to the given Formatter. */
@Override
public void getDetailInfo(Formatter f) {
  super.getDetailInfo(f);
  f.format(" timeUnitsChange=%s%n", timeUnitsChange);
  f.format(" totalCoords=%d%n", totalCoords);

  if (!aggVarNames.isEmpty()) {
    f.format(" Aggregation Variables specified in NcML%n");
    for (String varName : aggVarNames)
      f.format(" %s%n", varName);
  }

  f.format("%nAggregation Variables%n");
  for (VariableDS aggVar : aggVars) {
    f.format(" ");
    aggVar.getNameAndDimensions(f, true, false);
    f.format("%n");
  }

  if (!cacheList.isEmpty()) {
    f.format("%nCache Variables%n");
    for (CacheVar cached : cacheList)
      f.format(" %s%n", cached);
  }

  f.format("%nVariable Proxies%n");
  for (Variable variable : ncDataset.getVariables()) {
    String shortName = variable.getShortName();
    if (variable.hasCachedData())
      f.format(" %20s cached%n", shortName);
    else
      f.format(" %20s proxy %s%n", shortName, variable.getProxyReader().getClass().getName());
  }
}
/** * Open one of the nested datasets as a template for the aggregation dataset. * * @return a typical Dataset * @throws FileNotFoundException if there are no datasets */ protected Dataset getTypicalDataset() throws IOException { List<Dataset> nestedDatasets = getDatasets(); int n = nestedDatasets.size(); if (n == 0) throw new FileNotFoundException("No datasets in this aggregation"); int select; if (typicalDatasetMode == TypicalDataset.LATEST) select = n - 1; else if (typicalDatasetMode == TypicalDataset.PENULTIMATE) select = (n < 2) ? 0 : n - 2; else if (typicalDatasetMode == TypicalDataset.FIRST) select = 0; else // random is default select = (n < 2) ? 0 : new Random().nextInt(n); return nestedDatasets.get(select); }
// NOTE(review): interior fragment — the enclosing method signature is not visible here.
// A null aggregation signals cancellation (per the inline comment), so skip wiring it up.
if (agg == null)
  return; // cancel task
targetDS.setAggregation(agg);
// Scan and build the aggregated dataset now that it is attached to the target.
agg.finish(cancelTask);
// NOTE(review): interior fragment of an NcML <aggregation> parsing method — the enclosing
// signature is not visible here. Presumably each add* call belongs to a different element
// branch (<netcdf>, <scan>, <collection>) — confirm against the full method.
agg.addExplicitDataset(cacheName, realLocation, id, ncoords, coordValueS, sectionSpec, reader);
agg.addDatasetScan(cdElement, dirLocation, suffix, regexpPatternString, dateFormatMark, enhanceMode, subdirs, olderS);
agg.addCollection(collElem.getAttributeValue("spec"), collElem.getAttributeValue("olderThan"));
// Any <remove> child of the aggregation element also forces merge processing.
if (!needMerge)
  needMerge = aggElem.getChildren("remove", ncNS).size() > 0;
if (needMerge)
  agg.setModifications(aggElem);
/**
 * Read the proxied variable's full data by acquiring the underlying file and
 * reading from the corresponding variable in it.
 *
 * @param cancelTask may be null; checked once after the file is acquired
 * @return the variable's data, or null if cancelled
 */
public Array reallyRead(Variable mainV, CancelTask cancelTask) throws IOException {
  NetcdfFile ncd = null;
  try {
    ncd = dataset.acquireFile(cancelTask);
    if (cancelTask != null && cancelTask.isCancel())
      return null;
    return findVariable(ncd, mainV).read();
  } finally {
    // Release (or close) the acquired file whether or not the read succeeded.
    dataset.close(ncd);
  }
}
/** Write detailed debugging output for this aggregation into the supplied Formatter. */
@Override
public void getDetailInfo(Formatter f) {
  super.getDetailInfo(f);
  f.format(" timeUnitsChange=%s%n", timeUnitsChange);
  f.format(" totalCoords=%d%n", totalCoords);

  if (!aggVarNames.isEmpty()) {
    f.format(" Aggregation Variables specified in NcML%n");
    for (String name : aggVarNames)
      f.format(" %s%n", name);
  }

  f.format("%nAggregation Variables%n");
  for (VariableDS vds : aggVars) {
    f.format(" ");
    vds.getNameAndDimensions(f, true, false);
    f.format("%n");
  }

  if (!cacheList.isEmpty()) {
    f.format("%nCache Variables%n");
    for (CacheVar cv : cacheList)
      f.format(" %s%n", cv);
  }

  f.format("%nVariable Proxies%n");
  for (Variable v : ncDataset.getVariables()) {
    boolean cached = v.hasCachedData();
    if (cached)
      f.format(" %20s cached%n", v.getShortName());
    else
      f.format(" %20s proxy %s%n", v.getShortName(), v.getProxyReader().getClass().getName());
  }
}
/**
 * Open one of the nested datasets as a template for the aggregation dataset.
 *
 * @return a typical Dataset, chosen per typicalDatasetMode
 * @throws FileNotFoundException if there are no datasets
 */
protected Dataset getTypicalDataset() throws IOException {
  List<Dataset> nestedDatasets = getDatasets();
  int n = nestedDatasets.size();
  if (n == 0)
    throw new FileNotFoundException("No datasets in this aggregation");

  int select;
  if (typicalDatasetMode == TypicalDataset.LATEST)
    select = n - 1; // most recently added dataset
  else if (typicalDatasetMode == TypicalDataset.PENULTIMATE)
    select = (n < 2) ? 0 : n - 2; // second-to-last; fall back to first when only one exists
  else if (typicalDatasetMode == TypicalDataset.FIRST)
    select = 0;
  else { // random is default
    // Lazily create and reuse a single Random instance.
    // NOTE(review): this lazy init is not thread-safe by itself — presumably callers
    // synchronize access to this aggregation; confirm.
    if (r == null)
      r = new Random();
    select = (n < 2) ? 0 : r.nextInt(n);
  }
  return nestedDatasets.get(select);
}

// Reused randomness source for the default (random) typical-dataset mode; lazily created above.
private Random r;
// NOTE(review): interior fragment — the enclosing method signature is not visible here.
// Parse the <aggregation> element and attach the result to the target dataset.
Aggregation agg = readAgg(aggElem, ncmlLocation, targetDS, cancelTask);
// NOTE(review): unlike the variant that null-checks agg (the cancel case), this chain
// assumes readAgg never returns null here — confirm.
targetDS.setAggregation(agg);
agg.finish(cancelTask);
// NOTE(review): interior fragment of an NcML <aggregation> parsing method — the enclosing
// signature is not visible here. Presumably each add* call belongs to a different element
// branch (<netcdf>, <scan>, <collection>) — confirm against the full method.
agg.addExplicitDataset(cacheName, realLocation, id, ncoords, coordValueS, sectionSpec, reader);
agg.addDatasetScan(cdElement, dirLocation, suffix, regexpPatternString, dateFormatMark, enhanceMode, subdirs, olderS);
agg.addCollection(collElem.getAttributeValue("spec"), collElem.getAttributeValue("olderThan"));
// Any <remove> child of the aggregation element also forces merge processing.
if (!needMerge)
  needMerge = aggElem.getChildren("remove", ncNS).size() > 0;
if (needMerge)
  agg.setModifications(aggElem);
/**
 * Rebuild this aggregation: rescan for the current set of files, close the old
 * nested datasets, rebuild the dataset list, and reconstruct the aggregated
 * NetcdfDataset. Statement order matters here.
 *
 * @param cancelTask allow the operation to be cancelled; may be null
 * @throws IOException on read error
 */
public void finish(CancelTask cancelTask) throws IOException {
  datasetManager.scan(true); // Make the list of Datasets, by scanning if needed.
  cacheDirty = true; // cached aggregation info must be recomputed/persisted after a rescan
  closeDatasets(); // release the previously-open nested datasets before rebuilding
  makeDatasets(cancelTask);
  //ucar.unidata.io.RandomAccessFile.setDebugAccess( true);
  buildNetcdfDataset(cancelTask);
  //ucar.unidata.io.RandomAccessFile.setDebugAccess( false);
}
/** * Close all resources (files, sockets, etc) associated with this dataset. * If the underlying file was acquired, it will be released, otherwise closed. */ @Override public synchronized void close() throws java.io.IOException { if (agg != null) agg.persistWrite(); // LOOK maybe only on real close ?? if (cache != null) { //unlocked = true; cache.release(this); } else { if (agg != null) agg.close(); agg = null; if (orgFile != null) orgFile.close(); orgFile = null; } }
/**
 * Read the proxied variable's full data: acquire the underlying file, find the
 * matching variable, and read it.
 *
 * @param cancelTask may be null; checked once after the file is acquired
 * @return the variable's data, or null if cancelled
 */
@Override
public Array reallyRead(Variable mainV, CancelTask cancelTask) throws IOException {
  NetcdfFile acquired = null;
  try {
    acquired = dataset.acquireFile(cancelTask);
    boolean cancelled = (cancelTask != null) && cancelTask.isCancel();
    if (cancelled)
      return null;
    Variable source = findVariable(acquired, mainV);
    return source.read();
  } finally {
    // Release (or close) the acquired file whether or not the read succeeded.
    dataset.close(acquired);
  }
}
/** Dump detailed state of this aggregation (variables, caches, proxies) to the Formatter. */
@Override
public void getDetailInfo(Formatter f) {
  super.getDetailInfo(f);
  f.format(" timeUnitsChange=%s%n", timeUnitsChange);
  f.format(" totalCoords=%d%n", totalCoords);

  if (!aggVarNames.isEmpty()) {
    f.format(" Aggregation Variables specified in NcML%n");
    for (String aggName : aggVarNames)
      f.format(" %s%n", aggName);
  }

  f.format("%nAggregation Variables%n");
  for (VariableDS dsVar : aggVars) {
    f.format(" ");
    dsVar.getNameAndDimensions(f, true, false);
    f.format("%n");
  }

  if (!cacheList.isEmpty()) {
    f.format("%nCache Variables%n");
    for (CacheVar cacheVar : cacheList)
      f.format(" %s%n", cacheVar);
  }

  f.format("%nVariable Proxies%n");
  for (Variable candidate : ncDataset.getVariables()) {
    if (candidate.hasCachedData()) {
      f.format(" %20s cached%n", candidate.getShortName());
    } else {
      String proxyName = candidate.getProxyReader().getClass().getName();
      f.format(" %20s proxy %s%n", candidate.getShortName(), proxyName);
    }
  }
}
/**
 * Check to see if its time to rescan directory, and if so, rescan and extend dataset if needed.
 * Note that this just calls sync(), so structural metadata may be modified (!!)
 *
 * @return true if directory was rescanned and dataset may have been updated
 * @throws IOException on io error
 */
public synchronized boolean syncExtend() throws IOException {
  boolean scanDue = datasetManager.isScanNeeded();
  return scanDue ? _sync() : false;
}