congrats Icon
New! Tabnine Pro 14-day free trial
Start a free trial
Tabnine Logo
DatasetUrl.findDatasetUrl
Code IndexAdd Tabnine to your IDE (free)

How to use
findDatasetUrl
method
in
ucar.nc2.dataset.DatasetUrl

Best Java code snippets using ucar.nc2.dataset.DatasetUrl.findDatasetUrl (Showing top 20 results out of 315)

origin: Unidata/thredds

/**
 * Factory method for opening a NetcdfFile through the netCDF API.
 *
 * @param location   location of dataset.
 * @param cancelTask use to allow task to be cancelled; may be null.
 * @return NetcdfFile object
 * @throws java.io.IOException on read error
 */
public static NetcdfFile openFile(String location, ucar.nc2.util.CancelTask cancelTask) throws IOException {
 // Resolve the service type for this location once, then delegate to the shared open/acquire path.
 final DatasetUrl resolved = DatasetUrl.findDatasetUrl(location);
 return openOrAcquireFile(null, null, null, resolved, -1, cancelTask, null);
}
origin: Unidata/thredds

// Stores the comparison mode and resolves the dataset URL for the given location up front.
public TestNcmlWriteAndCompareShared(String location, boolean compareData) throws IOException {
 this.compareData = compareData;
 this.durl = DatasetUrl.findDatasetUrl(location);
}
origin: Unidata/thredds

/**
 * Factory method for opening a dataset through the netCDF API, and identifying its coordinate variables.
 *
 * @param location    location of file
 * @param enhance     if true, use defaultEnhanceMode, else no enhancements
 * @param buffer_size RandomAccessFile buffer size, if <= 0, use default size
 * @param cancelTask  allow task to be cancelled; may be null.
 * @param spiObject   sent to iosp.setSpecial() if not null
 * @return NetcdfDataset object
 * @throws java.io.IOException on read error
 */
static public NetcdfDataset openDataset(String location, boolean enhance, int buffer_size, ucar.nc2.util.CancelTask cancelTask, Object spiObject) throws IOException {
 // Resolve the location to a DatasetUrl, then delegate to the DatasetUrl-based overload.
 final DatasetUrl resolved = DatasetUrl.findDatasetUrl(location);
 return openDataset(resolved, (enhance ? defaultEnhanceMode : null), buffer_size, cancelTask, spiObject);
}
origin: Unidata/thredds

/**
 * Open a netcdf dataset, using NetcdfDataset.defaultEnhanceMode plus CoordSystems,
 * and turn into a DtCoverageDataset.
 *
 * @param location netcdf dataset to open, using NetcdfDataset.acquireDataset().
 * @return GridDataset
 * @throws java.io.IOException on read error
 * @see ucar.nc2.dataset.NetcdfDataset#acquireDataset
 */
static public DtCoverageDataset open(String location) throws java.io.IOException {
 // Delegate to the DatasetUrl-based overload using the library's default enhance mode.
 return open(DatasetUrl.findDatasetUrl(location), NetcdfDataset.getDefaultEnhanceMode());
}
origin: Unidata/thredds

/**
 * Open a netcdf dataset with the given enhance mode and turn it into a GridDataset.
 *
 * @param location netcdf dataset to open, using NetcdfDataset.acquireDataset().
 * @param enhanceMode open netcdf dataset with this enhanceMode
 * @return GridDataset
 * @throws java.io.IOException on read error
 * @see ucar.nc2.dataset.NetcdfDataset#acquireDataset
 */
static public GridDataset open(String location, Set<NetcdfDataset.Enhance> enhanceMode) throws java.io.IOException {
 DatasetUrl durl = DatasetUrl.findDatasetUrl(location);
 NetcdfDataset ds = ucar.nc2.dataset.NetcdfDataset.acquireDataset(null, durl, enhanceMode, -1, null, null);
 return new GridDataset(ds, null);
}
origin: Unidata/thredds

/**
 * Open a dataset as a TypedDataset.
 *
 * @param datatype open this kind of Typed Dataset; may be null, which means search all factories.
 *   If datatype is not null, only return correct TypedDataset (eg PointObsDataset for DataType.POINT).
 * @param location URL or file location of the dataset
 * @param task user may cancel
 * @param errlog place errors here, may not be null
 * @return a subclass of TypedDataset
 * @throws java.io.IOException on io error
 */
static public TypedDataset open( FeatureType datatype, String location, ucar.nc2.util.CancelTask task, StringBuilder errlog) throws IOException {
 // Acquire (possibly from cache) the dataset for this location, then dispatch on the feature type.
 NetcdfDataset ncd = NetcdfDataset.acquireDataset(DatasetUrl.findDatasetUrl(location), true, task);
 return open(datatype, ncd, task, errlog);
}
origin: Unidata/thredds

/**
 * Acquire the NetcdfFile for {@code cacheLocation}, optionally merging NcML and applying
 * the configured enhancements.
 *
 * <p>The resolved DatasetUrl is cached in {@code durl} on first use so the service type is
 * only computed once. NcML (if any) is merged BEFORE enhancing. If an enhance set is
 * configured, the result is a NetcdfDataset: enhanced in place when the acquired file is
 * already one, otherwise wrapped in a new NetcdfDataset.
 *
 * @param cancelTask allow the acquire to be cancelled; may be null
 * @return the acquired file; a NetcdfDataset when enhancement was applied
 * @throws IOException on read error
 */
public NetcdfFile acquireFile(CancelTask cancelTask) throws IOException {
 if (debugOpenFile) System.out.println(" try to acquire " + cacheLocation);
 long start = System.currentTimeMillis();
 if (durl == null)
  durl = DatasetUrl.findDatasetUrl(cacheLocation); // cache the ServiceType so we dont have to keep figuring it out
 NetcdfFile ncfile = NetcdfDataset.acquireFile(reader, null, durl, -1, cancelTask, spiObject);
 // must merge NcML before enhancing
 if (mergeNcml != null)
  ncfile = NcMLReader.mergeNcML(ncfile, mergeNcml); // create new dataset
 // No enhancements requested: hand back the (possibly NcML-merged) file as-is.
 if (enhance == null || enhance.isEmpty()) {
  if (debugOpenFile) System.out.println(" acquire (no enhance) " + cacheLocation + " took " + (System.currentTimeMillis() - start));
  return ncfile;
 }
 // must enhance
 NetcdfDataset ds;
 if (ncfile instanceof NetcdfDataset) {
  ds = (NetcdfDataset) ncfile;
  ds.enhance(enhance); // enhance "in place", ie modify the NetcdfDataset
 } else {
  ds = new NetcdfDataset(ncfile, enhance); // enhance when wrapping
 }
 if (debugOpenFile) System.out.println(" acquire (enhance) " + cacheLocation + " took " + (System.currentTimeMillis() - start));
 return ds;
}
origin: Unidata/thredds

// Acquire an NcML dataset, going through the FileCache when one is configured.
static private NetcdfFile acquireNcml(FileCache cache, FileFactory factory, Object hashKey,
                   String location, int buffer_size, ucar.nc2.util.CancelTask cancelTask, Object spiObject) throws IOException {
 // Without a cache, just read the NcML directly.
 if (cache == null) return NcMLReader.readNcML(location, cancelTask);
 if (factory == null) factory = new NcMLFactory();  // LOOK maybe always should use NcMLFactory ?
 DatasetUrl durl = DatasetUrl.findDatasetUrl(location);
 return (NetcdfFile) cache.acquire(factory, hashKey, durl, buffer_size, cancelTask, spiObject);
}
origin: Unidata/thredds

// NOTE(review): durl is resolved from the raw location while this.location is
// slash-normalized — confirm this asymmetry is intended.
public TestNcmlWriteAndCompareLocal(String location) throws IOException {
 this.durl = DatasetUrl.findDatasetUrl(location);
 this.location = StringUtil2.replace(location, '\\', "/");
}
origin: Unidata/thredds

  @Test
  public void testWrfNoTimeVar() throws IOException {
    String testFile = TestDir.cdmLocalTestDataDir +"wrf/WrfNoTimeVar.nc";
    logger.info("Open '{}'", testFile);

    // Default enhance mode, plus accepting incomplete coordinate systems.
    EnumSet<NetcdfDataset.Enhance> mode = EnumSet.copyOf(NetcdfDataset.getDefaultEnhanceMode());
    mode.add(NetcdfDataset.Enhance.IncompleteCoordSystems);

    NetcdfDataset ncd = NetcdfDataset.acquireDataset(DatasetUrl.findDatasetUrl(testFile), mode, null);

    // Exactly one coordinate system with two axes should be found.
    List<CoordinateSystem> systems = ncd.getCoordinateSystems();
    Assert.assertEquals(1, systems.size());
    CoordinateSystem datasetCs = systems.get(0);
    Assert.assertEquals(2, datasetCs.getCoordinateAxes().size());

    // T2 should be attached to that same coordinate system.
    VariableDS t2 = (VariableDS) ncd.findVariable("T2");
    List<CoordinateSystem> t2Systems = t2.getCoordinateSystems();
    Assert.assertEquals(1, t2Systems.size());
    Assert.assertEquals(datasetCs, t2Systems.get(0));
  }
}
origin: Unidata/thredds

DatasetUrl durl = DatasetUrl.findDatasetUrl(filename);
origin: Unidata/thredds

@Test
public void TestAggCached() throws IOException, InvalidRangeException {
 String filename = TestDir.cdmUnitTestDir + "agg/caching/wqb.ncml";
 DatasetUrl durl = DatasetUrl.findDatasetUrl(filename);
origin: Unidata/thredds

@Test
public void TestNotCached() throws IOException {
 // Open the nested aggregation without going through the file cache.
 DatasetUrl durl = DatasetUrl.findDatasetUrl(TestDir.cdmUnitTestDir + "ncml/nestedAgg/test.ncml");
 try (NetcdfDataset ncd = NetcdfDataset.acquireDataset(durl, true, null)) {
  Variable time = ncd.findVariable("time");
  assert time != null;
  assert time.getSize() == 19723 : time.getSize();
 }
}
origin: Unidata/thredds

@Test
public void testUnpackedValidRange() throws IOException, URISyntaxException {
 File resource = new File(getClass().getResource("testScaleOffsetMissingUnsigned.ncml").toURI());
 DatasetUrl durl = DatasetUrl.findDatasetUrl(resource.getAbsolutePath());

 // Enhance with unsigned conversion and scale/offset only — deliberately no ConvertMissing.
 Set<Enhance> mode = EnumSet.of(Enhance.ConvertUnsigned, Enhance.ApplyScaleOffset);

 try (NetcdfDataset ncd = NetcdfDataset.openDataset(durl, mode, -1, null, null)) {
  VariableDS var = (VariableDS) ncd.findVariable("unpackedValidRange");

  // valid_range has the same type as scale_factor (the wider of scale_factor and add_offset),
  // which is wider than the external data, so it is interpreted in unpacked units and
  // scale_factor is NOT applied to it.
  Assert2.assertNearlyEquals(9.9f, (float) var.getValidMin());
  Assert2.assertNearlyEquals(10.1f, (float) var.getValidMax());

  // scale_factor is float, so the enhanced variable reads as float.
  Assert.assertEquals(DataType.FLOAT, var.getDataType());

  float[] expected = new float[] {9.8f, 9.9f, 10.0f, 10.1f, 10.2f};
  float[] actual = (float[]) var.read().getStorage();
  Assert2.assertArrayNearlyEquals(expected, actual);
 }
}
 
origin: Unidata/thredds

 /**
  * Open a trajectory observation dataset, probing known adapters in a fixed order.
  * Returns null when no adapter recognizes the file.
  */
 static public TrajectoryObsDataset open(String netcdfFileURI, ucar.nc2.util.CancelTask cancelTask)
     throws IOException {
  NetcdfDataset ds = NetcdfDataset.acquireDataset(DatasetUrl.findDatasetUrl(netcdfFileURI), true, cancelTask);

  // First matching adapter wins; probe order is significant.
  if (RafTrajectoryObsDataset.isValidFile(ds))
   return new RafTrajectoryObsDataset(ds);
  if (SimpleTrajectoryObsDataset.isValidFile(ds))
   return new SimpleTrajectoryObsDataset(ds);
  if (Float10TrajectoryObsDataset.isValidFile(ds))
   return new Float10TrajectoryObsDataset(ds);
  if (ZebraClassTrajectoryObsDataset.isValidFile(ds))
   return new ZebraClassTrajectoryObsDataset(ds);
  if (ARMTrajectoryObsDataset.isValidFile(ds))
   return new ARMTrajectoryObsDataset(ds);
  return null;
 }
}
origin: Unidata/thredds

/**
 * Open a point-observation dataset, probing known adapters in a fixed order.
 * Returns null (and appends to log, if non-null) when no adapter recognizes the file.
 */
static public PointObsDataset open( String location, ucar.nc2.util.CancelTask task, StringBuffer log) throws java.io.IOException {
 // Open as a NetcdfDataset so scale/offset enhancement etc. is applied before probing.
 NetcdfDataset ncd = NetcdfDataset.acquireDataset(DatasetUrl.findDatasetUrl(location), true, task);
 // add record variable if there is one.
 ncd.sendIospMessage(NetcdfFile.IOSP_MESSAGE_ADD_RECORD_STRUCTURE);

 // Probe order is significant; first match wins.
 if (UnidataStationObsDataset.isValidFile(ncd))
  return new UnidataStationObsDataset(ncd);
 if (UnidataPointObsDataset.isValidFile(ncd))
  return new UnidataPointObsDataset(ncd);
 // Disabled adapters, kept for reference:
 // if (DapperDataset.isValidFile(ncd)) return DapperDataset.factory(ncd);
 // if (SequenceObsDataset.isValidFile(ncd)) return new SequenceObsDataset(ncd, task);
 if (UnidataStationObsDataset2.isValidFile(ncd))
  return new UnidataStationObsDataset2(ncd);
 if (NdbcDataset.isValidFile(ncd))
  return new NdbcDataset(ncd);
 if (MadisStationObsDataset.isValidFile(ncd))
  return new MadisStationObsDataset(ncd);
 if (OldUnidataStationObsDataset.isValidFile(ncd))
  return new OldUnidataStationObsDataset(ncd);
 // put at end to minimize false positive
 if (OldUnidataPointObsDataset.isValidFile(ncd))
  return new OldUnidataPointObsDataset(ncd);

 if (null != log) log.append("Cant find a Point/Station adapter for ").append(location);
 ncd.close();
 return null;
}
origin: Unidata/thredds

@Test
public void testScaling2() throws Exception {
 DatasetUrl durl = DatasetUrl.findDatasetUrl(location + "fine.ncml");
 // try-with-resources so the file is released even when an assertion fails
 try (NetcdfFile ncfile = NetcdfDataset.acquireFile(durl, null)) {
  // make sure that scaling is applied
  VariableDS vs = (VariableDS) ncfile.findVariable("hs");
  // FIX: section spec had a stray trailing ')' ("0,1,:,:)"), which is not a valid
  // ucar.ma2 section string.
  Array data = vs.read("0,1,:,:");
  while (data.hasNext()) {
   float val = data.nextFloat();
   if (!vs.isMissing(val))
    assert (val < 10.0) : val;
  }
 }
}
origin: Unidata/thredds

@Test
public void testUpdateCache() throws IOException, InvalidRangeException, InterruptedException {
 // Start with the extra file outside the aggregation.
 move(extraFile);
 DatasetUrl durl = DatasetUrl.findDatasetUrl(location);

 // Open the aggregation and verify its initial extent.
 NetcdfFile agg = NetcdfDataset.acquireDataset(new NcmlStringFileFactory(), durl, null, -1, null, null);
 check(agg, 12);

 // Restore the extra file, re-sync, and verify the aggregation grew.
 moveBack(extraFile);
 agg.syncExtend();
 check(agg, 18);

 agg.close();
}
origin: Unidata/thredds

@Test
public void TestCached() throws IOException {
 try {
  // Enable the file cache for the duration of this test.
  NetcdfDataset.initNetcdfFileCache(10, 20, -1);

  DatasetUrl durl = DatasetUrl.findDatasetUrl(TestDir.cdmUnitTestDir + "ncml/nestedAgg/test.ncml");
  try (NetcdfDataset ncd = NetcdfDataset.acquireDataset(durl, true, null)) {
   Variable time = ncd.findVariable("time");
   assert time != null;
   assert time.getSize() == 19723 : time.getSize();
  }

  FileCacheIF cache = NetcdfDataset.getNetcdfFileCache();
  cache.showCache();
 } finally {
  // Always tear the cache back down so other tests are unaffected.
  NetcdfDataset.shutdown();
 }
}
origin: Unidata/thredds

@Test
public void shouldGetSameVerticalProfile() throws IOException, InvalidRangeException{
  System.out.printf("Open %s%n", sameUnitsFile);
  DatasetUrl durl = DatasetUrl.findDatasetUrl(sameUnitsFile);

  // Open the dataset twice and compare the vertical transformation at the same lat/lon point.
  NetcdfDataset ds1 = NetcdfDataset.acquireDataset(durl, true, null);
  GridDataset gds1 = new GridDataset(ds1);
  GeoGrid grid1 = gds1.findGridByName(var);
  ProjectionImpl proj = grid1.getProjection();
  ProjectionPoint pp = proj.latLonToProj(point);

  double[] profile1 = getVertTransformationForPoint(pp, 0, grid1);

  NetcdfDataset ds2 = NetcdfDataset.acquireDataset(durl, true, null);
  GridDataset gds2 = new GridDataset(ds2);
  GeoGrid grid2 = gds2.findGridByName(var);
  proj = grid2.getProjection();
  pp = proj.latLonToProj(point);

  double[] profile2 = getVertTransformationForPoint(pp, 0, grid2);

  // Both opens must yield the same profile to within a small tolerance.
  assertArrayEquals(profile1, profile2, 0.00001);
}

ucar.nc2.dataset.DatasetUrl.findDatasetUrl

Popular methods of DatasetUrl

  • <init>
  • getProtocols
    Return the set of leading protocols for a url; may be more than one. Watch out for Windows paths starting with a drive letter.
  • checkIfCdmr
  • checkIfDap4
  • checkIfDods
  • checkIfNcml
  • checkIfRemoteNcml
  • decodeLeadProtocol
  • decodePathExtension
    Check path extension; assumes no query or fragment
  • disambiguateHttp
    If the URL alone is not sufficient to disambiguate the location, then this method will attempt to do
  • hashCode
  • parseFragment
    Given the fragment part of a url, see if it parses as name=value pairs separated by '&' (same as que
  • hashCode,
  • parseFragment,
  • searchFragment,
  • searchPath,
  • validateprotocol

Popular in Java

  • Making http requests using okhttp
  • orElseThrow (Optional)
    Return the contained value, if present, otherwise throw an exception to be created by the provided s
  • getResourceAsStream (ClassLoader)
  • getContentResolver (Context)
  • FileOutputStream (java.io)
    An output stream that writes bytes to a file. If the output file exists, it can be replaced or appen
  • PrintWriter (java.io)
    Wraps either an existing OutputStream or an existing Writerand provides convenience methods for prin
  • Collections (java.util)
    This class consists exclusively of static methods that operate on or return collections. It contains
  • Handler (java.util.logging)
    A Handler object accepts a logging request and exports the desired messages to a target, for example
  • ImageIO (javax.imageio)
  • Scheduler (org.quartz)
    This is the main interface of a Quartz Scheduler. A Scheduler maintains a registry of org.quartz.Job
  • Top plugins for WebStorm
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyStudentsTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now