reader = new ShapefileReader(shpFiles, true, false, new GeometryFactory()); int features = reader.getCount(0); max = 1; int nodes = 1; reader.close(); reader = new ShapefileReader(shpFiles, true, false, new GeometryFactory()); if (reader != null) reader.close();
int cnt = 0; int numRecs = shpIndex.getRecordCount(); ShapefileHeader header = reader.getHeader(); Envelope bounds = new Envelope(header.minX(), header.maxX(), header.minY(), header.maxY()); Record rec = null; while (reader.hasNext()) { rec = reader.nextRecord(); tree.insert(cnt++, new Envelope(rec.minX, rec.maxX, rec.minY, rec.maxY));
positionBufferForOffset(buffer, getNextOffset()); if (currentShape != UNKNOWN) currentShape++; ByteBuffer old = buffer; buffer = ensureCapacity(buffer, recordLength + 8, useMemoryMappedBuffer); buffer.put(old); NIOUtilities.clean(old, useMemoryMappedBuffer); fill(buffer, channel); buffer.position(0); } else this.currentOffset += buffer.position(); buffer.compact(); fill(buffer, channel); buffer.position(0); record.number = recordNumber; record.end = this.toFileOffset(buffer.position()) + recordLength - 4;
/**
 * Positions the byte stream at {@code offset} and returns the record found there.
 * Typically driven by offsets taken from the shx (or another) index file. Mind that:
 *
 * <ul>
 *   <li>it's your responsibility to ensure the offset corresponds to the actual beginning of a
 *       shape struct
 *   <li>once you call this, reading with hasNext/next on sparse shapefiles will be broken (we
 *       don't know anymore at which shape we are)
 * </ul>
 *
 * @param offset If using an shx file the offset would be: 2 * (index.getOffset(i))
 * @return The record after the offset location in the bytestream
 * @throws IOException thrown in a read error occurs
 * @throws UnsupportedOperationException thrown if not a random access file
 */
public Record recordAt(int offset) throws IOException, UnsupportedOperationException {
    // Guard first: random access is a prerequisite for seeking.
    if (!randomAccessEnabled) {
        throw new UnsupportedOperationException("Random Access not enabled");
    }
    goTo(offset);
    return nextRecord();
}
/** * Moves the reader to the specified byte offset in the file. Mind that: * * <ul> * <li>it's your responsibility to ensure the offset corresponds to the actual beginning of a * shape struct * <li>once you call this, reading with hasNext/next on sparse shapefiles will be broken (we * don't know anymore at which shape we are) * </ul> * * @param offset * @throws IOException * @throws UnsupportedOperationException */ public void goTo(int offset) throws IOException, UnsupportedOperationException { disableShxUsage(); if (randomAccessEnabled) { positionBufferForOffset(buffer, offset); int oldRecordOffset = record.end; record.end = offset; try { hasNext(false); // don't check for next logical record equality } catch (IOException ioe) { record.end = oldRecordOffset; throw ioe; } } else { throw new UnsupportedOperationException("Random Access not enabled"); } }
/** * Parses the shpfile counting the records. * * @return the number of non-null records in the shapefile */ public int getCount(int count) throws DataSourceException { try { if (channel == null) return -1; count = 0; long offset = this.currentOffset; try { goTo(100); } catch (UnsupportedOperationException e) { return -1; } while (hasNext()) { count++; nextRecord(); } goTo((int) offset); } catch (IOException ioe) { count = -1; // What now? This seems arbitrarily appropriate ! throw new DataSourceException("Problem reading shapefile record", ioe); } return count; }
/**
 * Command-line smoke test: prints the header and every geometry of the shapefile given as
 * the first argument to stdout.
 *
 * @param args args[0] is the path to a .shp file
 * @throws Exception on any read or parse failure
 */
public static void main(String[] args) throws Exception {
    FileInputStream in = new FileInputStream(args[0]);
    try {
        FileChannel channel = in.getChannel();
        ShapefileReader reader = new ShapefileReader(channel, new Lock());
        try {
            System.out.println(reader.getHeader());
            while (reader.hasNext()) {
                System.out.println(reader.nextRecord().shape());
            }
        } finally {
            // Fix: the reader (and its channel) was previously leaked when an exception
            // was thrown mid-iteration.
            reader.close();
        }
    } finally {
        in.close();
    }
}
return null; ShapefileReader reader=new ShapefileReader(getReadChannel(shpURL), this.readWriteLock); try{ ret=new Envelope(); for (Iterator iter = records.iterator(); iter.hasNext();) { Data data = (Data) iter.next(); reader.goTo(((Long)data.getValue(1)).intValue()); Record record = reader.nextRecord(); ret.expandToInclude(new Envelope(record.minX, record.maxX, record.minY, record.maxY)); reader.close();
/**
 * Reports whether another record is available. Checks the stream for the presence of 8 more
 * bytes (the length of a record header) and, via the delegate, that the upcoming record
 * carries the expected next logical record number.
 *
 * @return true if there is another record, false otherwise
 * @throws IOException if reading ahead fails
 */
public boolean hasNext() throws IOException {
    // Delegate with record-number validation enabled.
    return hasNext(true);
}
/**
 * Reads and returns the shape located at the given byte distance from the start of the file.
 * Mind that:
 *
 * <ul>
 *   <li>it's your responsibility to ensure the offset corresponds to the actual beginning of a
 *       shape struct
 *   <li>once you call this, reading with hasNext/next on sparse shapefiles will be broken (we
 *       don't know anymore at which shape we are)
 * </ul>
 *
 * @param offset byte offset of the shape record
 * @return the parsed geometry at that offset
 * @throws IOException if the seek or read fails
 * @throws UnsupportedOperationException if random access is not enabled
 */
public Object shapeAt(int offset) throws IOException, UnsupportedOperationException {
    disableShxUsage();
    if (!randomAccessEnabled) {
        throw new UnsupportedOperationException("Random Access not enabled");
    }
    goTo(offset);
    return nextRecord().shape();
}
/**
 * Closes the shape, dbf and fid readers. The nested finally blocks guarantee every close is
 * attempted even if an earlier one throws; the first exception propagates to the caller.
 *
 * @throws IOException if closing any of the underlying readers fails
 */
@Override
public void close() throws IOException {
    try {
        if (shp != null) {
            shp.close();
        }
    } finally {
        try {
            if (dbf != null) {
                dbf.close();
            }
        } finally {
            try {
                if (fidReader != null) {
                    fidReader.close();
                }
            } finally {
                // Drop references so a repeated close() is a harmless no-op.
                // Fix: fidReader was previously never nulled, unlike shp and dbf.
                shp = null;
                dbf = null;
                fidReader = null;
            }
        }
    }
}
buffer.position(this.toBufferOffset(record.end)); ByteBuffer old = buffer; buffer = ensureCapacity(buffer, recordLength + 8, useMemoryMappedBuffer); buffer.put(old); NIOUtilities.clean(old); fill(buffer, channel); buffer.position(0); } else this.currentOffset += buffer.position(); buffer.compact(); fill(buffer, channel); buffer.position(0); record.number = recordNumber; record.end = this.toFileOffset(buffer.position()) + recordLength - 4;
@Override protected int getCountInternal(Query query) throws IOException { if (query.getFilter() == Filter.INCLUDE) { IndexFile file = getDataStore().shpManager.openIndexFile(); if (file != null) { try { return file.getRecordCount(); } finally { file.close(); } } // no Index file so use the number of shapefile records ShapefileReader reader = getDataStore().shpManager.openShapeReader(new GeometryFactory(), false); int count = -1; try { count = reader.getCount(count); } catch (IOException e) { throw e; } finally { reader.close(); } return count; } return -1; }
shapeType = attReader.shp.getHeader().getShapeType(); handler = shapeType.getShapeHandler(gf); shapefileLength += attReader.shp.transferTo(shpWriter, ++records, env);
reader = new ShapefileReader(channel, true, false, lock); reader.close(); if( is!=null ) is.close();
/**
 * Returns the next shapefile record. When an index result set is in use ({@code goodRecs}
 * non-null), the record is fetched by the byte offset stored in the primed {@code next}
 * entry; otherwise the reader simply advances sequentially.
 *
 * @return the next record
 * @throws IOException if reading the record fails
 * @throws IndexOutOfBoundsException if there are no more records
 */
public ShapefileReader.Record next() throws IOException {
    // hasNext() also primes "next" when iterating via the index.
    if (!hasNext()) throw new IndexOutOfBoundsException("No more features in reader");
    if (this.goodRecs != null) {
        // Indexed path: value at position 1 is (presumably) the record's byte offset
        // in the .shp file — TODO confirm against the index writer.
        Long l = (Long) next.getValue(1);
        ShapefileReader.Record record = shp.recordAt(l.intValue());
        // Clear the primed entry so the following hasNext() advances the index cursor.
        next = null;
        return record;
    }
    // NOTE(review): recno is only incremented on this sequential path, not on the
    // indexed path above — verify that is intentional.
    recno++;
    return shp.nextRecord();
}
/**
 * Initializes the reader: sets up the read buffer (memory-mapped when possible), parses the
 * shapefile header, resolves the shape handler for the file's shape type, and positions the
 * logical record pointer just past the header.
 *
 * @param strict whether the header should be parsed strictly
 * @param gf geometry factory used to build geometries from records
 * @throws IOException if reading fails or the shape type has no handler
 * @throws ShapefileException on header parse errors
 */
private void init(boolean strict, GeometryFactory gf) throws IOException, ShapefileException {
    geometryFactory = gf;
    if (channel instanceof FileChannel && useMemoryMappedBuffer) {
        // Map the whole file read-only; currentOffset 0 means buffer position ==
        // file position.
        FileChannel fc = (FileChannel) channel;
        buffer = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
        buffer.position(0);
        this.currentOffset = 0;
    } else {
        // force useMemoryMappedBuffer to false
        this.useMemoryMappedBuffer = false;
        // start small
        buffer = NIOUtilities.allocate(1024);
        fill(buffer, channel);
        buffer.flip();
        this.currentOffset = 0;
    }
    // Parse the header from the freshly filled buffer.
    header = new ShapefileHeader();
    header.read(buffer, strict);
    fileShapeType = header.getShapeType();
    handler = fileShapeType.getShapeHandler(gf);
    if (handler == null) {
        throw new IOException("Unsuported shape type:" + fileShapeType);
    }
    // Scratch buffer for record headers; shapefile record headers are big-endian.
    headerTransfer = ByteBuffer.allocate(8);
    headerTransfer.order(ByteOrder.BIG_ENDIAN);

    // make sure the record end is set now...
    record.end = this.toFileOffset(buffer.position());
}
Rectangle screenSize, MathTransform mt, boolean hasOpacity, boolean returnJTS ) throws IOException, TransformException { ShapefileReader reader = ds.openShapeReader(new GeometryFactory()); ShapeType type = reader.getHeader().getShapeType(); reader.setHandler(new org.geotools.renderer.shape.shapehandler.jts.MultiLineHandler(type, bbox, mt, hasOpacity, screenSize)); else reader.setHandler(new MultiLineHandler(type, bbox, mt, hasOpacity, screenSize)); || (type == ShapeType.POLYGONZ)) { if( returnJTS ) reader.setHandler(new org.geotools.renderer.shape.shapehandler.jts.PolygonHandler(type, bbox, mt, hasOpacity)); else reader.setHandler(new PolygonHandler(type, bbox, mt, hasOpacity)); reader.setHandler(new org.geotools.renderer.shape.shapehandler.jts.PointHandler(type, bbox, screenSize, mt, hasOpacity)); else reader.setHandler(new PointHandler(type, bbox, screenSize, mt, hasOpacity)); || (type == ShapeType.MULTIPOINTZ)) { if( returnJTS ) reader.setHandler(new org.geotools.renderer.shape.shapehandler.jts.MultiPointHandler(type, bbox, screenSize, mt, hasOpacity)); else reader.setHandler(new MultiPointHandler(type, bbox, screenSize, mt, hasOpacity));
attReader.shp.disableShxUsage(); if(attReader.hasNext()) { shapeType = attReader.shp.getHeader().getShapeType(); handler = shapeType.getShapeHandler(new GeometryFactory()); shpWriter.writeHeaders(bounds, shapeType, records, shapefileLength);