/**
 * Reads the raw data payload of a blob from the underlying stream.
 * The number of bytes to read comes from the header's datasize field;
 * the call blocks until the full payload has been read.
 *
 * @param blobHeader header describing the blob whose payload follows
 * @return the raw (possibly compressed) blob payload
 * @throws IOException if the stream ends early or cannot be read
 */
private byte[] readRawBlob(Fileformat.BlobHeader blobHeader) throws IOException {
    // Allocate exactly as many bytes as the header advertises, then
    // read until the buffer is full.
    final byte[] payload = new byte[blobHeader.getDatasize()];
    dis.readFully(payload);
    return payload;
}
private void getNextBlob() { try { // Read the length of the next header block. This is the only time // we should expect to encounter an EOF exception. In all other // cases it indicates a corrupt or truncated file. int headerLength; try { headerLength = dis.readInt(); } catch (EOFException e) { eof = true; return; } if (log.isLoggable(Level.FINER)) { log.finer("Reading header for blob " + dataBlockCount++); } Fileformat.BlobHeader blobHeader = readHeader(headerLength); if (log.isLoggable(Level.FINER)) { log.finer("Processing blob of type " + blobHeader.getType() + "."); } byte[] blobData = readRawBlob(blobHeader); nextBlob = new PbfRawBlob(blobHeader.getType(), blobData); } catch (IOException e) { throw new RuntimeException("Unable to get next blob from PBF stream.", e); } }
/**
 * Reads and parses a blob header of the given encoded length from the
 * underlying stream.
 *
 * @param headerLength number of bytes the serialized header occupies
 * @return the decoded blob header
 * @throws IOException on a short read or stream failure
 */
private Fileformat.BlobHeader readHeader(int headerLength) throws IOException {
    // Pull the serialized header off the stream in one go, then let
    // protobuf decode it.
    byte[] encoded = new byte[headerLength];
    dis.readFully(encoded);
    return Fileformat.BlobHeader.parseFrom(encoded);
}
// NOTE(review): this is a garbled/truncated excerpt of protobuf-generated
// GeneratedMessageLite.dynamicMethod() dispatch code for BlobHeader —
// statements from several cases appear spliced together and the braces do
// not balance. Do not hand-edit; regenerate from the .proto definition.
switch (method) {
    case NEW_MUTABLE_INSTANCE: {
        // Creates a fresh mutable BlobHeader instance.
        return new org.openstreetmap.osmosis.osmbinary.Fileformat.BlobHeader();
        // The lines below look like pieces of other dynamicMethod cases
        // (isInitialized / visit / mergeFrom) — TODO confirm against the
        // regenerated source.
        if (!hasType()) {
            return null;
        if (!hasDatasize()) {
            return null;
        org.openstreetmap.osmosis.osmbinary.Fileformat.BlobHeader other =
                (org.openstreetmap.osmosis.osmbinary.Fileformat.BlobHeader) arg1;
        // Merge each field from 'other' via the visitor, guarded by the
        // presence flags on both messages.
        type_ = visitor.visitString(
                hasType(), type_,
                other.hasType(), other.type_);
        indexdata_ = visitor.visitByteString(
                hasIndexdata(), indexdata_,
                other.hasIndexdata(), other.indexdata_);
        datasize_ = visitor.visitInt(
                hasDatasize(), datasize_,
                other.hasDatasize(), other.datasize_);
        if (visitor == com.google.protobuf.GeneratedMessageLite.MergeFromVisitor
                .INSTANCE) {
        if (!parseUnknownField(tag, input)) {
            done = true;
// NOTE(review): truncated excerpt — opening context and closing braces are
// missing from this view; statements from at least two code paths appear
// fused together.
return null;
// Decode the blob header from its serialized bytes.
Fileformat.BlobHeader blobHeader = Fileformat.BlobHeader.parseFrom(blobHeaderBytes);
byte[] blobBytes = new byte[blobHeader.getDatasize()];
// NOTE(review): InputStream.read() may return fewer bytes than requested;
// readFully() would be the safer call here — the length comparison below
// only detects a short read, it does not retry. TODO confirm intent.
int readBlob = dataInputStream.read(blobBytes);
if (readBlob != blobBytes.length) {
if (blobHeader.getType().equals(OSM_HEADER)) {
Osmformat.HeaderBlock headerBlock = Osmformat.HeaderBlock.parseFrom(blobData);
// NOTE(review): parses 'blobData' (declared outside this view) rather than
// the 'blobBytes' just read — verify against the full source.
if (headerBlock.hasBbox())
log.info("Skipped block " + blobHeader.getType() + " with " + blobBytes.length + " bytes");
// Build a blob header describing the serialized blob that will follow:
// its type string and the byte length of the payload.
Fileformat.BlobHeader blobHeader = Fileformat.BlobHeader.newBuilder()
        .setType(blobTypeString).setDatasize(serializedBlob.length).build();
byte[] serializedBlobHeader = blobHeader.toByteArray();
// NOTE(review): truncated excerpt — the try block's body (presumably the
// write of the header and blob to an output stream) is outside this view.
try {
/**
 * Writes this block to the given output stream.
 * NOTE(review): truncated excerpt — the method body is cut off mid-way
 * (no return statement or closing brace visible); the handling of 'size',
 * 'offset', and 'flags' presumably continues past this view.
 */
public FileBlockPosition writeTo(OutputStream outwrite, CompressFlags flags)
        throws IOException {
    BlobHeader.Builder builder = Fileformat.BlobHeader
            .newBuilder();
    // Index data is optional; only attach it when present.
    if (indexdata != null)
        builder.setIndexdata(indexdata);
    int size = message.getSerializedSize();
    message.writeTo(outwrite);
    long offset = -1;
/** * Read the header. After reading the header, either the contents must be * skipped or read */ static FileBlockHead readHead(InputStream input) throws IOException { DataInputStream datinput = new DataInputStream(input); int headersize = datinput.readInt(); // System.out.format("Header size %d %x\n",headersize,headersize); if (headersize > MAX_HEADER_SIZE) { throw new FileFormatException("Unexpectedly long header "+MAX_HEADER_SIZE+ " bytes. Possibly corrupt file."); } byte buf[] = new byte[headersize]; datinput.readFully(buf); // System.out.format("Read buffer for header of %d bytes\n",buf.length); Fileformat.BlobHeader header = Fileformat.BlobHeader .parseFrom(buf); FileBlockHead fileblock = new FileBlockHead(header.getType(), header .getIndexdata()); fileblock.datasize = header.getDatasize(); if (header.getDatasize() > MAX_BODY_SIZE) { throw new FileFormatException("Unexpectedly long body "+MAX_BODY_SIZE+ " bytes. Possibly corrupt file."); } fileblock.input = input; if (input instanceof FileInputStream) fileblock.data_offset = ((FileInputStream) input).getChannel() .position(); return fileblock; }
private void getNextBlob() { try { // Read the length of the next header block. This is the only time // we should expect to encounter an EOF exception. In all other // cases it indicates a corrupt or truncated file. int headerLength; try { headerLength = dis.readInt(); } catch (EOFException e) { eof = true; return; } if (log.isLoggable(Level.FINER)) { log.finer("Reading header for blob " + dataBlockCount++); } Fileformat.BlobHeader blobHeader = readHeader(headerLength); if (log.isLoggable(Level.FINER)) { log.finer("Processing blob of type " + blobHeader.getType() + "."); } byte[] blobData = readRawBlob(blobHeader); nextBlob = new PbfRawBlob(blobHeader.getType(), blobData); } catch (IOException e) { throw new RuntimeException("Unable to get next blob from PBF stream.", e); } }
private void getNextBlob() { try { // Read the length of the next header block. This is the only time // we should expect to encounter an EOF exception. In all other // cases it indicates a corrupt or truncated file. int headerLength; try { headerLength = dis.readInt(); } catch (EOFException e) { eof = true; return; } if (log.isLoggable(Level.FINER)) { log.finer("Reading header for blob " + dataBlockCount++); } Fileformat.BlobHeader blobHeader = readHeader(headerLength); if (log.isLoggable(Level.FINER)) { log.finer("Processing blob of type " + blobHeader.getType() + "."); } byte[] blobData = readRawBlob(blobHeader); nextBlob = new PbfRawBlob(blobHeader.getType(), blobData); } catch (IOException e) { throw new RuntimeException("Unable to get next blob from PBF stream.", e); } }
private void getNextBlob() { try { // Read the length of the next header block. This is the only time // we should expect to encounter an EOF exception. In all other // cases it indicates a corrupt or truncated file. int headerLength; try { headerLength = dis.readInt(); } catch (EOFException e) { eof = true; return; } if (log.isLoggable(Level.FINER)) { log.finer("Reading header for blob " + dataBlockCount++); } BlobHeader blobHeader = readHeader(headerLength); if (log.isLoggable(Level.FINER)) { log.finer("Processing blob of type " + blobHeader.getType() + "."); } byte[] blobData = readRawBlob(blobHeader); nextBlob = new RawBlob(blobHeader.getType(), blobData); } catch (IOException e) { throw new OsmosisRuntimeException("Unable to get next blob from PBF stream.", e); } }
// NOTE(review): protobuf-generated code for BlobHeader — computes and caches
// the serialized byte size. Do not hand-edit; regenerate from the .proto.
@java.lang.Override
public int getSerializedSize() {
    // Return the memoized size if already computed (-1 means "not yet").
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    // Field 1 (type) — counted only when its presence bit is set.
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
                .computeStringSize(1, getType());
    }
    // Field 2 (indexdata) — counted only when its presence bit is set.
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
                .computeBytesSize(2, indexdata_);
    }
    // Field 3 (datasize) — counted only when its presence bit is set.
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
                .computeInt32Size(3, datasize_);
    }
    // Unknown fields preserved from parsing are counted too.
    size += unknownFields.getSerializedSize();
    memoizedSerializedSize = size;
    return size;
}
// NOTE(review): protobuf-generated serializer for BlobHeader — writes each
// field whose presence bit is set, then any preserved unknown fields.
// Do not hand-edit; regenerate from the .proto definition.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
    // Field 1: type.
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeString(1, getType());
    }
    // Field 2: indexdata.
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, indexdata_);
    }
    // Field 3: datasize.
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt32(3, datasize_);
    }
    unknownFields.writeTo(output);
}
// Creates a builder pre-populated with the given prototype's field values.
// (protobuf-generated code — regenerate rather than hand-edit.)
public static Builder newBuilder(org.openstreetmap.osmosis.osmbinary.Fileformat.BlobHeader prototype) {
    return (Builder) DEFAULT_INSTANCE.createBuilder(prototype);
}
// Creates an empty builder for constructing a BlobHeader.
// (protobuf-generated code — regenerate rather than hand-edit.)
public static Builder newBuilder() {
    return (Builder) DEFAULT_INSTANCE.createBuilder();
}
// NOTE(review): truncated excerpt — the body of this prototype-based
// overload lies outside this view.
public static Builder newBuilder(org.openstreetmap.osmosis.osmbinary.Fileformat.BlobHeader prototype) {
/**
 * Fetches the raw bytes of the blob described by the supplied header.
 *
 * @param blobHeader header whose datasize field gives the payload length
 * @return a buffer containing the complete blob payload
 * @throws IOException if the stream cannot supply the full payload
 */
private byte[] readRawBlob(Fileformat.BlobHeader blobHeader) throws IOException {
    final int size = blobHeader.getDatasize();
    // readFully blocks until every byte has arrived, so a successful
    // return guarantees a complete buffer.
    byte[] buffer = new byte[size];
    dis.readFully(buffer);
    return buffer;
}
// Reads headerLength bytes from the stream and decodes them into a
// Fileformat.BlobHeader protobuf message.
private Fileformat.BlobHeader readHeader(int headerLength) throws IOException {
    final byte[] raw = new byte[headerLength];
    dis.readFully(raw);
    final Fileformat.BlobHeader decoded = Fileformat.BlobHeader.parseFrom(raw);
    return decoded;
}
/**
 * Pulls the serialized blob header off the stream and parses it.
 *
 * @param headerLength size in bytes of the serialized header
 * @return the parsed header message
 * @throws IOException if reading the stream or parsing fails
 */
private Fileformat.BlobHeader readHeader( int headerLength ) throws IOException {
    byte[] wire = new byte[headerLength];
    dis.readFully(wire);
    return Fileformat.BlobHeader.parseFrom(wire);
}
/**
 * Reads the complete raw payload for the given blob header from the
 * underlying data stream.
 *
 * @param blobHeader header whose datasize determines how much to read
 * @return the raw blob bytes
 * @throws IOException on a short read or stream failure
 */
private byte[] readRawBlob(BlobHeader blobHeader) throws IOException {
    // Size the buffer from the header, then fill it completely.
    final byte[] body = new byte[blobHeader.getDatasize()];
    dis.readFully(body);
    return body;
}