static Page read(FileStore fileStore, long pos, MVMap<?, ?> map,
        long filePos, long maxPos) {
    ByteBuffer buff;
    int maxLength = DataUtils.getPageMaxLength(pos);
    if (maxLength == DataUtils.PAGE_LARGE) {
        // for large pages, the real length is stored in the first 4 bytes
        buff = fileStore.readFully(filePos, 128);
        maxLength = buff.getInt();
    }
    maxLength = (int) Math.min(maxPos - filePos, maxLength);
    int length = maxLength;
    if (length < 0) {
        throw DataUtils.newIllegalStateException(
                DataUtils.ERROR_FILE_CORRUPT,
                "Illegal page length {0} reading at {1}; max pos {2} ",
                length, filePos, maxPos);
    }
    buff = fileStore.readFully(filePos, length);
    int chunkId = DataUtils.getPageChunkId(pos);
    int offset = DataUtils.getPageOffset(pos);
    int start = buff.position();
    int pageLength = buff.getInt();
    if (pageLength > maxLength) {
        throw DataUtils.newIllegalStateException(
                DataUtils.ERROR_FILE_CORRUPT,
                "File corrupted in chunk {0}, expected page length =< {1}, got {2}",
                chunkId, maxLength, pageLength);
    }
    int mapId = map.getId();
    int m = DataUtils.readVarInt(buff);
    if (m != mapId) {
        throw DataUtils.newIllegalStateException(
                DataUtils.ERROR_FILE_CORRUPT,
                "File corrupted in chunk {0}, expected map id {1}, got {2}",
                chunkId, mapId, m);
    }
    int checkTest = DataUtils.getCheckValue(chunkId)
            ^ DataUtils.getCheckValue(offset)
            ^ DataUtils.getCheckValue(pageLength);
byte[] getFooterBytes() {
    StringBuilder buff = new StringBuilder(FOOTER_LENGTH);
    DataUtils.appendMap(buff, "chunk", id);
    DataUtils.appendMap(buff, "block", block);
    DataUtils.appendMap(buff, "version", version);
    byte[] bytes = buff.toString().getBytes(StandardCharsets.ISO_8859_1);
    int checksum = DataUtils.getFletcher32(bytes, 0, bytes.length);
    DataUtils.appendMap(buff, "fletcher", checksum);
    while (buff.length() < FOOTER_LENGTH - 1) {
        buff.append(' ');
    }
    buff.append('\n');
    return buff.toString().getBytes(StandardCharsets.ISO_8859_1);
}
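For illustration, the footer this produces is a single ISO-8859-1 line of hex key/value pairs, checksummed up to the "fletcher" entry, space-padded to FOOTER_LENGTH - 1, and terminated by a newline. With hypothetical values it looks roughly like:

    chunk:1,block:2,version:3,fletcher:a07f2a63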
private Set<Integer> collectReferencedChunks() {
    long testVersion = lastChunk.version;
    DataUtils.checkArgument(testVersion > 0, "Collect references on version 0");
    long readCount = getFileStore().readCount.get();
    Set<Integer> referenced = new HashSet<>();
    for (Cursor<String, String> c = meta.cursor("root."); c.hasNext();) {
        String key = c.next();
        if (!key.startsWith("root.")) {
            break;
        }
        long pos = DataUtils.parseHexLong(c.getValue());
        if (pos == 0) {
            continue;
        }
        int mapId = DataUtils.parseHexInt(key.substring("root.".length()));
        collectReferencedChunks(referenced, mapId, pos, 0);
    }
    long pos = lastChunk.metaRootPos;
    collectReferencedChunks(referenced, 0, pos, 0);
    readCount = fileStore.readCount.get() - readCount;
    return referenced;
}
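The cursor above relies on the meta map convention that each map's root page position is stored under "root." followed by the map id in hex, with the position itself as a hex value. A hypothetical pair of entries (values illustrative):

    root.7 = 4000004f8c    // hex position of map 7's root page
    root.8 = 0             // empty map, skipped by the pos == 0 check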
@Override
public Object read(ByteBuffer buff, int tag) {
    int len;
    if (tag == TYPE_STRING) {
        len = DataUtils.readVarInt(buff);
    } else {
        len = tag - TAG_STRING_0_15;
    }
    return DataUtils.readString(buff, len);
}
/**
 * Create a new IllegalArgumentException.
 *
 * @param message the message
 * @param arguments the arguments
 * @return the exception
 */
public static IllegalArgumentException newIllegalArgumentException(
        String message, Object... arguments) {
    return initCause(new IllegalArgumentException(
            formatMessage(0, message, arguments)), arguments);
}
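A minimal usage sketch (the message text and variable name here are hypothetical): formatMessage substitutes the trailing arguments for the {0}, {1}, ... placeholders in the pattern.

    // hypothetical call site; {0} is replaced by the first argument
    throw DataUtils.newIllegalArgumentException(
            "Unsupported map name {0}", name);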
byte[] comp = new byte[expLen * 2];
int compLen = compressor.compress(exp, expLen, comp, 0);
int plus = DataUtils.getVarIntLen(compLen - expLen);
if (compLen + plus < expLen) {
    // the compressed form is smaller: mark the type byte as
    // compressed and overwrite the expanded data
    buff.position(typePos)
            .put((byte) (type + DataUtils.PAGE_COMPRESSED));
    buff.position(compressStart)
            .put(comp, 0, compLen);
}
int pageLength = buff.position() - start;
int chunkId = chunk.id;
int check = DataUtils.getCheckValue(chunkId)
        ^ DataUtils.getCheckValue(start)
        ^ DataUtils.getCheckValue(pageLength);
buff.putInt(start, pageLength)
        .putShort(start + 4, (short) check);
if (pos != 0) {
    throw DataUtils.newIllegalStateException(
            DataUtils.ERROR_INTERNAL, "Page already stored");
}
pos = DataUtils.getPagePos(chunkId, start, pageLength, type);
store.cachePage(pos, this, getMemory());
if (type == DataUtils.PAGE_TYPE_NODE) {
    // cache nodes a second time so they stay in the cache longer
    store.cachePage(pos, this, getMemory());
}
long max = DataUtils.getPageMaxLength(pos);
chunk.maxLen += max;
chunk.maxLenLive += max;
int pageLength = buff.getInt();
if (pageLength > maxLength || pageLength < 4) {
    throw DataUtils.newIllegalStateException(
            DataUtils.ERROR_FILE_CORRUPT,
            "File corrupted in chunk {0}, expected page length 4..{1}, got {2}",
            chunkId, maxLength, pageLength);
}
buff.limit(start + pageLength);
short check = buff.getShort();
int mapId = DataUtils.readVarInt(buff);
if (mapId != map.getId()) {
    throw DataUtils.newIllegalStateException(
            DataUtils.ERROR_FILE_CORRUPT,
            "File corrupted in chunk {0}, expected map id {1}, got {2}",
            chunkId, map.getId(), mapId);
}
int checkTest = DataUtils.getCheckValue(chunkId)
        ^ DataUtils.getCheckValue(offset)
        ^ DataUtils.getCheckValue(pageLength);
if (check != (short) checkTest) {
    throw DataUtils.newIllegalStateException(
            DataUtils.ERROR_FILE_CORRUPT,
            "File corrupted in chunk {0}, expected check value {1}, got {2}",
            chunkId, checkTest, check);
}
int len = DataUtils.readVarInt(buff);
keys = new Object[len];
int type = buff.get();
if ((type & 1) == DataUtils.PAGE_TYPE_NODE) {
    // a node page: read the child positions, then the child counts
    children = new PageReference[len + 1];
    long[] p = new long[len + 1];
    for (int i = 0; i <= len; i++) {
        p[i] = buff.getLong();
    }
    long total = 0;
    for (int i = 0; i <= len; i++) {
        long s = DataUtils.readVarLong(buff);
        total += s;
        children[i] = new PageReference(null, p[i], s);
    }
    totalCount = total;
}
lastBlock.get(buff);
String s = new String(buff, DataUtils.LATIN).trim();
HashMap<String, String> m = DataUtils.parseMap(s);
int check = DataUtils.readHexInt(m, "fletcher", 0);
m.remove("fletcher");
// the checksum covers everything before the "fletcher" entry
s = s.substring(0, s.lastIndexOf("fletcher") - 1);
byte[] bytes = s.getBytes(DataUtils.LATIN);
int checksum = DataUtils.getFletcher32(bytes, 0, bytes.length);
if (check == checksum) {
    int chunk = DataUtils.readHexInt(m, "chunk", 0);
    Chunk c = new Chunk(chunk);
    c.version = DataUtils.readHexLong(m, "version", 0);
    c.block = DataUtils.readHexLong(m, "block", 0);
    return c;
}
case 0:
    // in-place data: 0, length (var int), data
    int len = DataUtils.readVarInt(idBuffer);
    idBuffer.position(idBuffer.position() + len);
    length += len;
    break;
case 1:
    // one block: 1, length (var int), block key (var long)
    length += DataUtils.readVarInt(idBuffer);
    DataUtils.readVarLong(idBuffer);
    break;
case 2:
    // indirect: 2, total length (var long), block key (var long)
    length += DataUtils.readVarLong(idBuffer);
    DataUtils.readVarLong(idBuffer);
    break;
default:
    throw DataUtils.newIllegalArgumentException(
            "Unsupported id {0}", Arrays.toString(id));
@Override
public Object read(ByteBuffer buff, int tag) {
    switch (tag) {
    case TAG_BIG_DECIMAL_0:
        return BigDecimal.ZERO;
    case TAG_BIG_DECIMAL_1:
        return BigDecimal.ONE;
    case TAG_BIG_DECIMAL_SMALL:
        return BigDecimal.valueOf(DataUtils.readVarLong(buff));
    case TAG_BIG_DECIMAL_SMALL_SCALED:
        int scale = DataUtils.readVarInt(buff);
        return BigDecimal.valueOf(DataUtils.readVarLong(buff), scale);
    }
    int scale = DataUtils.readVarInt(buff);
    int len = DataUtils.readVarInt(buff);
    byte[] bytes = Utils.newBytes(len);
    buff.get(bytes);
    BigInteger b = new BigInteger(bytes);
    return new BigDecimal(b, scale);
}
for (int i = 0, size = s.length(); i < size;) {
    int startKey = i;
    i = s.indexOf(':', i);
    if (i < 0) {
        throw newIllegalStateException(ERROR_FILE_CORRUPT,
                "Not a map: {0}", s);
    }
    if (i - startKey == 8 && s.regionMatches(startKey, "fletcher", 0, 8)) {
        // the "fletcher" entry closes the map: verify the checksum
        // of everything that precedes it
        parseMapValue(buff, s, i + 1, size);
        int check = (int) Long.parseLong(buff.toString(), 16);
        if (check == getFletcher32(bytes, start, startKey - 1)) {
            return map;
        }
        break; // invalid checksum
    }
    String key = s.substring(startKey, i++);
    i = parseMapValue(buff, s, i, size);
    map.put(key, buff.toString());
    buff.setLength(0);
}
/**
 * Convert the illegal state exception to a database exception.
 *
 * @param e the illegal state exception
 * @return the database exception
 */
DbException convertException(IllegalStateException e) {
    if (DataUtils.getErrorCode(e.getMessage()) ==
            DataUtils.ERROR_TRANSACTION_LOCKED) {
        throw DbException.get(ErrorCode.CONCURRENT_UPDATE_1,
                e, getName());
    }
    return store.convertIllegalStateException(e);
}
public SpatialDataType(int dimensions) {
    // Because of how we are storing the min-max-flag in the
    // read/write method the number of dimensions must be < 32.
    DataUtils.checkArgument(
            dimensions >= 1 && dimensions < 32,
            "Dimensions must be between 1 and 31, is {0}", dimensions);
    this.dimensions = dimensions;
}
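As the comment says, the read/write methods pack one min-max flag per dimension into a single int, which is why the constructor caps dimensions at 31. A short usage sketch (illustrative):

    // two-dimensional bounding boxes, one flag bit per dimension
    SpatialDataType keyType = new SpatialDataType(2);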
private void removeChild(int index) {
    if (index == 0 && children.length == 1) {
        children = EMPTY_ARRAY;
        return;
    }
    long[] c2 = new long[children.length - 1];
    DataUtils.copyExcept(children, c2, children.length, index);
    children = c2;
}
/**
 * Insert a key-value pair into this leaf.
 *
 * @param index the index
 * @param key the key
 * @param value the value
 */
public void insertLeaf(int index, Object key, Object value) {
    int len = keys.length + 1;
    Object[] newKeys = new Object[len];
    DataUtils.copyWithGap(keys, newKeys, len - 1, index);
    keys = newKeys;
    Object[] newValues = new Object[len];
    DataUtils.copyWithGap(values, newValues, len - 1, index);
    values = newValues;
    keys[index] = key;
    values[index] = value;
    totalCount++;
    if (isPersistent()) {
        addMemory(map.getKeyType().getMemory(key) +
                map.getValueType().getMemory(value));
    }
}
private static int readVarInt(ByteBuffer buff) {
    return DataUtils.readVarInt(buff);
}
/**
 * Create a new UnsupportedOperationException.
 *
 * @param message the message
 * @return the exception
 */
public static UnsupportedOperationException
        newUnsupportedOperationException(String message) {
    return new UnsupportedOperationException(formatMessage(0, message));
}
/**
 * Get the chunk data as a string.
 *
 * @return the string
 */
public String asString() {
    StringBuilder buff = new StringBuilder(240);
    DataUtils.appendMap(buff, "chunk", id);
    DataUtils.appendMap(buff, "block", block);
    DataUtils.appendMap(buff, "len", len);
    if (maxLen != maxLenLive) {
        DataUtils.appendMap(buff, "liveMax", maxLenLive);
    }
    if (pageCount != pageCountLive) {
        DataUtils.appendMap(buff, "livePages", pageCountLive);
    }
    DataUtils.appendMap(buff, "map", mapId);
    DataUtils.appendMap(buff, "max", maxLen);
    if (next != 0) {
        DataUtils.appendMap(buff, "next", next);
    }
    DataUtils.appendMap(buff, "pages", pageCount);
    DataUtils.appendMap(buff, "root", metaRootPos);
    DataUtils.appendMap(buff, "time", time);
    if (unused != 0) {
        DataUtils.appendMap(buff, "unused", unused);
    }
    DataUtils.appendMap(buff, "version", version);
    return buff.toString();
}
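With hypothetical values, the serialized chunk metadata is a single comma-separated line with all numbers in hex; the liveMax, livePages, next, and unused entries only appear when they carry information. For example:

    chunk:1,block:2,len:1,map:6,max:1c0,pages:2,root:4000004f8c,time:1fc,version:1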
/**
 * Get the position of this page. The following information is encoded in
 * the position: the chunk id, the offset, the maximum length, and the type
 * (node or leaf).
 *
 * @param chunkId the chunk id
 * @param offset the offset
 * @param length the length
 * @param type the page type (1 for node, 0 for leaf)
 * @return the position
 */
public static long getPagePos(int chunkId, int offset,
        int length, int type) {
    long pos = (long) chunkId << 38;
    pos |= (long) offset << 6;
    pos |= encodeLength(length) << 1;
    pos |= type;
    return pos;
}
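The shifts place the chunk id in bits 38 and up, the offset in bits 6-37, a 5-bit length code in bits 1-5, and the type in bit 0. A round-trip sketch using the companion DataUtils getters (values illustrative; encodeLength stores only a size class, so just an upper bound on the length can be recovered):

    long pos = DataUtils.getPagePos(3, 0x80, 100, DataUtils.PAGE_TYPE_NODE);
    assert DataUtils.getPageChunkId(pos) == 3;
    assert DataUtils.getPageOffset(pos) == 0x80;
    assert DataUtils.getPageMaxLength(pos) >= 100; // size class upper bound
    assert DataUtils.getPageType(pos) == DataUtils.PAGE_TYPE_NODE;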