@Override
public Hasher putInt(int i) {
    // Fan the value out to every underlying hasher so all digests stay in sync.
    for (Hasher delegate : hashers) {
        delegate.putInt(i);
    }
    return this;
}
/**
 * Feeds a single int into the underlying hasher.
 *
 * @param value the value to add to the hash
 * @return this hasher, for call chaining
 */
public StatisticsHasher putInt(int value) {
    hasher.putInt(value);
    return this;
}
@Override
public Hasher putInt(int i) {
    // Broadcast the int to each wrapped hasher; they all consume the same input.
    for (Hasher h : hashers) {
        h.putInt(i);
    }
    return this;
}
@Override
public Hasher putInt(int i) {
    // Every wrapped hasher receives the same int so their digests track together.
    for (Hasher wrapped : hashers) {
        wrapped.putInt(i);
    }
    return this;
}
@Override
public HashCode hashInt(int input) {
    // An int contributes exactly 4 bytes, so size the hasher accordingly.
    Hasher hasher = newHasher(4);
    hasher.putInt(input);
    return hasher.hash();
}
/**
 * Hashes an optional int: the presence flag is always recorded, and the value
 * slot is filled with 0 when absent so the byte layout stays fixed either way.
 *
 * @param present whether the value is actually present
 * @param value   the value to hash when present (ignored otherwise)
 * @return this hasher, for call chaining
 */
public StatisticsHasher putOptionalInt(boolean present, int value) {
    hasher.putBoolean(present);
    if (present) {
        hasher.putInt(value);
    } else {
        hasher.putInt(0);
    }
    return this;
}
/**
 * Hashes a long array as a length prefix followed by each element, so that
 * different partitionings of the same values produce different digests.
 */
public void putLongs(long[] array) {
    hasher.putInt(array.length);
    for (long value : array) {
        hasher.putLong(value);
    }
}
@Override
public HashCode hashInt(int input) {
    // 4 = number of bytes an int feeds into the hasher.
    return newHasher(4)
        .putInt(input)
        .hash();
}
/**
 * Creates an example from a target feature and its predictor features, and
 * precomputes a hash code over the non-null features and target.
 *
 * @param target   the target feature; may be null
 * @param features the predictor features; the array must not be null, but
 *                 individual entries may be (null entries are skipped)
 * @throws IllegalArgumentException if {@code features} is null
 */
public Example(Feature target, Feature... features) {
    Preconditions.checkArgument(features != null);
    this.features = features;
    this.target = target;
    Hasher hasher = HASH.newHasher();
    for (Feature feature : features) {
        if (feature != null) {
            hasher.putInt(feature.hashCode());
        }
    }
    if (target != null) {
        hasher.putInt(target.hashCode());
    }
    // BUG FIX: the original called hasher.hashCode(), which is Object's
    // identity hash of the Hasher instance and ignores everything fed into it
    // (so equal examples got different cached hashes). hash().asInt() is the
    // digest actually computed from the features.
    cachedHashCode = hasher.hash().asInt();
}
public StatisticsHasher putOptionalBigDecimal(BigDecimal value) { hasher.putBoolean(value != null); if (value != null) { // this should really be 128 bits hasher.putInt(value.scale()); hasher.putBytes(value.unscaledValue().toByteArray()); } return this; }
@Override
public HashCode hashInt(int input) {
    // Size hint of 4 bytes: the width of the single int being hashed.
    Hasher h = newHasher(4);
    return h.putInt(input).hash();
}
/**
 * Hashes the scheduled arrival/departure times. Used in creating stable IDs
 * for trips across GTFS feed versions.
 * Hops are hashed rather than stops because:
 * a) arrival at stop zero and departure from the last stop are irrelevant, and
 * b) this hash function needs to stay stable when users switch from 0.10.x to 1.0.
 */
public HashCode semanticHash(final HashFunction hashFunction) {
    final Hasher hasher = hashFunction.newHasher();
    final int hopCount = getNumStops() - 1;
    for (int hop = 0; hop < hopCount; hop++) {
        // A hop is (departure from stop i, arrival at stop i + 1).
        hasher.putInt(getScheduledDepartureTime(hop));
        hasher.putInt(getScheduledArrivalTime(hop + 1));
    }
    return hasher.hash();
}
}
private static void assertHashIntEquivalence(HashFunction hashFunction, Random random) {
    // hashInt(x) must behave exactly like newHasher().putInt(x).hash();
    // check the equivalence for one random value.
    final int value = random.nextInt();
    assertEquals(hashFunction.hashInt(value), hashFunction.newHasher().putInt(value).hash());
}
public void putRowKeyToHLLOld(String[] row) { //generate hash for each row key column byte[][] rowHashCodes = new byte[nRowKey][]; for (int i = 0; i < nRowKey; i++) { Hasher hc = hf.newHasher(); String colValue = row[i]; if (colValue != null) { rowHashCodes[i] = hc.putString(colValue).hash().asBytes(); } else { rowHashCodes[i] = hc.putInt(0).hash().asBytes(); } } // use the row key column hash to get a consolidated hash for each cuboid for (int i = 0; i < cuboidIds.length; i++) { Hasher hc = hf.newHasher(); for (int position = 0; position < allCuboidsBitSet[i].length; position++) { hc.putBytes(rowHashCodes[allCuboidsBitSet[i][position]]); } allCuboidsHLL[i].add(hc.hash().asBytes()); } }
private void putRowKeyToHLLOld(String[] row) { //generate hash for each row key column byte[][] rowHashCodes = new byte[nRowKey][]; for (int i = 0; i < nRowKey; i++) { Hasher hc = hf.newHasher(); String colValue = row[rowkeyColIndex[i]]; if (colValue != null) { rowHashCodes[i] = hc.putString(colValue).hash().asBytes(); } else { rowHashCodes[i] = hc.putInt(0).hash().asBytes(); } } // user the row key column hash to get a consolidated hash for each cuboid for (int i = 0, n = cuboidsBitSet.length; i < n; i++) { Hasher hc = hf.newHasher(); for (int position = 0; position < cuboidsBitSet[i].length; position++) { hc.putBytes(rowHashCodes[cuboidsBitSet[i][position]]); } cuboidsHLL[i].add(hc.hash().asBytes()); } }
private void putRowKeyToHLLOld(String[] row) { //generate hash for each row key column byte[][] rowHashCodes = new byte[nRowKey][]; for (int i = 0; i < nRowKey; i++) { Hasher hc = hf.newHasher(); String colValue = row[rowkeyColIndex[i]]; if (colValue != null) { rowHashCodes[i] = hc.putString(colValue).hash().asBytes(); } else { rowHashCodes[i] = hc.putInt(0).hash().asBytes(); } } // user the row key column hash to get a consolidated hash for each cuboid for (int i = 0, n = cuboidsBitSet.length; i < n; i++) { Hasher hc = hf.newHasher(); for (int position = 0; position < cuboidsBitSet[i].length; position++) { hc.putBytes(rowHashCodes[cuboidsBitSet[i][position]]); } cuboidsHLL[i].add(hc.hash().asBytes()); } }
public void testPutAfterHash() {
    Hasher hasher = Hashing.hmacMd5(MD5_KEY).newHasher();
    String digest =
        hasher.putString("The quick brown fox jumps over the lazy dog", UTF_8).hash().toString();
    assertEquals("9753980fe94daa8ecaa82216519393a9", digest);
    // Once hash() has been called, the hasher must reject further input.
    try {
        hasher.putInt(42);
        fail();
    } catch (IllegalStateException expected) {
        // expected
    }
}
public void testPutAfterHash() {
    Hasher sha1 = Hashing.sha1().newHasher();
    String digest =
        sha1.putString("The quick brown fox jumps over the lazy dog", Charsets.UTF_8)
            .hash()
            .toString();
    assertEquals("2fd4e1c67a2d28fced849ee1bb76e7391b93eb12", digest);
    // A finished hasher must throw if more data is pushed into it.
    try {
        sha1.putInt(42);
        fail();
    } catch (IllegalStateException expected) {
        // expected
    }
}
// Feed the pending int into the hasher. NOTE(review): surrounding context is
// not visible here — presumably one step of a larger digest-building
// sequence; confirm against the enclosing method.
hasher.putInt(intToPut);
/**
 * Hashes our data into a consistent value.
 *
 * @param scope            the application scope whose application id seeds the hash
 * @param directedEdgeMeta supplies the node metadata and edge types to hash
 * @param shardEntryGroup  currently unused by the hash computation
 * @return the hasher with all data fed in (not yet finalized)
 */
protected Hasher doHash(final ApplicationScope scope, final DirectedEdgeMeta directedEdgeMeta,
                        final ShardEntryGroup shardEntryGroup) {
    final Hasher hasher = MURMUR_128.newHasher();
    addToHash(hasher, scope.getApplication());
    // Each node contributes its id plus its node-type discriminator.
    for (DirectedEdgeMeta.NodeMeta nodeMeta : directedEdgeMeta.getNodes()) {
        addToHash(hasher, nodeMeta.getId());
        hasher.putInt(nodeMeta.getNodeType().getStorageValue());
    }
    // Fold in every edge type string.
    for (String type : directedEdgeMeta.getTypes()) {
        hasher.putString(type, CHARSET);
    }
    return hasher;
}