/**
 * Computes the memory limits for hash table flush (spill).
 */
private void computeMemoryLimits() {
  JavaDataModel model = JavaDataModel.get();

  fixedHashEntrySize =
      model.hashMapEntry() +
      keyWrappersBatch.getKeysFixedSize() +
      aggregationBatchInfo.getAggregatorsFixedSize();

  MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
  maxMemory = memoryMXBean.getHeapMemoryUsage().getMax();
  memoryThreshold = conf.getMemoryThreshold();
  // Tests may leave this uninitialized, so default it to 1.
  if (memoryThreshold == 0.0f) {
    memoryThreshold = 1.0f;
  }
  maxHashTblMemory = (int) (maxMemory * memoryThreshold);

  if (LOG.isDebugEnabled()) {
    LOG.debug(String.format("maxMemory:%dMb (%d * %f) fixSize:%d (key:%d agg:%d)",
        maxHashTblMemory / 1024 / 1024,
        maxMemory / 1024 / 1024,
        memoryThreshold,
        fixedHashEntrySize,
        keyWrappersBatch.getKeysFixedSize(),
        aggregationBatchInfo.getAggregatorsFixedSize()));
  }
}
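For context, the budget arithmetic above reduces to reading the JVM heap ceiling and applying a fractional threshold. A minimal, self-contained sketch of just that calculation follows; the class name and the threshold value are illustrative, not taken from the code above.

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;

public class MemoryLimitSketch {
  public static void main(String[] args) {
    MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
    // Heap ceiling (-Xmx); getMax() may return -1 if the ceiling is undefined.
    long maxMemory = memoryMXBean.getHeapMemoryUsage().getMax();
    // Fraction of the heap allowed for the hash table (illustrative value).
    float memoryThreshold = 0.9f;
    if (memoryThreshold == 0.0f) {
      memoryThreshold = 1.0f; // treat "unset" as "use the whole heap"
    }
    long maxHashTblMemory = (long) (maxMemory * memoryThreshold);
    System.out.printf("heap max: %d MB, hash table budget: %d MB%n",
        maxMemory / 1024 / 1024, maxHashTblMemory / 1024 / 1024);
  }
}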
private static void addHardcodedEstimators(HashMap<Class<?>, ObjectEstimator> byType) {
  // Add hacks for well-known collections and maps to avoid estimating them.
  byType.put(ArrayList.class,
      new CollectionEstimator(memoryModel.arrayList(), memoryModel.ref()));
  byType.put(LinkedList.class,
      new CollectionEstimator(memoryModel.linkedListBase(), memoryModel.linkedListEntry()));
  byType.put(HashSet.class,
      new CollectionEstimator(memoryModel.hashSetBase(), memoryModel.hashSetEntry()));
  byType.put(HashMap.class,
      new CollectionEstimator(memoryModel.hashMapBase(), memoryModel.hashMapEntry()));
}
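The CollectionEstimator used above is Hive-internal and its body is not shown here. A minimal sketch of the (base size, per-entry size) idea it appears to capture might look like the following; the class and method names are made up for illustration only.

import java.util.Collection;

class CollectionEstimatorSketch {
  private final int baseSize;      // fixed overhead of the collection object itself
  private final int perEntrySize;  // overhead added for each contained element

  CollectionEstimatorSketch(int baseSize, int perEntrySize) {
    this.baseSize = baseSize;
    this.perEntrySize = perEntrySize;
  }

  /** Estimates shallow memory: fixed base plus per-entry overhead (element contents excluded). */
  long estimate(Collection<?> c) {
    return baseSize + (long) c.size() * perEntrySize;
  }
}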
/**
 * Computes the memory limits for hash table flush (spill).
 */
private void computeMemoryLimits() {
  JavaDataModel model = JavaDataModel.get();

  fixedHashEntrySize =
      model.hashMapEntry() +
      keyWrappersBatch.getKeysFixedSize() +
      aggregationBatchInfo.getAggregatorsFixedSize();

  MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
  maxMemory = isLlap ? getConf().getMaxMemoryAvailable() : memoryMXBean.getHeapMemoryUsage().getMax();
  memoryThreshold = conf.getMemoryThreshold();
  // Tests may leave this uninitialized, so default it to 1.
  if (memoryThreshold == 0.0f) {
    memoryThreshold = 1.0f;
  }
  maxHashTblMemory = (int) (maxMemory * memoryThreshold);

  if (LOG.isDebugEnabled()) {
    LOG.debug("GBY memory limits - isLlap: {} maxMemory: {} ({} * {}) fixSize:{} (key:{} agg:{})",
        isLlap,
        LlapUtil.humanReadableByteCount(maxHashTblMemory),
        LlapUtil.humanReadableByteCount(maxMemory),
        memoryThreshold,
        fixedHashEntrySize,
        keyWrappersBatch.getKeysFixedSize(),
        aggregationBatchInfo.getAggregatorsFixedSize());
  }
}
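The flush decision itself is not shown in either variant above. Purely as an illustrative assumption, a budget computed this way would typically be compared against a running estimate of the table's footprint; the sketch below shows one possible shape of that check, with all names hypothetical except the two fields named in the comments.

// Hypothetical sketch only: how a budget like maxHashTblMemory and a per-entry
// fixed size could drive a flush decision. This is not the actual flush logic.
final class FlushCheckSketch {
  private final long maxHashTblMemory;   // budget computed as maxMemory * memoryThreshold
  private final long fixedHashEntrySize; // fixed per-entry cost (map entry + key + aggregators)

  FlushCheckSketch(long maxHashTblMemory, long fixedHashEntrySize) {
    this.maxHashTblMemory = maxHashTblMemory;
    this.fixedHashEntrySize = fixedHashEntrySize;
  }

  /** True when the estimated table footprint exceeds the configured budget. */
  boolean shouldFlush(long numEntries, long avgVariableSizePerEntry) {
    long estimated = numEntries * (fixedHashEntrySize + avgVariableSizePerEntry);
    return estimated > maxHashTblMemory;
  }
}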