Tabnine Logo
ChainedDynamicProperty$IntProperty.get
Code IndexAdd Tabnine to your IDE (free)

How to use
get
method
in
com.netflix.config.ChainedDynamicProperty$IntProperty

Best Java code snippets using com.netflix.config.ChainedDynamicProperty$IntProperty.get (Showing top 20 results out of 315)

origin: Netflix/EVCache

/**
 * Returns the number of discrete hashes that should be defined for each
 * node in the continuum.
 *
 * @return NUM_REPS repetitions.
 */
public int getNodeRepetitions() {
  // bucketSize is a dynamic property; read it fresh on every call.
  final Integer repetitions = bucketSize.get();
  return repetitions.intValue();
}
origin: Netflix/EVCache

if(log.isDebugEnabled()) log.debug("Keys - " + keys);
final Map<String, CachedData> dataMap = evcacheMemcachedClient.asyncGetBulk(keys, chunkingTranscoder, null, "GetAllChunksOperation")
    .getSome(readTimeout.get().intValue(), TimeUnit.MILLISECONDS, false, false);
origin: Netflix/EVCache

/**
 * Looks up how the value for {@code key} is stored: unchunked, chunked, or absent.
 *
 * Fetches both the plain key and the first-chunk metadata key ({@code key + "_00"})
 * in a single bulk call. If the plain key is present the value is unchunked; if only
 * the metadata key is present, the remaining chunk keys are derived from its
 * {@link ChunkInfo}. Returns {@code null} when neither key exists or on any error.
 */
private <T> ChunkDetails<T> getChunkDetails(String key) {
  final String metadataKey = key + "_00";
  final List<String> lookupKeys = new ArrayList<String>(2);
  lookupKeys.add(key);
  lookupKeys.add(metadataKey);
  try {
    final Map<String, CachedData> metadataMap = evcacheMemcachedClient
        .asyncGetBulk(lookupKeys, chunkingTranscoder, null, "GetChunkMetadataOperation")
        .getSome(readTimeout.get(), TimeUnit.MILLISECONDS, false, false);
    if (metadataMap.containsKey(key)) {
      // Plain key hit: the value was stored without chunking.
      return new ChunkDetails(null, null, false, metadataMap.get(key));
    }
    if (!metadataMap.containsKey(metadataKey)) return null;
    final ChunkInfo ci = getChunkInfo(metadataKey,
        (String) decodingTranscoder.decode(metadataMap.get(metadataKey)));
    if (ci == null) return null;
    // Chunk suffixes start at 1 and are zero-padded to two digits (_01 .. _NN).
    final List<String> chunkKeys = new ArrayList<>();
    for (int chunk = 1; chunk < ci.getChunks(); chunk++) {
      final String pad = (chunk < 10) ? "0" : "";
      chunkKeys.add(ci.getKey() + "_" + pad + chunk);
    }
    return new ChunkDetails(chunkKeys, ci, true, null);
  } catch (Exception e) {
    log.error(e.getMessage(), e);
  }
  return null;
}
origin: Netflix/EVCache

.getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, false, false, scheduler)
.flatMap(metadataMap -> {
  if (metadataMap == null) return null;
    .getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, false, false, scheduler)
    .map(dataMap -> {
      for (Entry<ChunkInfo, Pair<List<String>, byte[]>> entry : responseMap.entrySet()) {
origin: Netflix/EVCache

public <T> EVCacheOperationFuture<T> asyncGet(final String key, final Transcoder<T> tc, EVCacheGetOperationListener<T> listener) {
  final CountDownLatch latch = new CountDownLatch(1);
  final EVCacheOperationFuture<T> rv = new EVCacheOperationFuture<T>(key, latch, new AtomicReference<T>(null), readTimeout.get().intValue(), executorService, appName, serverGroup);
  final Stopwatch operationDuration = getTimer(GET_OPERATION_STRING).start();
  Operation op = opFact.get(key, new GetOperation.Callback() {
origin: Netflix/EVCache

.getSome(readTimeout.get(), TimeUnit.MILLISECONDS, false, false, scheduler)
.map(dataMap -> {
  if (dataMap.size() != ci.getChunks() - 1) {
origin: Netflix/EVCache

.getSome(readTimeout.get(), TimeUnit.MILLISECONDS, false, false);
origin: Netflix/EVCache

} else if(_cacheDuration.get().intValue() > 0) {
  builder = builder.expireAfterWrite(_cacheDuration.get(), TimeUnit.MILLISECONDS);
origin: Netflix/EVCache

final String hKey = getHashedKey(key);
if(ignoreTouch.get()) {
  final Single<Object> value = _client.asyncGet(hKey, evcacheValueTranscoder, null).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF, scheduler);
  return value.flatMap(r -> {
    final CASValue<Object> rObj = (CASValue<Object>)r;
  final Single<CASValue<Object>> value = _client.asyncGetAndTouch(hKey, timeToLive, evcacheValueTranscoder).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF, scheduler);
  if(value != null ) {
    return value.flatMap(r -> {
  .get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF, scheduler)
  .map(value -> (value == null) ? null : value.getValue());
origin: Netflix/EVCache

private CachedData[] createChunks(CachedData cd, String key) {
  final int cSize = chunkSize.get();
  if ((key.length() + 3) > cSize) throw new IllegalArgumentException("The chunksize " + cSize
      + " is smaller than the key size. Will not be able to proceed. key size = "
origin: Netflix/EVCache

  hashKeys.add(hKey);
final Single<Map<String, Object>> vals = evcacheMemcachedClient.asyncGetBulk(hashKeys, evcacheValueTranscoder, null, "BulkOperation").getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF, scheduler);
if(vals != null ) {
  return vals.flatMap(r -> {
  .getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF, scheduler);
origin: Netflix/EVCache

  hashKeys.add(hKey);
final Map<String, Object> vals = evcacheMemcachedClient.asyncGetBulk(hashKeys, evcacheValueTranscoder, null, "BulkOperation").getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
if(vals != null && !vals.isEmpty()) {
  returnVal = new HashMap<String, T>(vals.size());
    .getSome(bulkReadTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
origin: Netflix/EVCache

final Object obj;
if(ignoreTouch.get()) {
  obj = _client.asyncGet(hKey, evcacheValueTranscoder, null).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
} else {
  final CASValue<Object> value = _client.asyncGetAndTouch(key, timeToLive, evcacheValueTranscoder).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
  obj = (value == null) ? null : value.getValue();
  returnVal = _client.asyncGet(key, tc, null).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
} else {
  final CASValue<T> value = _client.asyncGetAndTouch(key, timeToLive, tc).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
  returnVal = (value == null) ? null : value.getValue();
origin: Netflix/EVCache

if (dataSize > chunkSize.get()) {
  final CachedData[] cd = createChunks(value, key);
  final int len = cd.length;
origin: Netflix/EVCache

if (enableChunking.get() && dataSize > chunkSize.get()) {
  final CachedData[] cd = createChunks(value, key);
  final int len = cd.length;
origin: Netflix/EVCache

/**
 * Waits for the node's write queue to drain below {@code maxWriteQueueSize}
 * before allowing a write for {@code key}.
 *
 * Sleeps {@code writeBlock} ms between checks; after more than 4 blocked
 * attempts the node is considered inactive, is shut down, and {@code false}
 * is returned so the write is dropped. Non-EVCacheNodeImpl nodes are always
 * accepted.
 *
 * @param node the memcached node the write is destined for
 * @param key  the cache key, used only for logging
 * @return {@code true} if the write may proceed, {@code false} if it should be dropped
 * @throws EVCacheException if the waiting thread is interrupted
 */
private boolean ensureWriteQueueSize(MemcachedNode node, String key) throws EVCacheException {
  if (node instanceof EVCacheNodeImpl) {
    final EVCacheNodeImpl evcNode = (EVCacheNodeImpl) node;
    int i = 0;
    while (true) {
      final int size = evcNode.getWriteQueueSize();
      final boolean canAddToOpQueue = size < maxWriteQueueSize;
      if (log.isDebugEnabled()) log.debug("App : " + appName + "; zone : " + zone + "; key : " + key
          + "; WriteQSize : " + size);
      if (canAddToOpQueue) break;
      EVCacheMetricsFactory.getCounter("EVCacheClient-" + appName + "-WRITE_BLOCK", evcNode.getBaseTags()).increment();
      try {
        Thread.sleep(writeBlock.get());
      } catch (InterruptedException e) {
        // Restore the interrupt status so callers further up the stack can
        // still observe the interruption after we wrap it.
        Thread.currentThread().interrupt();
        throw new EVCacheException("Thread was Interrupted", e);
      }
      if(i++ > 3) {
        EVCacheMetricsFactory.getCounter("EVCacheClient-" + appName + "-INACTIVE_NODE", evcNode.getBaseTags()).increment();
        if (log.isDebugEnabled()) log.debug("Node : " + evcNode + " for app : " + appName + "; zone : "
            + zone + " is not active. Will Fail Fast and the write will be dropped for key : " + key);
        evcNode.shutdown();
        return false;
      }
    }
  }
  return true;
}
origin: Netflix/EVCache

/**
 * Asynchronously fetches and decodes the value for {@code key} on the given scheduler.
 *
 * Dispatches to one of three read paths: chunked reassembly, hashed-key lookup
 * (with a collision check against the embedded original key), or a plain get.
 *
 * @return a Single yielding the decoded value, or {@code null} when a hashed
 *         lookup misses or detects a key-hash collision
 */
public <T> Single<T> get(String key, Transcoder<T> tc, boolean _throwException, boolean hasZF, boolean chunked, Scheduler scheduler)  throws Exception {
  if (chunked) {
    return assembleChunks(key, _throwException, 0, tc, hasZF, scheduler);
  }  else if(shouldHashKey()) {
    final String hKey = getHashedKey(key);
    final Object obj = evcacheMemcachedClient.asyncGet(hKey, evcacheValueTranscoder, null).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
    if(obj instanceof EVCacheValue) {
      // instanceof already guarantees non-null, so the former "val == null"
      // branch was unreachable and has been removed.
      final EVCacheValue val = (EVCacheValue)obj;
      if(!(val.getKey().equals(key))) {
        // Two distinct keys hashed to the same hKey: treat as a miss.
        EVCacheMetricsFactory.increment(appName, null, serverGroup.getName(), appName + "-KEY_HASH_COLLISION");
        return null;
      }
      final CachedData cd = new CachedData(val.getFlags(), val.getValue(), CachedData.MAX_SIZE);
      if(tc == null) {
        return Single.just((T)evcacheMemcachedClient.getTranscoder().decode(cd));
      } else {
        return Single.just(tc.decode(cd));
      }
    } else {
      return null;
    }
  } else {
    return evcacheMemcachedClient.asyncGet(key, tc, null)
      .get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF, scheduler);
  }
}
origin: Netflix/EVCache

/**
 * Reactive variant of the chunk-details lookup: resolves whether {@code key}
 * is stored unchunked, chunked, or not at all, on the supplied scheduler.
 *
 * The bulk get covers the plain key and the first-chunk metadata key
 * ({@code key + "_00"}); when only the metadata key is found, the remaining
 * chunk keys are derived from its {@link ChunkInfo}. The Single yields
 * {@code null} when neither key exists or the chunk info cannot be parsed.
 */
private <T> Single<ChunkDetails<T>> getChunkDetails(String key, Scheduler scheduler) {
  final String metadataKey = key + "_00";
  final List<String> lookupKeys = new ArrayList<>(2);
  lookupKeys.add(key);
  lookupKeys.add(metadataKey);
  return evcacheMemcachedClient
    .asyncGetBulk(lookupKeys, chunkingTranscoder, null, "GetChunkMetadataOperation")
    .getSome(readTimeout.get(), TimeUnit.MILLISECONDS, false, false, scheduler)
    .map(metadataMap -> {
      if (metadataMap.containsKey(key)) {
        // Plain key hit: stored without chunking.
        return new ChunkDetails(null, null, false, metadataMap.get(key));
      }
      if (!metadataMap.containsKey(metadataKey)) return null;
      final ChunkInfo ci = getChunkInfo(metadataKey,
          (String) decodingTranscoder.decode(metadataMap.get(metadataKey)));
      if (ci == null) return null;
      // Chunk suffixes run _01 .. _NN, zero-padded to two digits.
      final List<String> chunkKeys = new ArrayList<>();
      for (int idx = 1; idx < ci.getChunks(); idx++) {
        chunkKeys.add(ci.getKey() + "_" + ((idx < 10) ? "0" : "") + idx);
      }
      return new ChunkDetails(chunkKeys, ci, true, null);
    });
}
origin: Netflix/EVCache

/**
 * Synchronously fetches and decodes the value for {@code key}.
 *
 * Dispatches to one of three read paths: chunked reassembly, hashed-key lookup
 * (with a collision check against the embedded original key), or a plain get.
 *
 * @return the decoded value, or {@code null} on a hashed-lookup miss or a
 *         detected key-hash collision
 */
public <T> T get(String key, Transcoder<T> tc, boolean _throwException, boolean hasZF, boolean chunked) throws Exception {
  if (chunked) {
    // NOTE(review): this passes a hard-coded false for throwException while the
    // Scheduler overload forwards _throwException — confirm this is intentional.
    return assembleChunks(key, false, 0, tc, hasZF);
  } else if(shouldHashKey()) {
    final String hKey = getHashedKey(key);
    final Object obj = evcacheMemcachedClient.asyncGet(hKey, evcacheValueTranscoder, null).get(readTimeout.get(), TimeUnit.MILLISECONDS, _throwException, hasZF);
    if(obj instanceof EVCacheValue) {
      // instanceof already guarantees non-null, so the former "val == null"
      // branch was unreachable and has been removed.
      final EVCacheValue val = (EVCacheValue)obj;
      if(!(val.getKey().equals(key))) {
        // Two distinct keys hashed to the same hKey: treat as a miss.
        EVCacheMetricsFactory.increment(appName, null, serverGroup.getName(), appName + "-KEY_HASH_COLLISION");
        return null;
      }
      final CachedData cd = new CachedData(val.getFlags(), val.getValue(), CachedData.MAX_SIZE);
      if(tc == null) {
        return (T)evcacheMemcachedClient.getTranscoder().decode(cd);
      } else {
        return tc.decode(cd);
      }
    } else {
      return null;
    }
  } else {
    return evcacheMemcachedClient.asyncGet(key, tc, null).get(readTimeout.get(),
        TimeUnit.MILLISECONDS, _throwException, hasZF);
  }
}
origin: Netflix/EVCache

/**
 * Publishes read/write queue-depth gauges for every client in every server
 * group, and — when enabled — reconnects any node whose read queue has
 * reached the configured full threshold.
 */
private void updateQueueStats() {
  for (ServerGroup group : memcachedInstancesByServerGroup.keySet()) {
    final List<EVCacheClient> groupClients = memcachedInstancesByServerGroup.get(group);
    for (EVCacheClient client : groupClients) {
      final int writeLen = client.getWriteQueueLength();
      EVCacheMetricsFactory.getLongGauge("EVCacheClientPool-WriteQueueSize", client.getTagList()).set(Long.valueOf(writeLen));
      final int readLen = client.getReadQueueLength();
      EVCacheMetricsFactory.getLongGauge("EVCacheClientPool-ReadQueueSize", client.getTagList()).set(Long.valueOf(readLen));
      if (!refreshConnectionOnReadQueueFull.get()) continue;
      // Read queue is being policed: recycle any node that has backed up.
      for (MemcachedNode node : client.getNodeLocator().getAll()) {
        if (!(node instanceof EVCacheNodeImpl)) continue;
        final EVCacheNodeImpl evcNode = (EVCacheNodeImpl) node;
        if (evcNode.getReadQueueSize() >= refreshConnectionOnReadQueueFullSize.get().intValue()) {
          EVCacheMetricsFactory.getCounter("EVCacheClientPool-REFRESH_ON_QUEUE_FULL", evcNode.getBaseTags()).increment();
          client.getEVCacheMemcachedClient().reconnectNode(evcNode);
        }
      }
    }
  }
}
com.netflix.config.ChainedDynamicProperty$IntProperty.get

Popular methods of ChainedDynamicProperty$IntProperty

  • <init>
  • checkAndFlip
  • getName
  • getValue
  • addCallback

Popular in Java

  • Making http requests using okhttp
  • requestLocationUpdates (LocationManager)
  • getExternalFilesDir (Context)
  • setRequestProperty (URLConnection)
  • ConnectException (java.net)
    A ConnectException is thrown if a connection cannot be established to a remote host on a specific po
  • Comparator (java.util)
    A Comparator is used to compare two objects to determine their ordering with respect to each other.
  • Scanner (java.util)
    A parser that parses a text string of primitive types and strings with the help of regular expressio
  • Servlet (javax.servlet)
    Defines methods that all servlets must implement. A servlet is a small Java program that runs within
  • JOptionPane (javax.swing)
  • Options (org.apache.commons.cli)
    Main entry-point into the library. Options represents a collection of Option objects, which describ
  • Top plugins for Android Studio
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now