protected Cache<K,V> createCache(final ControllerServiceInitializationContext context) throws IOException, InterruptedException {
    final int capacity = context.getPropertyValue(CACHE_SIZE).asInteger();
    return new LRUCache<K,V>(capacity);
}
protected Cache<String, Record> createCache(final ControllerServiceInitializationContext context) throws IOException, InterruptedException {
    final int capacity = context.getPropertyValue(CACHE_SIZE).asInteger();
    return new LRUCache<String, Record>(capacity);
}
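For reference, a hypothetical sketch of the CACHE_SIZE descriptor read above; Logisland property descriptors follow a NiFi-style builder, but the property key, default value, and validator chosen here are assumptions, not the actual declaration:

import com.hurence.logisland.component.PropertyDescriptor;
import com.hurence.logisland.validator.StandardValidators;

// Hypothetical sketch: the property backing context.getPropertyValue(CACHE_SIZE).
public static final PropertyDescriptor CACHE_SIZE = new PropertyDescriptor.Builder()
        .name("cache.size")                                   // assumed property key
        .description("Maximum number of entries kept in the LRU cache.")
        .required(false)
        .defaultValue("16384")                                // assumed default
        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
        .build();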
@Override
protected void setupDynamicProperties(ProcessContext context) {
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        if (!entry.getKey().isDynamic()) {
            continue;
        }

        // Rewrite the user expression: cache(id) becomes a cache lookup keyed by a
        // StandardRecord id, and .value becomes a typed access to the record value field.
        String key = entry.getKey().getName();
        String value = entry.getValue()
                .replaceAll("cache\\((\\S*\\))",
                        "cache.get(\"test\", new com.hurence.logisland.record.StandardRecord().setId($1)")
                .replaceAll("\\.value",
                        ".getField(com.hurence.logisland.record.FieldDictionary.RECORD_VALUE).asDouble()");

        // Wrap the rewritten expression in a guarded script: any evaluation
        // error is swallowed and leaves match == false.
        StringBuilder sb = new StringBuilder();
        sb.append("var match=false;\n")
                .append("try {\n")
                .append("if( ").append(value).append(" ) { match=true; }\n")
                .append("} catch(error) {}");
        dynamicTagValuesMap.put(key, sb.toString());
    }
    defaultCollection = context.getPropertyValue(DATASTORE_CACHE_COLLECTION).asString();
    recordTTL = context.getPropertyValue(RECORD_TTL).asInteger();
}
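As a worked example (the property name and expression are hypothetical): a dynamic property named high_cpu with the value record_value > 90.0 contains no cache(...) call, so only the guard wrapping applies, and the script stored under "high_cpu" in dynamicTagValuesMap is exactly:

var match=false;
try {
if( record_value > 90.0 ) { match=true; }
} catch(error) {}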
protected void createChronixStorage(ControllerServiceInitializationContext context) throws ProcessException {
    if (updater != null) {
        return;
    }

    // set up a single solr updater on the shared executor
    int batchSize = context.getPropertyValue(BATCH_SIZE).asInteger();
    long flushInterval = context.getPropertyValue(FLUSH_INTERVAL).asLong();
    updater = new ChronixUpdater(solr, queue, createMetricsTypeMapping(context), batchSize, flushInterval);
    executorService.execute(updater);
}
@Override
public Collection<Record> process(ProcessContext context, Collection<Record> records) {
    SamplingAlgorithm algorithm = SamplingAlgorithm.valueOf(
            context.getPropertyValue(SAMPLING_ALGORITHM).asString().toUpperCase());
    String valueFieldName = context.getPropertyValue(RECORD_VALUE_FIELD).asString();
    String timeFieldName = context.getPropertyValue(RECORD_TIME_FIELD).asString();
    int parameter = context.getPropertyValue(SAMPLING_PARAMETER).asInteger();
    Sampler sampler = SamplerFactory.getSampler(algorithm, valueFieldName, timeFieldName, parameter);

    return sampler.sample(new ArrayList<>(records)).stream()
            .map(r -> r.setField("is_sampled", FieldType.BOOLEAN, true))
            .collect(Collectors.toList());
}
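A minimal driver sketch for the method above, assuming the field names record_value/record_time and an already-configured processor and context (all assumptions); every record that survives sampling comes back tagged with is_sampled = true:

// Hedged sketch: feed 1000 synthetic points through the sampler.
List<Record> input = new ArrayList<>();
long now = System.currentTimeMillis();
for (int i = 0; i < 1000; i++) {
    input.add(new StandardRecord("timeserie")              // record type is illustrative
            .setField("record_value", FieldType.DOUBLE, Math.sin(i / 10.0))
            .setField("record_time", FieldType.LONG, now + i));
}
Collection<Record> sampled = processor.process(context, input);
// 'sampled' holds the retained subset, each record with is_sampled == true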
final AvroSerializer avroSerializer = new AvroSerializer(schema);
final int minEventsCount = context.getPropertyValue(MIN_EVENTS_COUNT).asInteger();
final int maxEventsCount = context.getPropertyValue(MAX_EVENTS_COUNT).asInteger();
final int eventsCount = randomData.nextInt(minEventsCount, maxEventsCount);
logger.debug("generating {} events in [{},{}]", eventsCount, minEventsCount, maxEventsCount);
@Override
public Collection<Record> process(final ProcessContext context, final Collection<Record> records) throws ProcessException {
    final int batchSize = context.getPropertyValue(BATCH_SIZE).asInteger();
final String connectionString = context.getPropertyValue(SOLR_CONNECTION_STRING).asString();
final String collection = context.getPropertyValue(SOLR_COLLECTION).asString();
setSchemaUpdateTimeout(context.getPropertyValue(SCHEMA_UPDATE_TIMEOUT).asInteger());
int batchSize = context.getPropertyValue(BATCH_SIZE).asInteger();
int numConcurrentRequests = context.getPropertyValue(CONCURRENT_REQUESTS).asInteger();
long flushInterval = context.getPropertyValue(FLUSH_INTERVAL).asLong();
updaters = new ArrayList<>(numConcurrentRequests);
userAgentKeep = context.getPropertyValue(USERAGENT_KEEP).asBoolean();
useCache = context.getPropertyValue(CACHE_ENABLED).asBoolean();
cacheSize = context.getPropertyValue(CACHE_SIZE).asInteger();
String tmp = context.getPropertyValue(FIELDS_TO_RETURN).asString();
selectedFields = Arrays.stream(tmp.split(","))
        .map(String::trim)
        .collect(Collectors.toList());
@Override
public void init(final ProcessContext context) {
    this.fieldsNameMapping = getFieldsNameMapping(context);
    this.nbSplitLimit = context.getPropertyValue(NB_SPLIT_LIMIT).asInteger();
    this.isEnabledSplitCounter = context.getPropertyValue(ENABLE_SPLIT_COUNTER).asBoolean();
    this.splitCounterSuffix = context.getPropertyValue(SPLIT_COUNTER_SUFFIX).asString();
}
.setBulkActions(context.getPropertyValue(BATCH_SIZE).asInteger())
.setBulkSize(new ByteSizeValue(context.getPropertyValue(BULK_SIZE).asInteger(), ByteSizeUnit.MB))
.setFlushInterval(TimeValue.timeValueSeconds(context.getPropertyValue(FLUSH_INTERVAL).asInteger()))
.setConcurrentRequests(context.getPropertyValue(CONCURRENT_REQUESTS).asInteger())
.setBackoffPolicy(getBackOffPolicy(context))
.build();
public static JedisConnectionFactory createConnectionFactory(final ControllerServiceInitializationContext context) {
    final String redisMode = context.getPropertyValue(RedisUtils.REDIS_MODE).asString();
    final String connectionString = context.getPropertyValue(RedisUtils.CONNECTION_STRING).asString();
    final Integer dbIndex = context.getPropertyValue(RedisUtils.DATABASE).asInteger();
    final String password = context.getPropertyValue(RedisUtils.PASSWORD).asString();
    final Integer timeout = context.getPropertyValue(RedisUtils.COMMUNICATION_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).intValue();
    final Integer maxRedirects = context.getPropertyValue(RedisUtils.CLUSTER_MAX_REDIRECTS).asInteger();
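A hedged sketch of how these values could feed a standalone-mode factory, using the older mutable JedisConnectionFactory setter API from Spring Data Redis; the "host:port" parsing and the standalone-only branch are assumptions, not the service's actual continuation:

// Hedged sketch: standalone mode only, connectionString assumed to be "host:port".
String[] hostAndPort = connectionString.split(":");
JedisConnectionFactory factory = new JedisConnectionFactory();
factory.setHostName(hostAndPort[0]);
factory.setPort(Integer.parseInt(hostAndPort[1]));
factory.setDatabase(dbIndex);
if (password != null) {
    factory.setPassword(password);
}
factory.setTimeout(timeout);
factory.setPoolConfig(createJedisPoolConfig(context));
factory.afterPropertiesSet();    // initializes the underlying Jedis pool
return factory;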
/**
 * Builds the bulk-request BackoffPolicy from the configured back-off properties.
 */
private BackoffPolicy getBackOffPolicy(ControllerServiceInitializationContext context) {
    BackoffPolicy backoffPolicy = BackoffPolicy.exponentialBackoff();
    if (context.getPropertyValue(BULK_BACK_OFF_POLICY).getRawValue().equals(DEFAULT_EXPONENTIAL_BACKOFF_POLICY.getValue())) {
        backoffPolicy = BackoffPolicy.exponentialBackoff();
    } else if (context.getPropertyValue(BULK_BACK_OFF_POLICY).getRawValue().equals(EXPONENTIAL_BACKOFF_POLICY.getValue())) {
        backoffPolicy = BackoffPolicy.exponentialBackoff(
                TimeValue.timeValueMillis(context.getPropertyValue(BULK_THROTTLING_DELAY).asLong()),
                context.getPropertyValue(BULK_RETRY_NUMBER).asInteger());
    } else if (context.getPropertyValue(BULK_BACK_OFF_POLICY).getRawValue().equals(CONSTANT_BACKOFF_POLICY.getValue())) {
        backoffPolicy = BackoffPolicy.constantBackoff(
                TimeValue.timeValueMillis(context.getPropertyValue(BULK_THROTTLING_DELAY).asLong()),
                context.getPropertyValue(BULK_RETRY_NUMBER).asInteger());
    } else if (context.getPropertyValue(BULK_BACK_OFF_POLICY).getRawValue().equals(NO_BACKOFF_POLICY.getValue())) {
        backoffPolicy = BackoffPolicy.noBackoff();
    }
    return backoffPolicy;
}
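For instance (property values illustrative): selecting the exponential policy with a throttling delay of 500 and a retry number of 3 resolves to BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(500), 3), i.e. up to 3 retries with exponentially growing waits starting near 500 ms; constantBackoff retries with a fixed delay, and noBackoff() fails a rejected bulk request immediately.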
private static JedisPoolConfig createJedisPoolConfig(final ControllerServiceInitializationContext context) {
    final JedisPoolConfig poolConfig = new JedisPoolConfig();
    poolConfig.setMaxTotal(context.getPropertyValue(RedisUtils.POOL_MAX_TOTAL).asInteger());
    poolConfig.setMaxIdle(context.getPropertyValue(RedisUtils.POOL_MAX_IDLE).asInteger());
    poolConfig.setMinIdle(context.getPropertyValue(RedisUtils.POOL_MIN_IDLE).asInteger());
    poolConfig.setBlockWhenExhausted(context.getPropertyValue(RedisUtils.POOL_BLOCK_WHEN_EXHAUSTED).asBoolean());
    poolConfig.setMaxWaitMillis(context.getPropertyValue(RedisUtils.POOL_MAX_WAIT_TIME).asTimePeriod(TimeUnit.MILLISECONDS));
    poolConfig.setMinEvictableIdleTimeMillis(context.getPropertyValue(RedisUtils.POOL_MIN_EVICTABLE_IDLE_TIME).asTimePeriod(TimeUnit.MILLISECONDS));
    poolConfig.setTimeBetweenEvictionRunsMillis(context.getPropertyValue(RedisUtils.POOL_TIME_BETWEEN_EVICTION_RUNS).asTimePeriod(TimeUnit.MILLISECONDS));
    poolConfig.setNumTestsPerEvictionRun(context.getPropertyValue(RedisUtils.POOL_NUM_TESTS_PER_EVICTION_RUN).asInteger());
    poolConfig.setTestOnCreate(context.getPropertyValue(RedisUtils.POOL_TEST_ON_CREATE).asBoolean());
    poolConfig.setTestOnBorrow(context.getPropertyValue(RedisUtils.POOL_TEST_ON_BORROW).asBoolean());
    poolConfig.setTestOnReturn(context.getPropertyValue(RedisUtils.POOL_TEST_ON_RETURN).asBoolean());
    poolConfig.setTestWhileIdle(context.getPropertyValue(RedisUtils.POOL_TEST_WHILE_IDLE).asBoolean());
    return poolConfig;
}
final Integer dbIndex = validationContext.getPropertyValue(RedisUtils.DATABASE).asInteger();
Long maxMemory = context.getPropertyValue(MAX_MEMORY).asLong();
Boolean allowNoBrace = context.getPropertyValue(ALLOW_NO_BRACE).asBoolean();
Integer maxPreparedStatements = context.getPropertyValue(MAX_PREPARED_STATEMENTS).asInteger();