/** Exposes the logging service of the wrapped Hazelcast instance. */
@Override
public LoggingService getLoggingService() {
    return delegatedInstance.getLoggingService();
}
/** Exposes the logging service of the wrapped Hazelcast instance. */
@Override
public LoggingService getLoggingService() {
    return delegatedInstance.getLoggingService();
}
/** Forwards to the delegate's logging service. */
@Override
public LoggingService getLoggingService() {
    return delegate.getLoggingService();
}
/** Obtains the logging service from the underlying Hazelcast instance. */
@Override
public LoggingService getLoggingService() {
    return getHazelcastInstance().getLoggingService();
}
/**
 * Creates the repository on top of the Hazelcast maps that back Jet's job state.
 *
 * @param jetInstance the Jet instance whose underlying Hazelcast instance hosts the maps
 */
public JobRepository(JetInstance jetInstance) {
    HazelcastInstance hz = jetInstance.getHazelcastInstance();
    this.instance = hz;
    this.logger = hz.getLoggingService().getLogger(getClass());
    // One IMap per concern: id allocation, job metadata, execution state, results, snapshots.
    this.randomIds = hz.getMap(RANDOM_IDS_MAP_NAME);
    this.jobRecords = hz.getMap(JOB_RECORDS_MAP_NAME);
    this.jobExecutionRecords = hz.getMap(JOB_EXECUTION_RECORDS_MAP_NAME);
    this.jobResults = hz.getMap(JOB_RESULTS_MAP_NAME);
    this.exportedSnapshotDetailsCache = hz.getMap(EXPORTED_SNAPSHOTS_DETAIL_CACHE);
}
@Override public final ILogger getLogger() { final String name = getClass().getName(); try { return instance.getLoggingService().getLogger(name); } catch (UnsupportedOperationException e) { // HazelcastInstance is instance of HazelcastClient. return Logger.getLogger(name); } } }
@Override public final ILogger getLogger() { final String name = getClass().getName(); try { return instance.getLoggingService().getLogger(name); } catch (UnsupportedOperationException e) { // HazelcastInstance is instance of HazelcastClient. return Logger.getLogger(name); } } }
public final ILogger getLogger() { final String name = getClass().getName(); try { return instance.getLoggingService().getLogger(name); } catch (UnsupportedOperationException e) { // HazelcastInstance is instance of HazelcastClient. return Logger.getLogger(name); } } }
@Override public final ILogger getLogger() { final String name = getClass().getName(); try { return instance.getLoggingService().getLogger(name); } catch (UnsupportedOperationException e) { // HazelcastInstance is instance of HazelcastClient. return Logger.getLogger(name); } } }
private SimpleCacheTest(final int threadCount, final int entryCount, final int valueSize, final int getPercentage, final int putPercentage, final boolean load) { this.threadCount = threadCount; this.entryCount = entryCount; this.valueSize = valueSize; this.getPercentage = getPercentage; this.putPercentage = putPercentage; this.load = load; Config cfg = new XmlConfigBuilder().build(); instance = Hazelcast.newHazelcastInstance(cfg); Hazelcast.newHazelcastInstance(cfg); logger = instance.getLoggingService().getLogger("SimpleCacheTest"); random = new Random(); }
/**
 * Configures the map benchmark and starts a single Hazelcast member
 * from the default XML configuration.
 */
private SimpleMapTest(int threadCount, int entryCount, int valueSize,
                      int getPercentage, int putPercentage, boolean load) {
    this.threadCount = threadCount;
    this.entryCount = entryCount;
    this.valueSize = valueSize;
    this.getPercentage = getPercentage;
    this.putPercentage = putPercentage;
    this.load = load;
    final Config config = new XmlConfigBuilder().build();
    instance = Hazelcast.newHazelcastInstance(config);
    logger = instance.getLoggingService().getLogger("SimpleMapTest");
    random = new Random();
}
// Returns a logger scoped to this processor instance: uses the member's LoggingService
// when a real Jet instance is available, otherwise the standalone factory.
@SuppressFBWarnings(value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE",
        justification = "jetInstance() can be null in TestProcessorContext")
private ILogger getLogger(@Nonnull Context context) {
    // NOTE(review): `getClass() + "."` concatenates Class.toString(), producing a name like
    // "class com.x.Foo.<toString>" rather than "com.x.Foo.<toString>" — confirm whether
    // getClass().getName() was intended; changing it would alter existing log categories.
    return context.jetInstance() != null
            ? context.jetInstance().getHazelcastInstance().getLoggingService().getLogger(getClass() + "." + toString())
            : Logger.getLogger(getClass());
}
// Periodically logs cluster size and throughput until the `running` flag is cleared.
public void run() {
    final ILogger logger = hazelcast.getLoggingService().getLogger(hazelcast.getName());
    while (running) {
        try {
            // Sample window: sleep, then read-and-reset the shared counters.
            Thread.sleep(STATS_SECONDS * 1000);
            int clusterSize = hazelcast.getCluster().getMembers().size();
            Stats currentStats = stats.getAndReset();
            logger.info("Cluster size: " + clusterSize + ", Operations per second: " + (currentStats.total() / STATS_SECONDS));
        } catch (HazelcastInstanceNotActiveException e) {
            // Instance shut down — abort the stats thread.
            throw new RuntimeException(e);
        } catch (Exception e) {
            // NOTE(review): InterruptedException lands here and is rethrown without
            // re-interrupting the thread — confirm this is intended for the stats loop.
            throw new RuntimeException(e);
        }
    }
}
} });
/**
 * Computes the HDFS input splits for the job and assigns them to cluster members.
 *
 * @param context processor context supplying parallelism and cluster topology
 * @throws Exception if split computation fails
 */
@Override
public void init(@Nonnull Context context) throws Exception {
    logger = context.jetInstance().getHazelcastInstance().getLoggingService().getLogger(ReadHdfsP.class);
    InputFormat inputFormat = jobConf.getInputFormat();
    InputSplit[] rawSplits = inputFormat.getSplits(jobConf, context.totalParallelism());
    // Wrap each split with its index so assignments stay traceable.
    IndexedInputSplit[] indexedSplits = new IndexedInputSplit[rawSplits.length];
    for (int i = 0; i < indexedSplits.length; i++) {
        indexedSplits[i] = new IndexedInputSplit(i, rawSplits[i]);
    }
    Address[] memberAddresses = context.jetInstance().getCluster().getMembers().stream()
            .map(Member::getAddress)
            .toArray(Address[]::new);
    assigned = assignSplitsToMembers(indexedSplits, memberAddresses);
    printAssignments(assigned);
}
/**
 * Computes the HDFS input splits for the job and assigns them to cluster members.
 * Checked {@code IOException}s from split computation are rethrown unchecked.
 *
 * @param context processor context supplying parallelism and cluster topology
 */
@Override
public void init(@Nonnull Context context) {
    logger = context.jetInstance().getHazelcastInstance().getLoggingService().getLogger(ReadHdfsP.class);
    try {
        InputFormat inputFormat = jobConf.getInputFormat();
        InputSplit[] rawSplits = inputFormat.getSplits(jobConf, context.totalParallelism());
        // Wrap each split with its index so assignments stay traceable.
        IndexedInputSplit[] indexedSplits = new IndexedInputSplit[rawSplits.length];
        for (int i = 0; i < indexedSplits.length; i++) {
            indexedSplits[i] = new IndexedInputSplit(i, rawSplits[i]);
        }
        Address[] memberAddresses = context.jetInstance().getCluster().getMembers().stream()
                .map(Member::getAddress)
                .toArray(Address[]::new);
        assigned = assignSplitsToMembers(indexedSplits, memberAddresses);
        printAssignments(assigned);
    } catch (IOException e) {
        throw rethrow(e);
    }
}
private EnterpriseCacheTestServer(String memory) { this.memorySize = MemorySize.parse(memory, MemoryUnit.GIGABYTES); InputStream configInputStream = EnterpriseCacheTestServer.class.getResourceAsStream("/hazelcast-hd-memory.xml"); Config config = new XmlConfigBuilder(configInputStream).build(); config.setLicenseKey(ENTERPRISE_LICENSE_KEY); NativeMemoryConfig memoryConfig = config.getNativeMemoryConfig(); if (!memoryConfig.isEnabled()) { memoryConfig.setSize(memorySize).setEnabled(true); memoryConfig.setAllocatorType(NativeMemoryConfig.MemoryAllocatorType.POOLED); } instance = Hazelcast.newHazelcastInstance(config); memoryStats = MemoryStatsUtil.getMemoryStats(instance); logger = instance.getLoggingService().getLogger(EnterpriseCacheTestServer.class); }
// Idempotently starts the shared cluster member and wires up all listeners and maps.
// Order matters here: the instance must exist before listeners are registered, and
// localNodeId must be set before it is written into the instance-key map.
private void initializeCluster(Config config) {
    // CAS guard: only the first caller performs initialization; later calls are no-ops.
    if (clusterInitialized.compareAndSet(false, true)) {
        clusterInstance = Hazelcast.newHazelcastInstance(config);
        if (clusterInstance == null) {
            throw new RuntimeException("Unable to initialize the cluster");
        }
        Cluster cluster = clusterInstance.getCluster();
        cluster.addMembershipListener(this.membershipListener);
        // Remove this member's registration when the instance begins shutting down.
        clusterInstance.getLifecycleService().addLifecycleListener(new LifecycleListener() {
            @Override
            public void stateChanged(LifecycleEvent event) {
                if (event.getState() == SHUTTING_DOWN) {
                    removeMember(localNodeId);
                }
            }
        });
        // Register a listener for Hazelcast logging events
        LoggingService loggingService = clusterInstance.getLoggingService();
        loggingService.addLogListener(Level.FINEST, this);
        this.collectionsFactory = new ClusterCollectionsFactory(clusterInstance);
        localNodeId = getMemberId(cluster.getLocalMember());
        // Advertise this node's instance key to the rest of the cluster.
        IMap<MemberId, String> instanceKeyMap = collectionsFactory.getMap(INSTANCE_KEY_MAP);
        instanceKeyMap.put(localNodeId, localInstanceKey);
        IMap<String, Collection<String>> balancerMap = collectionsFactory.getMap(BALANCER_MAP_NAME);
        addBalancerMapEntryListeners(balancerMap);
    }
}
public static void main(String[] args) { // Start a member with no explicit configuration HazelcastInstance instance = Hazelcast.newHazelcastInstance(); ILogger logger = instance.getLoggingService().getLogger(ClientDynamicConfig.class); // Start a client -- we will be using the map to access & configure data structures HazelcastInstance client = HazelcastClient.newHazelcastClient(); // Obtain a map IMap<String, String> defaultMap = client.getMap("defaultMap"); // no data has been added yet, so get("1") will return null logger.info("defaultMap[\"1\"] = " + defaultMap.get("1")); // Another application will be using a map with a map loader and no backups // Wildcards also work for dynamically added data structure configurations MapConfig mapWithLoaderConfig = new MapConfig("map-with-loader-*").setBackupCount(0); mapWithLoaderConfig.getMapStoreConfig() .setEnabled(true) .setInitialLoadMode(EAGER) .setClassName("EchoMapLoader"); // add the configuration to the already running member client.getConfig().addMapConfig(mapWithLoaderConfig); IMap<String, String> mapWithLoader1 = client.getMap("map-with-loader-1"); logger.info("mapWithLoader1[\"1\"] = " + mapWithLoader1.get("1") + " (loaded from configured map loader)"); client.shutdown(); instance.shutdown(); }
public static void main(String[] args) { // Start a member with no explicit configuration HazelcastInstance instance = Hazelcast.newHazelcastInstance(); ILogger logger = instance.getLoggingService().getLogger(MemberDynamicConfig.class); // Obtain a map IMap<String, String> defaultMap = instance.getMap("defaultMap"); // Inspect its config: default is 1 sync backup MapConfig defaultMapConfig = instance.getConfig().getMapConfig("defaultMap"); logger.info("Map \"defaultMapConfig\" has backup count " + defaultMapConfig.getBackupCount()); logger.info("defaultMap[\"1\"] = " + defaultMap.get("1")); // Another application will be using a map with a map loader and no backups // Note that wildcards also work for dynamically added data structure configurations MapConfig mapWithLoaderConfig = new MapConfig("map-with-loader-*").setBackupCount(0); mapWithLoaderConfig.getMapStoreConfig() .setEnabled(true) .setInitialLoadMode(EAGER) .setClassName("EchoMapLoader"); // add the configuration to the already running member instance.getConfig().addMapConfig(mapWithLoaderConfig); IMap<String, String> mapWithLoader1 = instance.getMap("map-with-loader-1"); MapConfig mapWithLoader1Config = instance.getConfig().getMapConfig("map-with-loader-1"); logger.info("Map \"mapWithLoader1\" has backup count " + mapWithLoader1Config.getBackupCount()); logger.info("mapWithLoader1[\"1\"] = " + mapWithLoader1.get("1") + " (loaded from configured map loader)"); instance.shutdown(); } }