// Refine search
/**
 * Emits {@code message} at debug level when debug logging is enabled.
 * Otherwise, when the static {@code DEBUG} flag is set and info logging is
 * enabled, the same message is emitted at info level instead; with both
 * disabled, nothing is logged.
 *
 * @param message the (possibly parameterized) log message
 * @param params substitution arguments for the message
 */
private void debug(String message, Object... params) {
  if (logger.isDebugEnabled()) {
    logger.debug(message, params);
    return;
  }
  if (DEBUG && logger.isInfoEnabled()) {
    logger.info(message, params);
  }
}
/** {@inheritDoc} */
@Override
public void logHttpRequest(HttpServletRequest httpRequest, String requestName, long duration,
    boolean systemError, int responseSize, String loggerName) {
  // Resolve the logger by name on each call so callers can route access logs
  // to differently-configured loggers.
  final org.apache.logging.log4j.Logger accessLogger =
      org.apache.logging.log4j.LogManager.getLogger(loggerName);
  if (!accessLogger.isInfoEnabled()) {
    return;
  }
  accessLogger.info(LOG.buildLogMessage(httpRequest, duration, systemError, responseSize));
}
/**
 * Builds the exception thrown when a query exceeds its maximum allowed
 * execution time, logging the cancellation (message plus the query) at info
 * level first.
 *
 * <p>Note: "Cancelation" in the method name is the established (misspelled)
 * interface name and must be preserved for callers.
 *
 * @param timeLimitMillis the execution-time limit that was exceeded, in milliseconds
 * @param query the query that was canceled
 * @return a {@link QueryExecutionTimeoutException} describing the timeout
 */
@Override
public CacheRuntimeException createCancelationException(final long timeLimitMillis,
    final DefaultQuery query) {
  final String message = String.format(
      "Query execution canceled after exceeding max execution time %sms.", timeLimitMillis);
  if (logger.isInfoEnabled()) {
    // Equivalent to String.format("%s %s", message, query).
    logger.info(message + " " + query);
  }
  return new QueryExecutionTimeoutException(message);
}
private void handleMemberDeparture(InternalDistributedMember member) { Integer queueId = getQueueId(member, false); if (queueId == null) { return; } boolean isUsed = false; synchronized (senderToSerialQueueIdMap) { senderToSerialQueueIdMap.remove(member); // Check if any other members are using the same executor. for (Iterator iter = senderToSerialQueueIdMap.values().iterator(); iter.hasNext();) { Integer value = (Integer) iter.next(); if (value.equals(queueId)) { isUsed = true; break; } } // If not used mark this as unused. if (!isUsed) { if (logger.isInfoEnabled(LogMarker.DM_MARKER)) logger.info(LogMarker.DM_MARKER, "Marking the SerialQueuedExecutor with id : {} used by the member {} to be unused.", new Object[] {queueId, member}); threadMarkedForUse.add(queueId); } } }
/**
 * Returns the PdxType registered for {@code typeId}, consulting the local
 * id-to-type cache first and falling back to the distributed type registry
 * (caching any hit locally in both direction maps).
 *
 * @param typeId the PDX type id to look up
 * @return the matching PdxType, or {@code null} if no registry knows the id
 */
public PdxType getType(int typeId) {
  final PdxType cached = this.idToType.get(typeId);
  if (cached != null) {
    return cached;
  }
  synchronized (this) {
    final PdxType fetched = this.distributedTypeRegistry.getType(typeId);
    if (fetched == null) {
      return null;
    }
    this.idToType.put(typeId, fetched);
    this.typeToId.put(fetched, typeId);
    if (logger.isInfoEnabled()) {
      logger.info("Adding: {}", fetched.toFormattedString());
    }
    if (logger.isDebugEnabled()) {
      logger.debug("Adding entry into pdx type registry, typeId: {} {}", typeId, fetched);
    }
    return fetched;
  }
}
/**
 * Returns the id for {@code newInfo}, defining it in the distributed registry
 * and caching the mapping locally if it is not already known. Fails hard if
 * the registry returns an id already bound to a different enum.
 *
 * @param newInfo the enum to define
 * @return the (possibly pre-existing) id for the enum
 */
public int defineEnum(EnumInfo newInfo) {
  final Integer cachedId = this.enumInfoToId.get(newInfo);
  if (cachedId != null) {
    return cachedId;
  }
  final int id = this.distributedTypeRegistry.defineEnum(newInfo);
  final EnumInfo previous = this.idToEnum.get(id);
  if (previous == null) {
    this.idToEnum.put(id, newInfo);
    this.enumInfoToId.put(newInfo, id);
    if (logger.isInfoEnabled()) {
      // High byte of the id encodes the dsid; low 24 bits the type number.
      logger.info("Caching PDX Enum: {}, dsid={} typenum={}", newInfo, id >> 24, id & 0xFFFFFF);
    }
  } else if (!previous.equals(newInfo)) {
    Assert.fail("Old enum does not equal new enum for the same id. oldEnum=" + previous
        + " newEnum=" + newInfo);
  }
  return id;
}
/**
 * Registers a type received from a remote WAN site under {@code typeId},
 * propagating it to the distributed registry and caching it locally. Fails
 * hard if a different type is already bound to the same id.
 *
 * @param typeId the id assigned by the remote site
 * @param newType the type definition to register
 */
public void addRemoteType(int typeId, PdxType newType) {
  final PdxType previous = this.idToType.get(typeId);
  if (previous == null) {
    this.distributedTypeRegistry.addRemoteType(typeId, newType);
    this.idToType.put(typeId, newType);
    this.typeToId.put(newType, typeId);
    if (logger.isInfoEnabled()) {
      logger.info("Adding, from remote WAN: {}", newType.toFormattedString());
    }
  } else if (!previous.equals(newType)) {
    Assert.fail("Old type does not equal new type for the same id. oldType=" + previous
        + " new type=" + newType);
  }
}
/**
 * Lazily creates and binds the locator's server socket if none exists or the
 * previous one was closed. Binds to {@code bind_address} when configured,
 * otherwise to the wildcard address (recording the resulting address back
 * into {@code bind_address}).
 *
 * @throws IOException if the socket cannot be created or bound
 */
private void initializeServerSocket() throws IOException {
  if (srv_sock == null || srv_sock.isClosed()) {
    if (bind_address == null) {
      srv_sock = getSocketCreator().createServerSocket(port, BACKLOG);
      bind_address = srv_sock.getInetAddress();
    } else {
      srv_sock = getSocketCreator().createServerSocket(port, BACKLOG, bind_address);
    }
    // GEODE-4176 - set the port from a wild-card bind so that handlers know the correct value
    if (this.port <= 0) {
      this.port = srv_sock.getLocalPort();
    }
    if (log.isInfoEnabled()) {
      log.info("Locator was created at " + new Date());
      log.info("Listening on port " + getPort() + " bound on address " + bind_address);
    }
    // NOTE(review): SO_REUSEADDR is set after createServerSocket has presumably
    // already bound the socket, so it cannot affect this bind. Confirm whether
    // the option should instead be applied inside the SocketCreator before
    // binding.
    srv_sock.setReuseAddress(true);
  }
}
/**
 * Registers a type definition being imported (e.g. from a snapshot) under
 * {@code typeId}, in both the distributed registry and the local caches.
 *
 * @param typeId the id the imported type must use
 * @param importedType the type definition from the import
 * @throws PdxSerializationException if a different type is already registered
 *         under the same id
 */
public void addImportedType(int typeId, PdxType importedType) {
  final PdxType current = getType(typeId);
  if (current != null && !current.equals(importedType)) {
    throw new PdxSerializationException(String.format(
        "Detected conflicting PDX types during import:%s%sSnapshot data containing PDX types must be imported into an empty cache with no pre-existing type definitions. Allow the import to complete prior to inserting additional data into the cache.",
        importedType, current));
  }
  this.distributedTypeRegistry.addImportedType(typeId, importedType);
  this.idToType.put(typeId, importedType);
  this.typeToId.put(importedType, typeId);
  if (logger.isInfoEnabled()) {
    logger.info("Importing type: {}", importedType.toFormattedString());
  }
}
/**
 * Create a type id for a type that may come locally, or from a remote member.
 * Returns the cached id when the type is already known; otherwise defines it
 * in the distributed registry and caches the resulting mapping. In both cases
 * the id is written back into {@code newType}.
 *
 * @param newType the type to define (its type id is updated in place)
 * @return the id assigned to the type
 */
public int defineType(PdxType newType) {
  final Integer knownId = this.typeToId.get(newType);
  if (knownId != null) {
    final int existingTypeId = knownId;
    newType.setTypeId(existingTypeId);
    return existingTypeId;
  }
  final int id = this.distributedTypeRegistry.defineType(newType);
  newType.setTypeId(id);
  final PdxType previous = this.idToType.get(id);
  if (previous == null) {
    this.idToType.put(id, newType);
    this.typeToId.put(newType, id);
    if (logger.isInfoEnabled()) {
      logger.info("Caching {}", newType.toFormattedString());
    }
  } else if (!previous.equals(newType)) {
    Assert.fail("Old type does not equal new type for the same id. oldType=" + previous
        + " new type=" + newType);
  }
  return id;
}
// NOTE(review): fragment of an AutoConnectionSource locator-discovery update —
// the enclosing method header and the remainder of the body are not visible
// here, so braces are unbalanced by construction. The logging/callback branch
// runs when info logging is enabled OR a non-default LocatorDiscoveryCallback
// is installed. Preserved verbatim.
pool.getStats().setLocatorCount(newLocatorAddresses.size());
if (logger.isInfoEnabled()
    || !locatorCallback.getClass().equals(LocatorDiscoveryCallbackAdapter.class)) {
  List<InetSocketAddress> newLocators = newLocatorList.getLocators();
  if (!addedLocators.isEmpty()) {
    locatorCallback.locatorsDiscovered(Collections.unmodifiableList(addedLocators));
    logger.info("AutoConnectionSource discovered new locators {}", addedLocators);
    // NOTE(review): the "dropping ... locators" line logs removedLocators but is
    // guarded by addedLocators.isEmpty() — confirm against the full method.
    logger.info("AutoConnectionSource dropping previously discovered locators {}",
        removedLocators);
// Fragment of ClientStatsManager initialization logging (enclosing method not
// visible here; the closing brace lies outside this view).
if (logger.isInfoEnabled()) {
  // Fixed typo in the log message: "intializing" -> "initializing".
  logger.info(
      "ClientStatsManager, initializing the statistics...");
// NOTE(review): this fragment appears corrupted — "DefaultQuery.testHook"
// dangles with no call or assignment, the logger.info line ends with a doubled
// semicolon, and braces are unbalanced. Preserved verbatim; recover the
// original block from version control before editing.
if (prQueryTraceInfoList != null && this.query.isTraced() && logger.isInfoEnabled()) {
  if (DefaultQuery.testHook != null) {
    DefaultQuery.testHook
  sb.append(queryTraceInfo.createLogLine(me)).append("\n");
  logger.info(sb.toString());;
// NOTE(review): fragment of DistributionManager startup logging — the opening
// branch and the method header are outside this view, so braces are unbalanced
// by construction. Preserved verbatim.
logger.info("Did not hear back from any other system. I am the first one.");
} else if (transport.isMcastEnabled()) {
  if (logger.isInfoEnabled()) {
    // Elapsed startup time; only reported when the verbose DM marker is enabled.
    long delta = System.currentTimeMillis() - start;
    Object[] logArgs = new Object[] {distributionManager.getDistributionManagerId(), transport,
        Integer.valueOf(distributionManager.getOtherDistributionManagerIds().size()),
        distributionManager.getOtherDistributionManagerIds(),
        (logger.isInfoEnabled(LogMarker.DM_MARKER) ? " (VERBOSE, took " + delta + " ms)" : ""),
        ((distributionManager.getDMType() == ADMIN_ONLY_DM_TYPE) ? " (admin only)"
            : (distributionManager.getDMType() == LOCATOR_DM_TYPE) ? " (locator)" : "")};
    logger.info(LogMarker.DM_MARKER,
        "DistributionManager {} started on {}. There were {} other DMs. others: {} {} {}",
        logArgs);
// NOTE(review): this fragment appears corrupted — the "Serial Queue info"
// string concatenation is never terminated (no closing parenthesis or
// semicolon before the next logger.info call) and braces are unbalanced.
// Preserved verbatim; recover the original block from version control before
// editing.
logger.info("[ThreadsMonitor] a New Monitor object and process were created.\n");
} else {
  this.threadMonitor = new ThreadsMonitoringImplDummy();
  logger.info("[ThreadsMonitor] Monitoring is disabled and will not be run.\n");
if (logger.isInfoEnabled(LogMarker.DM_MARKER)) {
  logger.info(LogMarker.DM_MARKER, "Serial Queue info :" + " THROTTLE_PERCENT: "
      + THROTTLE_PERCENT + " SERIAL_QUEUE_BYTE_LIMIT :" + SERIAL_QUEUE_BYTE_LIMIT
logger.info("Starting DistributionManager {}. {}",
    new Object[] {this.localAddress,
        (logger.isInfoEnabled(LogMarker.DM_MARKER) ? sb.toString() : "")});
/**
 * Logs {@code msg} with its associated throwable at info level on the named
 * module's logger; when info logging is disabled, delegates to
 * {@code uselog4j4} instead.
 *
 * @param t the associated throwable
 * @param msg the message to log
 * @param module the module/logger name used to look up the logger
 */
private static void uselog4j3(Throwable t, String msg, String module) {
  Logger logger = getLogger(module);
  if (logger.isInfoEnabled()) {
    // Removed msg.intern(): interning every log message pollutes the string
    // pool (and NPEs on a null msg) without changing the logged output.
    logger.info(msg, t);
  } else {
    uselog4j4(t, msg, module);
  }
}
/**
 * Logs an applied setting update at info level. Filtered settings log only the
 * key (their values must not appear in logs); otherwise both the previous and
 * new raw values are included.
 *
 * @param setting the setting being updated
 * @param current the settings after the update
 * @param previous the settings before the update
 * @param logger the logger to emit on
 */
static void logSettingUpdate(Setting setting, Settings current, Settings previous, Logger logger) {
  if (!logger.isInfoEnabled()) {
    return;
  }
  if (setting.isFiltered()) {
    logger.info("updating [{}]", setting.key);
  } else {
    logger.info("updating [{}] from [{}] to [{}]", setting.key, setting.getRaw(previous),
        setting.getRaw(current));
  }
}
@Override protected void doStart() { transport.addMessageListener(this); connectionManager.addListener(this); transport.start(); if (transport.boundAddress() != null && logger.isInfoEnabled()) { logger.info("{}", transport.boundAddress()); for (Map.Entry<String, BoundTransportAddress> entry : transport.profileBoundAddresses().entrySet()) { logger.info("profile [{}]: {}", entry.getKey(), entry.getValue()); } } localNode = localNodeFactory.apply(transport.boundAddress()); if (connectToRemoteCluster) { // here we start to connect to the remote clusters remoteClusterService.initializeRemoteClusters(); } }
// NOTE(review): fragment of node-environment filesystem-summary logging — the
// matching if-branch and the code that populates totFSPath, allMounts and
// allTypes are outside this view (allTypes is still empty at the point shown;
// the population step is presumably in the missing part). Braces are
// unbalanced by construction. Preserved verbatim.
} else if (logger.isInfoEnabled()) {
  FsInfo.Path totFSPath = new FsInfo.Path();
  Set<String> allTypes = new HashSet<>();
  logger.info(
      "using [{}] data paths, mounts [{}], net usable_space [{}], net total_space [{}], types [{}]",
      nodePaths.length, allMounts, totFSPath.getAvailable(), totFSPath.getTotal(),
      toString(allTypes));
/**
 * Logs a GC-overhead message at the level matching {@code threshold} (WARN,
 * INFO or DEBUG), provided that level is enabled on the logger.
 *
 * @param logger the logger to emit on
 * @param threshold the severity bucket the overhead fell into
 * @param current time spent in GC, in milliseconds
 * @param elapsed the observation window, in milliseconds
 * @param seq monotonically increasing sequence number for the observation
 */
static void logGcOverhead(
    final Logger logger,
    final JvmMonitor.Threshold threshold,
    final long current,
    final long elapsed,
    final long seq) {
  final TimeValue gcTime = TimeValue.timeValueMillis(current);
  final TimeValue window = TimeValue.timeValueMillis(elapsed);
  switch (threshold) {
    case WARN:
      if (logger.isWarnEnabled()) {
        logger.warn(OVERHEAD_LOG_MESSAGE, seq, gcTime, window);
      }
      break;
    case INFO:
      if (logger.isInfoEnabled()) {
        logger.info(OVERHEAD_LOG_MESSAGE, seq, gcTime, window);
      }
      break;
    case DEBUG:
      if (logger.isDebugEnabled()) {
        logger.debug(OVERHEAD_LOG_MESSAGE, seq, gcTime, window);
      }
      break;
  }
}