/**
 * Adds {@code delta} to the counter by delegating to the currently active
 * underlying instance (resolved on every call via {@code getInstance()}).
 *
 * @param delta the amount to add.
 */
@Override public void add(long delta) { getInstance().add(delta); }
/**
 * Returns the current value of the counter, read from the currently active
 * underlying instance (resolved on every call via {@code getInstance()}).
 *
 * @return the counter's current value.
 */
@Override public long get() { return getInstance().get(); } }
/**
 * Increments the counter by one, delegating to the currently active
 * underlying instance (resolved on every call via {@code getInstance()}).
 */
@Override public void inc() { getInstance().inc(); }
/**
 * Replaces (rather than increments) the value of the named counter, creating the
 * counter on first use. An existing counter is cleared before the new value is added,
 * so the end state is exactly {@code value}.
 *
 * @param name  the logical counter name; the registered metric name is {@code name + ".Counter"}.
 * @param value the value to set the counter to.
 */
@Override
public void updateCounterValue(String name, long value) {
    Exceptions.checkNotNullOrEmpty(name, "name");
    String counterName = name + ".Counter";
    Counter counter = countersCache.getIfPresent(counterName);
    if (counter != null) {
        // Reuse the cached counter: reset it so add() below acts as a "set".
        counter.clear();
    } else {
        counter = underlying.createCounter(counterName);
    }
    counter.add(value);
    // Bug fix: the original cached under the raw "name", but all lookups
    // (getIfPresent above and countersCache.get in incCounterValue) key on
    // counterName, so the entry could never be found and removal notifications
    // fired on the wrong key. Cache under the same derived key used everywhere else.
    countersCache.put(counterName, counter);
}
/**
 * Test transition back to null provider.
 *
 * Verifies that a Counter handle obtained while statistics are disabled stays usable
 * and transparently starts recording once statistics are re-enabled — i.e. the proxy
 * survives a provider swap. Note: this mutates global MetricsProvider state, so the
 * statement order (initialize -> createCounter -> re-initialize) is significant.
 */
@Test
public void testTransitionBackToNullProvider() {
    // Start with statistics disabled: the counter should be a no-op.
    MetricsConfig config = MetricsConfig.builder()
            .with(MetricsConfig.ENABLE_STATISTICS, false)
            .build();
    MetricsProvider.initialize(config);
    Counter counter = statsLogger.createCounter("continuity-counter");
    counter.add(1L);
    assertEquals(0L, counter.get()); // disabled provider discards the add

    // Re-enable statistics: the SAME counter handle must now record values.
    config = MetricsConfig.builder()
            .with(MetricsConfig.ENABLE_STATISTICS, true)
            .build();
    MetricsProvider.initialize(config);
    counter.add(1L);
    assertEquals(1L, counter.get()); // only the post-enable add is counted
} }
/**
 * Decrements the counter by one, delegating to the currently active
 * underlying instance (resolved on every call via {@code getInstance()}).
 */
@Override public void dec() { getInstance().dec(); }
/**
 * Cache removal callback: when a counter entry is evicted or explicitly invalidated
 * (any cause other than REPLACED), deregister the counter from the metrics registry.
 * REPLACED is skipped because the same metric name remains live under the new entry.
 */
@Override
public void onRemoval(RemovalNotification<String, Counter> removal) {
    Counter counter = removal.getValue();
    if (removal.getCause() != RemovalCause.REPLACED) {
        Exceptions.checkNotNullOrEmpty(counter.getName(), "counter");
        metrics.remove(counter.getName());
        log.debug("Removed Counter: {}.", counter.getName());
    }
} }).
/**
 * Resets the counter to zero, delegating to the currently active
 * underlying instance (resolved on every call via {@code getInstance()}).
 */
@Override public void clear() { getInstance().clear(); }
/** * Test counter registered and worked well with StatsLogger. */ @Test public void testCounter() { Counter testCounter = statsLogger.createCounter("testCounter"); testCounter.add(17); assertEquals(17, testCounter.get()); // test dynamic counter int sum = 0; for (int i = 1; i < 10; i++) { sum += i; dynamicLogger.incCounterValue("dynamicCounter", i); assertEquals(sum, MetricRegistryUtils.getCounter("pravega.dynamicCounter.Counter").getCount()); } dynamicLogger.freezeCounter("dynamicCounter"); assertEquals(null, MetricRegistryUtils.getCounter("pravega.dynamicCounter.Counter")); }
/**
 * Increments the named counter by {@code delta}, creating (and caching) the counter
 * on first use. Creation failures are logged and swallowed: metrics are best-effort
 * and must never propagate an error into the caller's data path.
 *
 * @param name  the logical counter name; the registered metric name is {@code name + ".Counter"}.
 * @param delta the amount to add (may be negative).
 */
@Override
public void incCounterValue(String name, long delta) {
    Exceptions.checkNotNullOrEmpty(name, "name");
    // Removed the original Preconditions.checkNotNull(delta): delta is a primitive
    // long, so the autoboxed value can never be null — the check was dead code.
    String counterName = name + ".Counter";
    try {
        Counter counter = countersCache.get(counterName, new Callable<Counter>() {
            @Override
            public Counter call() throws Exception {
                // Loader runs at most once per key; subsequent calls hit the cache.
                return underlying.createCounter(counterName);
            }
        });
        counter.add(delta);
    } catch (ExecutionException e) {
        // Deliberate best-effort: log and continue rather than fail the caller.
        log.error("Error while countersCache create counter", e);
    }
}
s.create(segmentName, TIMEOUT).join(); long expectedMetricsSize = FileSystemMetrics.WRITE_BYTES.get(); long expectedMetricsSuccesses = FileSystemMetrics.WRITE_LATENCY.toOpStatsData().getNumSuccessfulEvents(); expectedMetricsSize, FileSystemMetrics.WRITE_BYTES.get()); Assert.assertEquals("WRITE_LATENCY should not increase the count of successful events in case of unsuccessful writes", expectedMetricsSuccesses, FileSystemMetrics.WRITE_LATENCY.toOpStatsData().getNumSuccessfulEvents()); expectedMetricsSuccesses, FileSystemMetrics.WRITE_LATENCY.toOpStatsData().getNumSuccessfulEvents()); Assert.assertEquals("WRITE_BYTES should increase by the size of successful writes", expectedMetricsSize, FileSystemMetrics.WRITE_BYTES.get()); ex -> ex instanceof BadOffsetException); Assert.assertEquals("WRITE_BYTES should not change in case of unsuccessful writes", expectedMetricsSize, FileSystemMetrics.WRITE_BYTES.get()); Assert.assertEquals("WRITE_LATENCY should not increase the count of successful events in case of unsuccessful writes", expectedMetricsSuccesses, FileSystemMetrics.WRITE_LATENCY.toOpStatsData().getNumSuccessfulEvents()); ex -> ex instanceof StreamSegmentNotExistsException); Assert.assertEquals("WRITE_BYTES should not change in case of unsuccessful writes", expectedMetricsSize, FileSystemMetrics.WRITE_BYTES.get()); Assert.assertEquals("WRITE_LATENCY should not increase the count of successful events in case of unsuccessful writes", expectedMetricsSuccesses, FileSystemMetrics.WRITE_LATENCY.toOpStatsData().getNumSuccessfulEvents());
/**
 * Creates a new, empty segment file (with read/write permissions) under the configured
 * root, creating any missing parent directories, and returns a write handle for it.
 *
 * @param streamSegmentName the name of the segment to create (relative to the root).
 * @return a writable handle for the newly created segment.
 * @throws IOException if the file or its parent directories cannot be created.
 */
private SegmentHandle doCreate(String streamSegmentName) throws IOException {
    long trace = LoggerHelpers.traceEnter(log, "create", streamSegmentName);
    Path segmentPath = Paths.get(config.getRoot(), streamSegmentName);
    FileAttribute<Set<PosixFilePermission>> permissions =
            PosixFilePermissions.asFileAttribute(READ_WRITE_PERMISSION);

    // Paths.get with a non-empty root always yields a parent, hence the assert.
    Path parentDir = segmentPath.getParent();
    assert parentDir != null;
    Files.createDirectories(parentDir);
    Files.createFile(segmentPath, permissions);

    LoggerHelpers.traceLeave(log, "create", trace);
    FileSystemMetrics.CREATE_COUNT.inc();
    return FileSystemSegmentHandle.writeHandle(streamSegmentName);
}
/**
 * Reads exactly {@code length} bytes from the segment file starting at {@code offset}
 * into {@code buffer} at {@code bufferOffset}, looping over short reads.
 *
 * @return the total number of bytes read (always {@code length} on success).
 * @throws IllegalArgumentException if {@code offset} is beyond the current file size.
 * @throws IOException if the file cannot be read, or if end-of-file is reached before
 *                     {@code length} bytes were read.
 */
private int doRead(SegmentHandle handle, long offset, byte[] buffer, int bufferOffset, int length) throws IOException {
    long traceId = LoggerHelpers.traceEnter(log, "read", handle.getSegmentName(), offset, bufferOffset, length);
    Timer timer = new Timer();
    Path path = Paths.get(config.getRoot(), handle.getSegmentName());
    long fileSize = Files.size(path);
    if (fileSize < offset) {
        throw new IllegalArgumentException(String.format("Reading at offset (%d) which is beyond the "
                + "current size of segment (%d).", offset, fileSize));
    }
    try (FileChannel channel = FileChannel.open(path, StandardOpenOption.READ)) {
        int totalBytesRead = 0;
        do {
            ByteBuffer readBuffer = ByteBuffer.wrap(buffer, bufferOffset, length);
            // Bug fix: the original always read from the fixed 'offset', so on a short
            // read subsequent iterations re-read the same file bytes into later buffer
            // positions. The file position must advance with the bytes already consumed.
            int bytesRead = channel.read(readBuffer, offset + totalBytesRead);
            if (bytesRead == -1) {
                // Bug fix: read() returns -1 at end-of-file; the original fed that -1
                // into the offset/length arithmetic, corrupting state and looping forever.
                throw new IOException(String.format(
                        "Unexpected end-of-file reading segment at offset %d; read %d of %d requested bytes.",
                        offset, totalBytesRead, totalBytesRead + length));
            }
            bufferOffset += bytesRead;
            totalBytesRead += bytesRead;
            length -= bytesRead;
        } while (length != 0);
        FileSystemMetrics.READ_LATENCY.reportSuccessEvent(timer.getElapsed());
        FileSystemMetrics.READ_BYTES.add(totalBytesRead);
        LoggerHelpers.traceLeave(log, "read", traceId, totalBytesRead);
        return totalBytesRead;
    }
}
HDFSMetrics.CREATE_COUNT.inc(); log.debug("Created '{}'.", fullPath); } catch (IOException e) {
FileSystemMetrics.WRITE_BYTES.add(totalBytesWritten); LoggerHelpers.traceLeave(log, "write", traceId); return null;
/**
 * Reads {@code length} bytes from the HDFS-backed segment at {@code offset} into
 * {@code buffer} starting at {@code bufferOffset}, retrying transient failures via
 * {@code HDFS_RETRY}. Reports latency/byte metrics only for successful attempts.
 *
 * @return the total number of bytes read.
 * @throws ArrayIndexOutOfBoundsException if the offsets/length do not describe a
 *         valid slice of {@code buffer}.
 * @throws StreamSegmentException if the read ultimately fails; HDFS IOExceptions
 *         (including the cause of exhausted retries) are translated via
 *         {@code HDFSExceptionHelpers}.
 */
@Override
public int read(SegmentHandle handle, long offset, byte[] buffer, int bufferOffset, int length) throws StreamSegmentException {
    ensureInitializedAndNotClosed();
    long traceId = LoggerHelpers.traceEnter(log, "read", handle, offset, length);
    // Validate the buffer slice up front; buffer.length < bufferOffset + length also
    // catches the combined overflow of the two indices.
    if (offset < 0 || bufferOffset < 0 || length < 0 || buffer.length < bufferOffset + length) {
        throw new ArrayIndexOutOfBoundsException(String.format(
                "Offset (%s) must be non-negative, and bufferOffset (%s) and length (%s) must be valid indices into buffer of size %s.",
                offset, bufferOffset, length, buffer.length));
    }
    Timer timer = new Timer();
    try {
        // The whole read (including metrics reporting) runs inside the retry loop, so
        // metrics are only recorded for the attempt that succeeds.
        return HDFS_RETRY.run(() -> {
            int totalBytesRead = readInternal(handle, buffer, offset, bufferOffset, length);
            HDFSMetrics.READ_LATENCY.reportSuccessEvent(timer.getElapsed());
            HDFSMetrics.READ_BYTES.add(totalBytesRead);
            LoggerHelpers.traceLeave(log, "read", traceId, handle, offset, totalBytesRead);
            return totalBytesRead;
        });
    } catch (IOException e) {
        throw HDFSExceptionHelpers.convertException(handle.getSegmentName(), e);
    } catch (RetriesExhaustedException e) {
        // Unwrap the retry wrapper so callers see the underlying HDFS failure.
        throw HDFSExceptionHelpers.convertException(handle.getSegmentName(), e.getCause());
    }
}
HDFSMetrics.WRITE_BYTES.add(length); LoggerHelpers.traceLeave(log, "write", traceId, handle, offset, length);