ClusterStatusProtos$RegionLoad$Builder.build

How to use build method in org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos$RegionLoad$Builder

Best Java code snippets using org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos$RegionLoad$Builder.build (showing top results out of 315)
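All of the snippets below follow the same protobuf builder pattern: obtain a RegionLoad.Builder with newBuilder(), populate its fields, and call build() to produce the immutable RegionLoad message. Here is a minimal sketch of that pattern; the class and setter names come from the snippets on this page, while the region name, the numeric values, and the relocated ByteString package are illustrative assumptions that may differ across HBase versions.

import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
// Relocated protobuf package; assumed here and may differ by HBase version.
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;

public class RegionLoadBuildExample {
 public static void main(String[] args) {
  // Identify the region the load report describes (placeholder encoded name).
  HBaseProtos.RegionSpecifier spec = HBaseProtos.RegionSpecifier.newBuilder()
    .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
    .setValue(ByteString.copyFromUtf8("example-region"))
    .build();

  // build() converts the mutable Builder into an immutable RegionLoad message.
  ClusterStatusProtos.RegionLoad regionLoad = ClusterStatusProtos.RegionLoad.newBuilder()
    .setRegionSpecifier(spec)
    .setStores(1)
    .setStorefiles(2)
    .build();

  // The built message can then be attached to a ServerLoad, as several snippets below do.
  ClusterStatusProtos.ServerLoad serverLoad = ClusterStatusProtos.ServerLoad.newBuilder()
    .addRegionLoads(regionLoad)
    .build();

  System.out.println("regions reported: " + serverLoad.getRegionLoadsCount());
 }
}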

origin: apache/hbase

/**
 * <code>repeated .hbase.pb.RegionLoad region_loads = 1;</code>
 */
public Builder addRegionLoads(
  int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder builderForValue) {
 if (regionLoadsBuilder_ == null) {
  ensureRegionLoadsIsMutable();
  regionLoads_.add(index, builderForValue.build());
  onChanged();
 } else {
  regionLoadsBuilder_.addMessage(index, builderForValue.build());
 }
 return this;
}
origin: apache/hbase

/**
 * <code>repeated .hbase.pb.RegionLoad region_loads = 1;</code>
 */
public Builder addRegionLoads(
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder builderForValue) {
 if (regionLoadsBuilder_ == null) {
  ensureRegionLoadsIsMutable();
  regionLoads_.add(builderForValue.build());
  onChanged();
 } else {
  regionLoadsBuilder_.addMessage(builderForValue.build());
 }
 return this;
}
origin: apache/hbase

/**
 * <code>repeated .hbase.pb.RegionLoad region_loads = 1;</code>
 */
public Builder setRegionLoads(
  int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder builderForValue) {
 if (regionLoadsBuilder_ == null) {
  ensureRegionLoadsIsMutable();
  regionLoads_.set(index, builderForValue.build());
  onChanged();
 } else {
  regionLoadsBuilder_.setMessage(index, builderForValue.build());
 }
 return this;
}
origin: org.apache.hbase/hbase-protocol-shaded

/**
 * <pre>
 ** Information on the load of individual regions. 
 * </pre>
 *
 * <code>repeated .hbase.pb.RegionLoad region_loads = 5;</code>
 */
public Builder addRegionLoads(
  int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder builderForValue) {
 if (regionLoadsBuilder_ == null) {
  ensureRegionLoadsIsMutable();
  regionLoads_.add(index, builderForValue.build());
  onChanged();
 } else {
  regionLoadsBuilder_.addMessage(index, builderForValue.build());
 }
 return this;
}
origin: org.apache.hbase/hbase-server

private ClusterStatusProtos.ServerLoad createServerLoadProto() {
 HBaseProtos.RegionSpecifier rSpecOne = HBaseProtos.RegionSpecifier.newBuilder()
  .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
  .setValue(ByteString.copyFromUtf8("ASDFGQWERT")).build();
 HBaseProtos.RegionSpecifier rSpecTwo = HBaseProtos.RegionSpecifier.newBuilder()
  .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
  .setValue(ByteString.copyFromUtf8("QWERTYUIOP")).build();
 ClusterStatusProtos.RegionLoad rlOne =
  ClusterStatusProtos.RegionLoad.newBuilder().setRegionSpecifier(rSpecOne).setStores(10)
   .setStorefiles(101).setStoreUncompressedSizeMB(106).setStorefileSizeMB(520)
   .setFilteredReadRequestsCount(100).setStorefileIndexSizeKB(42).setRootIndexSizeKB(201)
   .setReadRequestsCount(Integer.MAX_VALUE).setWriteRequestsCount(Integer.MAX_VALUE).build();
 ClusterStatusProtos.RegionLoad rlTwo =
  ClusterStatusProtos.RegionLoad.newBuilder().setRegionSpecifier(rSpecTwo).setStores(3)
   .setStorefiles(13).setStoreUncompressedSizeMB(23).setStorefileSizeMB(300)
   .setFilteredReadRequestsCount(200).setStorefileIndexSizeKB(40).setRootIndexSizeKB(303)
   .setReadRequestsCount(Integer.MAX_VALUE).setWriteRequestsCount(Integer.MAX_VALUE).build();
 ClusterStatusProtos.ServerLoad sl =
  ClusterStatusProtos.ServerLoad.newBuilder().addRegionLoads(rlOne).
   addRegionLoads(rlTwo).build();
 return sl;
}
origin: org.apache.hbase/hbase-protocol-shaded

/**
 * <pre>
 ** Information on the load of individual regions. 
 * </pre>
 *
 * <code>repeated .hbase.pb.RegionLoad region_loads = 5;</code>
 */
public Builder addRegionLoads(
  org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder builderForValue) {
 if (regionLoadsBuilder_ == null) {
  ensureRegionLoadsIsMutable();
  regionLoads_.add(builderForValue.build());
  onChanged();
 } else {
  regionLoadsBuilder_.addMessage(builderForValue.build());
 }
 return this;
}
origin: org.apache.hbase/hbase-server

private RegionServerStatusProtos.RegionServerReportRequest.Builder
  makeRSReportRequestWithRegions(final ServerName sn, HRegionInfo... regions) {
 ClusterStatusProtos.ServerLoad.Builder sl = ClusterStatusProtos.ServerLoad.newBuilder();
 for (int i = 0; i < regions.length; i++) {
  HBaseProtos.RegionSpecifier.Builder rs = HBaseProtos.RegionSpecifier.newBuilder();
  rs.setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME);
  rs.setValue(UnsafeByteOperations.unsafeWrap(regions[i].getRegionName()));
  ClusterStatusProtos.RegionLoad.Builder rl = ClusterStatusProtos.RegionLoad.newBuilder()
    .setRegionSpecifier(rs.build());
  sl.addRegionLoads(i, rl.build());
 }
 return RegionServerStatusProtos.RegionServerReportRequest.newBuilder()
      .setServer(ProtobufUtil.toServerName(sn))
      .setLoad(sl);
}
origin: org.apache.hbase/hbase-client

.setStoreUncompressedSizeMB(
 (int) regionMetrics.getUncompressedStoreFileSize().get(Size.Unit.MEGABYTE))
.build();
origin: apache/hbase

r.setCompleteSequenceId(regionLoadBldr);
return regionLoadBldr.build();
origin: apache/hbase

private ClusterStatusProtos.ServerLoad createServerLoadProto() {
 HBaseProtos.RegionSpecifier rSpecOne = HBaseProtos.RegionSpecifier.newBuilder()
   .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
   .setValue(ByteString.copyFromUtf8("ASDFGQWERT")).build();
 HBaseProtos.RegionSpecifier rSpecTwo = HBaseProtos.RegionSpecifier.newBuilder()
   .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
   .setValue(ByteString.copyFromUtf8("QWERTYUIOP")).build();
 ClusterStatusProtos.RegionLoad rlOne =
   ClusterStatusProtos.RegionLoad.newBuilder().setRegionSpecifier(rSpecOne).setStores(10)
     .setStorefiles(101).setStoreUncompressedSizeMB(106).setStorefileSizeMB(520)
     .setFilteredReadRequestsCount(100).setStorefileIndexSizeKB(42).setRootIndexSizeKB(201)
     .setReadRequestsCount(Integer.MAX_VALUE).setWriteRequestsCount(Integer.MAX_VALUE)
     .build();
 ClusterStatusProtos.RegionLoad rlTwo =
   ClusterStatusProtos.RegionLoad.newBuilder().setRegionSpecifier(rSpecTwo).setStores(3)
     .setStorefiles(13).setStoreUncompressedSizeMB(23).setStorefileSizeMB(300)
     .setFilteredReadRequestsCount(200).setStorefileIndexSizeKB(40).setRootIndexSizeKB(303)
     .setReadRequestsCount(Integer.MAX_VALUE).setWriteRequestsCount(Integer.MAX_VALUE)
     .setCpRequestsCount(100)
     .build();
 ClusterStatusProtos.ServerLoad sl =
   ClusterStatusProtos.ServerLoad.newBuilder().addRegionLoads(rlOne).
     addRegionLoads(rlTwo).build();
 return sl;
}
origin: apache/hbase

/**
 * <pre>
 ** Information on the load of individual regions. 
 * </pre>
 *
 * <code>repeated .hbase.pb.RegionLoad region_loads = 5;</code>
 */
public Builder setRegionLoads(
  int index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad.Builder builderForValue) {
 if (regionLoadsBuilder_ == null) {
  ensureRegionLoadsIsMutable();
  regionLoads_.set(index, builderForValue.build());
  onChanged();
 } else {
  regionLoadsBuilder_.setMessage(index, builderForValue.build());
 }
 return this;
}
origin: apache/hbase

/**
 * @return An empty region load stamped with the passed in <code>regionInfo</code>
 * region name.
 */
private RegionMetrics getEmptyRegionMetrics(final RegionInfo regionInfo) {
 return RegionMetricsBuilder.toRegionMetrics(ClusterStatusProtos.RegionLoad.newBuilder().
     setRegionSpecifier(HBaseProtos.RegionSpecifier.newBuilder().
         setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME).
         setValue(ByteString.copyFrom(regionInfo.getRegionName())).build()).build());
}

Popular methods of ClusterStatusProtos$RegionLoad$Builder (a combined usage sketch follows this list)

  • setFilteredReadRequestsCount
    the current total filtered read requests made to region optional uint64 filtered_read_requests
  • setReadRequestsCount
    the current total read requests made to region optional uint64 read_requests_count = 8;
  • setRootIndexSizeKB
    The current total size of root-level indexes for the region, in KB. optional uint32 root_index
  • setStoreUncompressedSizeMB
    the total size of the store files for the region, uncompressed, in MB optional uint32 store_un
  • setStorefileIndexSizeKB
    The current total size of root-level store file indexes for the region, in KB. The same as {@link
  • setStorefileSizeMB
    the current total size of the store files for the region, in MB optional uint32 storefile_size
  • setStorefiles
    the number of storefiles for the region optional uint32 storefiles = 3;
  • setStores
    the number of stores for the region optional uint32 stores = 2;
  • setWriteRequestsCount
    the current total write requests made to region optional uint64 write_requests_count = 9;
  • setCompleteSequenceId
    the most recent sequence Id from cache flush optional uint64 complete_sequence_id = 15;
  • setCpRequestsCount
    the current total coprocessor requests made to region optional uint64 cp_requests_count = 20;
  • setCurrentCompactedKVs
    the completed count of key values in currently running compaction optional uint64 current_comp
  • setDataLocality, setLastMajorCompactionTs, setMemStoreSizeMB, setRegionSpecifier, setTotalCompactingKVs, setTotalStaticBloomSizeKB, setTotalStaticIndexSizeKB
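In practice these setters are chained on a single builder before the final build() call, as in the test snippets above. Below is a brief, hedged sketch of that style; every numeric value is an arbitrary example and the relocated ByteString package is an assumption, so adjust both to your HBase version.

import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
// Relocated protobuf package; assumed here and may differ by HBase version.
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;

public class RegionLoadSettersExample {
 static ClusterStatusProtos.RegionLoad sampleRegionLoad() {
  HBaseProtos.RegionSpecifier spec = HBaseProtos.RegionSpecifier.newBuilder()
    .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
    .setValue(ByteString.copyFromUtf8("example-region"))
    .build();
  return ClusterStatusProtos.RegionLoad.newBuilder()
    .setRegionSpecifier(spec)            // identifies the region being reported
    .setStores(10)                       // number of stores
    .setStorefiles(101)                  // number of store files
    .setStoreUncompressedSizeMB(106)     // uncompressed store file size, MB
    .setStorefileSizeMB(520)             // store file size, MB
    .setStorefileIndexSizeKB(42)         // root-level store file index size, KB
    .setRootIndexSizeKB(201)             // root-level index size, KB
    .setReadRequestsCount(1000L)         // total read requests
    .setWriteRequestsCount(500L)         // total write requests
    .setFilteredReadRequestsCount(100L)  // total filtered read requests
    .build();                            // freeze into an immutable RegionLoad
 }
}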
