Tabnine Logo
DataTransferProtos$OpWriteBlockProto.getStorageType
Code IndexAdd Tabnine to your IDE (free)

How to use
getStorageType
method
in
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$OpWriteBlockProto

Best Java code snippets using org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$OpWriteBlockProto.getStorageType (Showing top 12 results out of 315)

origin: org.apache.hadoop/hadoop-hdfs

try {
 writeBlock(PBHelperClient.convert(proto.getHeader().getBaseHeader().getBlock()),
   PBHelperClient.convertStorageType(proto.getStorageType()),
   PBHelperClient.convert(proto.getHeader().getBaseHeader().getToken()),
   proto.getHeader().getClientName(),
origin: org.apache.hadoop/hadoop-hdfs-client

setStorageType(other.getStorageType());
origin: ch.cern.hadoop/hadoop-hdfs

setStorageType(other.getStorageType());
origin: io.prestosql.hadoop/hadoop-apache

setStorageType(other.getStorageType());
origin: org.apache.hadoop/hadoop-hdfs-client

if (hasStorageType()) {
 result = result &&
   (getStorageType() == other.getStorageType());
origin: io.prestosql.hadoop/hadoop-apache

if (hasStorageType()) {
 result = result &&
   (getStorageType() == other.getStorageType());
origin: ch.cern.hadoop/hadoop-hdfs

if (hasStorageType()) {
 result = result &&
   (getStorageType() == other.getStorageType());
origin: io.prestosql.hadoop/hadoop-apache

hash = (53 * hash) + hashEnum(getStorageType());
origin: org.apache.hadoop/hadoop-hdfs-client

hash = (53 * hash) + hashEnum(getStorageType());
origin: ch.cern.hadoop/hadoop-hdfs

hash = (53 * hash) + hashEnum(getStorageType());
origin: io.prestosql.hadoop/hadoop-apache

/**
 * Receive OP_WRITE_BLOCK: deserialize an OpWriteBlockProto from the stream
 * and dispatch it to {@code writeBlock}, converting every protobuf field to
 * its internal representation via PBHelper.
 *
 * @param in stream positioned at a vint-length-prefixed OpWriteBlockProto
 * @throws IOException if the proto cannot be read or the write fails
 */
private void opWriteBlock(DataInputStream in) throws IOException {
 final OpWriteBlockProto op = OpWriteBlockProto.parseFrom(vintPrefixed(in));
 final DatanodeInfo[] targetNodes = PBHelper.convert(op.getTargetsList());
 // Continue any trace span carried in the request header; may be null
 // when tracing is disabled.
 TraceScope span =
   continueTraceSpan(op.getHeader(), op.getClass().getSimpleName());
 try {
  writeBlock(
    PBHelper.convert(op.getHeader().getBaseHeader().getBlock()),
    PBHelper.convertStorageType(op.getStorageType()),
    PBHelper.convert(op.getHeader().getBaseHeader().getToken()),
    op.getHeader().getClientName(),
    targetNodes,
    PBHelper.convertStorageTypes(op.getTargetStorageTypesList(),
      targetNodes.length),
    PBHelper.convert(op.getSource()),
    fromProto(op.getStage()),
    op.getPipelineSize(),
    op.getMinBytesRcvd(),
    op.getMaxBytesRcvd(),
    op.getLatestGenerationStamp(),
    fromProto(op.getRequestedChecksum()),
    // Optional fields fall back to their documented defaults when unset.
    op.hasCachingStrategy()
      ? getCachingStrategy(op.getCachingStrategy())
      : CachingStrategy.newDefaultStrategy(),
    op.hasAllowLazyPersist() && op.getAllowLazyPersist(),
    op.hasPinning() && op.getPinning(),
    PBHelper.convertBooleanList(op.getTargetPinningsList()));
 } finally {
  if (span != null) {
   span.close();
  }
 }
}
origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Receive OP_WRITE_BLOCK: deserialize an OpWriteBlockProto from the stream
 * and dispatch it to {@code writeBlock}, converting every protobuf field to
 * its internal representation via PBHelper.
 *
 * @param in stream positioned at a vint-length-prefixed OpWriteBlockProto
 * @throws IOException if the proto cannot be read or the write fails
 */
private void opWriteBlock(DataInputStream in) throws IOException {
 final OpWriteBlockProto op = OpWriteBlockProto.parseFrom(vintPrefixed(in));
 final DatanodeInfo[] targetNodes = PBHelper.convert(op.getTargetsList());
 // Continue any trace span carried in the request header; may be null
 // when tracing is disabled.
 TraceScope span =
   continueTraceSpan(op.getHeader(), op.getClass().getSimpleName());
 try {
  writeBlock(
    PBHelper.convert(op.getHeader().getBaseHeader().getBlock()),
    PBHelper.convertStorageType(op.getStorageType()),
    PBHelper.convert(op.getHeader().getBaseHeader().getToken()),
    op.getHeader().getClientName(),
    targetNodes,
    PBHelper.convertStorageTypes(op.getTargetStorageTypesList(),
      targetNodes.length),
    PBHelper.convert(op.getSource()),
    fromProto(op.getStage()),
    op.getPipelineSize(),
    op.getMinBytesRcvd(),
    op.getMaxBytesRcvd(),
    op.getLatestGenerationStamp(),
    fromProto(op.getRequestedChecksum()),
    // Optional fields fall back to their documented defaults when unset.
    op.hasCachingStrategy()
      ? getCachingStrategy(op.getCachingStrategy())
      : CachingStrategy.newDefaultStrategy(),
    op.hasAllowLazyPersist() && op.getAllowLazyPersist(),
    op.hasPinning() && op.getPinning(),
    PBHelper.convertBooleanList(op.getTargetPinningsList()));
 } finally {
  if (span != null) {
   span.close();
  }
 }
}
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$OpWriteBlockProto.getStorageType

Javadoc

optional .hadoop.hdfs.StorageTypeProto storageType = 11 [default = DISK];

Popular methods of DataTransferProtos$OpWriteBlockProto

  • getAllowLazyPersist
    optional bool allowLazyPersist = 13 [default = false]; Hint to the DataNode that the block can be a
  • getCachingStrategy
    optional .hadoop.hdfs.CachingStrategyProto cachingStrategy = 10;
  • getHeader
    required .hadoop.hdfs.ClientOperationHeaderProto header = 1;
  • getLatestGenerationStamp
    required uint64 latestGenerationStamp = 8;
  • getMaxBytesRcvd
    required uint64 maxBytesRcvd = 7;
  • getMinBytesRcvd
    required uint64 minBytesRcvd = 6;
  • getPinning
    optional bool pinning = 14 [default = false]; whether to pin the block, so Balancer won't move it.
  • getPipelineSize
    required uint32 pipelineSize = 5;
  • getRequestedChecksum
    required .hadoop.hdfs.ChecksumProto requestedChecksum = 9; The requested checksum mechanism for thi
  • getSerializedSize
  • getSource
    optional .hadoop.hdfs.DatanodeInfoProto source = 3;
  • getStage
    required .hadoop.hdfs.OpWriteBlockProto.BlockConstructionStage stage = 4;
  • getSource,
  • getStage,
  • getTargetPinningsList,
  • getTargetStorageTypesList,
  • getTargetsList,
  • hasAllowLazyPersist,
  • hasCachingStrategy,
  • hasPinning,
  • newBuilder

Popular in Java

  • Making http post requests using okhttp
  • getSupportFragmentManager (FragmentActivity)
  • findViewById (Activity)
  • orElseThrow (Optional)
    Return the contained value, if present, otherwise throw an exception to be created by the provided s
  • OutputStream (java.io)
    A writable sink for bytes.Most clients will use output streams that write data to the file system (
  • MalformedURLException (java.net)
    This exception is thrown when a program attempts to create an URL from an incorrect specification.
  • Reference (javax.naming)
  • Servlet (javax.servlet)
    Defines methods that all servlets must implement. A servlet is a small Java program that runs within
  • DataSource (javax.sql)
    An interface for the creation of Connection objects which represent a connection to a database. This
  • BasicDataSource (org.apache.commons.dbcp)
    Basic implementation of javax.sql.DataSource that is configured via JavaBeans properties. This is no
  • Top 12 Jupyter Notebook extensions
Tabnine Logo
  • Products

    Search for Java code · Search for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About Us · Contact Us · Careers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now