/**
 * Convert a client {@link Mutation} to a protobuf {@link MutationProto}, populating the
 * caller-supplied builder. No nonce is attached.
 *
 * @param type the protobuf mutation type (PUT, DELETE, ...)
 * @param mutation the client mutation to convert
 * @param builder the builder to populate and build from
 * @return a protobuf'd Mutation
 * @throws IOException if the mutation cannot be converted
 */
public static MutationProto toMutation(final MutationType type, final Mutation mutation,
    MutationProto.Builder builder) throws IOException {
  // Delegate to the nonce-aware overload using the "no nonce" sentinel.
  return toMutation(type, mutation, builder, HConstants.NO_NONCE);
}
/**
 * Convert a client {@link Mutation} to a protobuf {@link MutationProto} with no nonce.
 *
 * @param type the protobuf mutation type (PUT, DELETE, ...)
 * @param mutation the client mutation to convert
 * @return a protobuf'd Mutation
 * @throws IOException if the mutation cannot be converted
 */
public static MutationProto toMutation(final MutationType type, final Mutation mutation)
    throws IOException {
  // Forward to the nonce-aware overload with the "no nonce" sentinel.
  return toMutation(type, mutation, HConstants.NO_NONCE);
}
/**
 * Create a protocol buffer Mutate based on a client Mutation, attaching the given nonce.
 *
 * @param type the protobuf mutation type (PUT, DELETE, ...)
 * @param mutation the client mutation to convert
 * @param nonce nonce to attach to the mutation (use HConstants.NO_NONCE for none)
 * @return a protobuf'd Mutation
 * @throws IOException if the mutation cannot be converted
 */
public static MutationProto toMutation(final MutationType type, final Mutation mutation,
    final long nonce) throws IOException {
  return toMutation(type, mutation, MutationProto.newBuilder(), nonce);
}
/**
 * Apply the given mutations atomically against the rsgroup table via the multi-row
 * mutation coprocessor endpoint. Only {@link Put} and {@link Delete} are supported.
 *
 * @param mutations puts/deletes to apply as a single atomic batch
 * @throws DoNotRetryIOException if a mutation is neither a Put nor a Delete
 * @throws IOException if the coprocessor RPC fails
 */
private void multiMutate(List<Mutation> mutations) throws IOException {
  try (Table table = conn.getTable(RSGROUP_TABLE_NAME)) {
    CoprocessorRpcChannel channel = table.coprocessorService(ROW_KEY);
    MultiRowMutationProtos.MutateRowsRequest.Builder mmrBuilder =
        MultiRowMutationProtos.MutateRowsRequest.newBuilder();
    for (Mutation mutation : mutations) {
      if (mutation instanceof Put) {
        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,
            mutation));
      } else if (mutation instanceof Delete) {
        mmrBuilder.addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.DELETE,
            mutation));
      } else {
        throw new DoNotRetryIOException(
            "multiMutate doesn't support " + mutation.getClass().getName());
      }
    }
    MultiRowMutationProtos.MultiRowMutationService.BlockingInterface service =
        MultiRowMutationProtos.MultiRowMutationService.newBlockingStub(channel);
    try {
      service.mutateRows(null, mmrBuilder.build());
    } catch (ServiceException ex) {
      // BUG FIX: the converted IOException was previously created but never thrown,
      // silently swallowing RPC failures. Propagate it to the caller.
      throw ProtobufUtil.toIOException(ex);
    }
  }
}
// Decode each MutationProto in the request back into a client Mutation,
// presizing the result list to the request size.
// NOTE(review): fragment — the enclosing method and the loop's closing brace
// lie outside this view.
List<Mutation> mutations = new ArrayList<>(mutateRequestList.size());
for (MutationProto m : mutateRequestList) {
  mutations.add(ProtobufUtil.toMutation(m));
/**
 * Build a protobuf {@link MutationProto} from the given client {@link Mutation},
 * without a nonce.
 *
 * @param type the protobuf mutation type
 * @param mutation the client mutation to serialize
 * @return a protobuf'd Mutation
 * @throws IOException if conversion fails
 */
public static MutationProto toMutation(final MutationType type,
    final Mutation mutation) throws IOException {
  return toMutation(type, mutation, HConstants.NO_NONCE);
}
/**
 * Build a protobuf {@link MutationProto} from the given client {@link Mutation} using
 * the supplied builder; no nonce is attached.
 *
 * @param type the protobuf mutation type
 * @param mutation the client mutation to serialize
 * @param builder the builder to populate and build from
 * @return a protobuf'd Mutation
 * @throws IOException if conversion fails
 */
public static MutationProto toMutation(final MutationType type,
    final Mutation mutation, MutationProto.Builder builder) throws IOException {
  return toMutation(type, mutation, builder, HConstants.NO_NONCE);
}
/**
 * Verifies that two Puts on different rows, sent through the MultiRowMutationService
 * coprocessor endpoint as a single MutateRowsRequest, are both applied.
 */
@Test
public void testMultiRowMutation() throws Exception {
  LOG.info("Starting testMultiRowMutation");
  final TableName tableName = TableName.valueOf(name.getMethodName());
  final byte[] ROW1 = Bytes.toBytes("testRow1");
  // FIX: close the Table — the original leaked it.
  try (Table t = TEST_UTIL.createTable(tableName, FAMILY)) {
    Put p = new Put(ROW);
    p.addColumn(FAMILY, QUALIFIER, VALUE);
    MutationProto m1 = ProtobufUtil.toMutation(MutationType.PUT, p);
    p = new Put(ROW1);
    p.addColumn(FAMILY, QUALIFIER, VALUE);
    MutationProto m2 = ProtobufUtil.toMutation(MutationType.PUT, p);

    MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
    mrmBuilder.addMutationRequest(m1);
    mrmBuilder.addMutationRequest(m2);
    MutateRowsRequest mrm = mrmBuilder.build();

    // Invoke the coprocessor endpoint anchored on the first row.
    CoprocessorRpcChannel channel = t.coprocessorService(ROW);
    MultiRowMutationService.BlockingInterface service =
        MultiRowMutationService.newBlockingStub(channel);
    service.mutateRows(null, mrm);

    // Both rows must now carry the written value.
    Get g = new Get(ROW);
    Result r = t.get(g);
    assertEquals(0, Bytes.compareTo(VALUE, r.getValue(FAMILY, QUALIFIER)));
    g = new Get(ROW1);
    r = t.get(g);
    assertEquals(0, Bytes.compareTo(VALUE, r.getValue(FAMILY, QUALIFIER)));
  }
}
// Build a multi-row mutation request carrying both puts so the coprocessor
// applies them as one atomic batch.
// NOTE(review): put1/put2 are defined outside this view — presumably Puts on
// rows co-located in the same region; verify against the caller.
MultiRowMutationProtos.MutateRowsRequest request =
    MultiRowMutationProtos.MutateRowsRequest.newBuilder()
        .addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,
            put1))
        .addMutationRequest(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.PUT,
            put2))
        .build();
/**
 * Serialize a client Put or Delete into a {@link MutationProto}.
 *
 * @param mutation the mutation to serialize; must be a Put or a Delete
 * @return the protobuf representation of {@code mutation}
 * @throws IOException if {@code mutation} is neither a Put nor a Delete
 */
protected MutationProto toMutationProto(Mutation mutation) throws IOException {
  // Resolve the protobuf mutation type first, then convert in one place.
  final MutationType type;
  if (mutation instanceof Put) {
    type = MutationType.PUT;
  } else if (mutation instanceof Delete) {
    type = MutationType.DELETE;
  } else {
    throw new IOException("Put/Delete mutations only supported");
  }
  return org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(type, mutation);
}
}
/**
 * Convert a client Put or Delete into its protobuf {@link MutationProto} form.
 *
 * @param mutation the mutation to convert; must be a Put or a Delete
 * @return the protobuf representation of {@code mutation}
 * @throws IllegalArgumentException if {@code mutation} is neither a Put nor a Delete
 * @throws IOException if the conversion fails
 */
public static MutationProto toProto(Mutation mutation) throws IOException {
  // Guard clauses per supported subtype; anything else is a caller error.
  if (mutation instanceof Put) {
    return org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(MutationType.PUT, mutation);
  }
  if (mutation instanceof Delete) {
    return org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(MutationType.DELETE, mutation);
  }
  throw new IllegalArgumentException("Only Put and Delete are supported");
}
// Append the put, serialized as a protobuf MutationProto, to the multi-row
// mutation request being assembled.
// NOTE(review): fragment — mrmBuilder and mutation are declared outside this view.
mrmBuilder.addMutationRequest(ProtobufUtil.toMutation(
    ClientProtos.MutationProto.MutationType.PUT, mutation));
/**
 * Create a protocol buffer Mutate based on a client Mutation, carrying the given nonce.
 *
 * @param type the protobuf mutation type (PUT, DELETE, ...)
 * @param mutation the client mutation to convert
 * @param nonce nonce to attach (HConstants.NO_NONCE for none)
 * @return a protobuf'd Mutation
 * @throws IOException if the mutation cannot be converted
 */
public static MutationProto toMutation(final MutationType type, final Mutation mutation,
    final long nonce) throws IOException {
  return toMutation(type, mutation, MutationProto.newBuilder(), nonce);
}
// Mirror the increment's timestamp and time range onto the expected proto,
// then check the round-trip conversion produces an identical message.
// NOTE(review): fragment — mutateBuilder and increment are set up outside this view.
mutateBuilder.setTimestamp(increment.getTimestamp());
mutateBuilder.setTimeRange(ProtobufUtil.toTimeRange(increment.getTimeRange()));
assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.INCREMENT, increment));
/**
 * Deserialize a list of ByteString payloads back into client {@link Mutation}s.
 * Each ByteString entry is a byte array serialized from a MutationProto instance.
 *
 * @param mutations serialized MutationProto payloads
 * @return the deserialized client mutations, in input order
 * @throws IOException if a payload cannot be parsed or converted
 */
private static List<Mutation> getMutations(List<ByteString> mutations) throws IOException {
  // Presize to the known count and use the diamond operator (idiom fix).
  List<Mutation> result = new ArrayList<>(mutations.size());
  for (ByteString mutation : mutations) {
    MutationProto mProto = MutationProto.parseFrom(mutation);
    result.add(org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(mProto));
  }
  return result;
}
// Round-trip checks: the hand-built proto must equal the converted mutation.
// NOTE(review): the next two lines are tails of multi-line calls whose opening
// statements lie outside this view — presumably assertEquals(..., ...) pairs.
assertEquals(mutateBuilder.build(), ProtobufUtil.toMutation(MutationType.APPEND, append));
ProtobufUtil.toMutation(MutationType.PUT, put));
ProtobufUtil.toMutation(MutationType.DELETE, delete));
/**
 * This method shouldn't be used - you should use {@link KeyValueCodec#readKeyValue(DataInput)}
 * instead. It is the complement to {@link #writeData(DataOutput)}.
 * <p>
 * Reads, in order: the index table name, then a length-prefixed serialized
 * MutationProto, then recomputes the cached hash code. The read order must match
 * the write order exactly.
 */
@SuppressWarnings("javadoc")
public void readFields(DataInput in) throws IOException {
  // 1) table name (length-prefixed byte array)
  this.indexTableName = new ImmutableBytesPtr(Bytes.readByteArray(in));
  // 2) serialized MutationProto, decoded back into a client Mutation
  byte[] serializedMutation = Bytes.readByteArray(in);
  MutationProto proto = MutationProto.parseFrom(serializedMutation);
  this.mutation = org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(proto);
  // 3) refresh the cached hash from the freshly-read fields
  this.hashCode = calcHashCode(indexTableName, mutation);
}
/**
 * Commits the last-stats-updated time, then applies the pending stats mutations
 * atomically (single row key) via the MultiRowMutationService coprocessor endpoint.
 *
 * @return always {@code null}
 * @throws Exception if the commit or the coprocessor RPC fails
 */
@Override
public Void run() throws Exception {
  commitLastStatsUpdatedTime(statsCollector);
  if (mutations.size() > 0) {
    // All mutations target the same row, so anchor the endpoint on the first one.
    byte[] row = mutations.get(0).getRow();
    MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
    for (Mutation m : mutations) {
      mrmBuilder.addMutationRequest(ProtobufUtil.toMutation(getMutationType(m), m));
    }
    MutateRowsRequest mrm = mrmBuilder.build();
    CoprocessorRpcChannel channel = statsWriterTable.coprocessorService(row);
    MultiRowMutationService.BlockingInterface service =
        MultiRowMutationService.newBlockingStub(channel);
    try {
      service.mutateRows(null, mrm);
    } catch (ServiceException ex) {
      // BUG FIX: the converted IOException was previously created but never thrown,
      // so a failed stats write went unnoticed. Propagate it.
      throw ProtobufUtil.toIOException(ex);
    }
  }
  return null;
}
});