// Thrift StandardScheme read: field id 1 (TYPE). Deserializes an i32 and maps it to the
// LockType enum via findByValue (which can return null for unrecognized ids — see the
// null check pattern used elsewhere in this codebase). The dangling `} else {` branch
// (skip-field handling in generated code) continues beyond this fragment.
case 1: // TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.type = org.apache.hadoop.hive.metastore.api.LockType.findByValue(iprot.readI32()); struct.setTypeIsSet(true); } else {
// Thrift-generated hashCode fragment: records whether `type` is set, and if so folds the
// enum's integer value (not the enum object) into the hash input list.
list.add(present_type); if (present_type) list.add(type.getValue());
// Human-readable lock description: formats extLockId and txnId through JavaUtils helpers
// and null-guards the state/type enums. NOTE(review): a sibling toString() in this SOURCE
// (L17) formats txnId with plain Long.toString instead of JavaUtils.txnIdToString —
// confirm which output format is intended, or whether the two classes diverge on purpose.
// The trailing `private boolean isDbLock() {` opens a method whose body is not visible here.
@Override public String toString() { return JavaUtils.lockIdToString(extLockId) + " intLockId:" + intLockId + " " + JavaUtils.txnIdToString(txnId) + " db:" + db + " table:" + table + " partition:" + partition + " state:" + (state == null ? "null" : state.toString()) + " type:" + (type == null ? "null" : type.toString()); } private boolean isDbLock() {
// Validates the lock type reported by a table's storage handler: round-trips the enum's
// integer value through LockType.findByValue and rejects (IllegalArgumentException) any
// value the metastore enum does not recognize. Message includes the db.table for context.
"Thought all the non native tables have an instance of storage handler"); LockType lockType = storageHandler.getLockType(output); if (null == LockType.findByValue(lockType.getValue())) { throw new IllegalArgumentException(String .format("Lock type [%s] for Database.Table [%s.%s] is unknown", lockType, t.getDbName(),
// Thrift-generated equals fragment for the `type` field: the fields are equal only when
// both are present AND the enum values compare equal. (Presence flags are computed by the
// surrounding generated code, not visible in this fragment.)
if (!(this_present_type && that_present_type)) return false; if (!this.type.equals(that.type)) return false;
// Identical generated equals fragment from another Thrift struct with a `type` field.
if (!(this_present_type && that_present_type)) return false; if (!this.type.equals(that.type)) return false;
// Thrift-generated hashCode fragment (same pattern as elsewhere in this SOURCE):
// presence flag first, then the enum's integer value when set.
list.add(present_type); if (present_type) list.add(type.getValue());
// Thrift StandardScheme read: field id 6 (TYPE) — same i32 → LockType.findByValue decode
// as the field-1 variant, from a different generated struct. `} else {` continues into
// the generated skip-unknown-field branch outside this fragment.
case 6: // TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.type = org.apache.hadoop.hive.metastore.api.LockType.findByValue(iprot.readI32()); struct.setTypeIsSet(true); } else {
// Lock-dump output: writes the lock type's string form, a separator, then the txn id —
// emitting the literal "NULL" when txnid is 0 (0 is treated as "no transaction").
os.writeBytes(lock.getType().toString()); os.write(separator); os.writeBytes((lock.getTxnid() == 0) ? "NULL" : Long.toString(lock.getTxnid()));
// Thrift-generated equals fragment for `type`: both-present check, then enum equality.
if (!(this_present_type && that_present_type)) return false; if (!this.type.equals(that.type)) return false;
// Thrift StandardScheme write: serializes the optional `type` field only when non-null,
// encoding the enum as its i32 value between fieldBegin/fieldEnd markers.
if (struct.type != null) { oprot.writeFieldBegin(TYPE_FIELD_DESC); oprot.writeI32(struct.type.getValue()); oprot.writeFieldEnd();
// TupleScheme read for LockComponent: unlike StandardScheme, tuple encoding carries no
// per-field headers — `type` and `level` are read unconditionally and in declaration
// order (they are required fields), each decoded from i32 via the enum's findByValue.
// Fragment ends mid-method; remaining fields are read outside this view.
@Override public void read(org.apache.thrift.protocol.TProtocol prot, LockComponent struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; struct.type = org.apache.hadoop.hive.metastore.api.LockType.findByValue(iprot.readI32()); struct.setTypeIsSet(true); struct.level = org.apache.hadoop.hive.metastore.api.LockLevel.findByValue(iprot.readI32());
// Duplicate of the lock-dump writer fragment above: lock type, separator, then txn id
// with 0 rendered as the literal "NULL".
os.writeBytes(lock.getType().toString()); os.write(separator); os.writeBytes((lock.getTxnid() == 0) ? "NULL" : Long.toString(lock.getTxnid()));
// Thrift-generated equals fragment for `type`: both-present check, then enum equality.
if (!(this_present_type && that_present_type)) return false; if (!this.type.equals(that.type)) return false;
// Thrift write fragment: encodes the `type` enum as its i32 value and closes the field.
// The enclosing null-guard / writeFieldBegin is outside this fragment (cf. the guarded
// variant at L11) — assumes `struct.type` is non-null here; confirm against the caller.
oprot.writeI32(struct.type.getValue()); oprot.writeFieldEnd();
// Thrift read fragment (tuple-style, no per-field headers visible): decodes `state`
// (LockState) and `type` (LockType) from consecutive i32s, then `lastheartbeat` as i64,
// setting each field's isSet flag after assignment.
struct.state = org.apache.hadoop.hive.metastore.api.LockState.findByValue(iprot.readI32()); struct.setStateIsSet(true); struct.type = org.apache.hadoop.hive.metastore.api.LockType.findByValue(iprot.readI32()); struct.setTypeIsSet(true); struct.lastheartbeat = iprot.readI64();
// Human-readable lock description. NOTE(review): this variant prints the txn id as
// " txnId:" + Long.toString(txnId), while the sibling toString() at L3 delegates to
// JavaUtils.txnIdToString(txnId) — the two likely render differently (e.g. for a
// sentinel "no txn" value); confirm whether this divergence is intentional.
// The trailing `private boolean isDbLock() {` opens a method not visible in this fragment.
@Override public String toString() { return JavaUtils.lockIdToString(extLockId) + " intLockId:" + intLockId + " txnId:" + Long.toString (txnId) + " db:" + db + " table:" + table + " partition:" + partition + " state:" + (state == null ? "null" : state.toString()) + " type:" + (type == null ? "null" : type.toString()); } private boolean isDbLock() {
// Thrift-generated equals fragment for `type`: both-present check, then enum equality.
if (!(this_present_type && that_present_type)) return false; if (!this.type.equals(that.type)) return false;
// TupleScheme write for LockComponent — mirror of the tuple read at L12: required fields
// `type` and `level` are written unconditionally as i32 enum values, then `dbname` as a
// string, with no per-field headers. Fragment ends mid-method; optional fields follow
// outside this view. Assumes type/level/dbname are non-null (required in the IDL) —
// a null here would NPE; confirm upstream validation.
@Override public void write(org.apache.thrift.protocol.TProtocol prot, LockComponent struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; oprot.writeI32(struct.type.getValue()); oprot.writeI32(struct.level.getValue()); oprot.writeString(struct.dbname);
// Thrift StandardScheme read: field id 1 (TYPE) — same decode as L1 but with LockType
// referenced unqualified (imported in its file rather than fully qualified). The
// dangling `} else {` continues into the generated skip-field branch.
case 1: // TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.type = LockType.findByValue(iprot.readI32()); struct.setTypeIsSet(true); } else {