public Byte getByte(String fieldName, HCatSchema recordSchema) throws HCatException { //TINYINT return (Byte) get(fieldName, recordSchema, Byte.class); }
/**
 * Exposes all fields of the given {@link HCatRecord} as a flat list,
 * as required by the struct object-inspector contract.
 */
@Override
public List<Object> getStructFieldsDataAsList(Object o) {
  HCatRecord record = (HCatRecord) o;
  return record.getAll();
}
/**
 * Stores a DECIMAL value into the named field of this record.
 *
 * @param fieldName    name of the field to write
 * @param recordSchema schema describing this record's layout
 * @param value        the decimal value to store
 * @throws HCatException if the field cannot be resolved against the schema
 */
public void setDecimal(String fieldName, HCatSchema recordSchema, HiveDecimal value) throws HCatException {
  set(fieldName, recordSchema, value);
}

// NOTE(review): the body of getDecimal is cut off in this view; only its
// signature is visible here.
public HiveDecimal getDecimal(String fieldName, HCatSchema recordSchema) throws HCatException {
@Override public void write(WritableComparable<?> key, HCatRecord value) throws IOException, InterruptedException { LocalFileWriter localFileWriter = getLocalFileWriter(value); RecordWriter localWriter = localFileWriter.getLocalWriter(); ObjectInspector localObjectInspector = localFileWriter.getLocalObjectInspector(); AbstractSerDe localSerDe = localFileWriter.getLocalSerDe(); OutputJobInfo localJobInfo = localFileWriter.getLocalJobInfo(); for (Integer colToDel : partColsToDel) { value.remove(colToDel); } try { // The key given by user is ignored - in case of Parquet we need to supply null Object keyToWrite = localWriter instanceof ParquetRecordWriterWrapper ? null : NullWritable.get(); localWriter.write(keyToWrite, localSerDe.serialize(value.getAll(), localObjectInspector)); } catch (SerDeException e) { throw new IOException("Failed to serialize object", e); } }
/**
 * Round-trips every primitive-typed field of a record through the
 * type-specific getters and setters and checks the copy equals the source.
 *
 * @throws HCatException on schema or field access failure
 */
public void testGetSetByType1() throws HCatException {
  HCatRecord source = getHCatRecords()[0];
  HCatRecord copy = new DefaultHCatRecord(source.size());
  HCatSchema schema = HCatSchemaUtils.getHCatSchema(
      "a:tinyint,b:smallint,c:int,d:bigint,e:float,f:double,g:boolean,h:string,i:binary,j:string");

  // Copy each field using its typed accessor pair.
  copy.setByte("a", schema, source.getByte("a", schema));
  copy.setShort("b", schema, source.getShort("b", schema));
  copy.setInteger("c", schema, source.getInteger("c", schema));
  copy.setLong("d", schema, source.getLong("d", schema));
  copy.setFloat("e", schema, source.getFloat("e", schema));
  copy.setDouble("f", schema, source.getDouble("f", schema));
  copy.setBoolean("g", schema, source.getBoolean("g", schema));
  copy.setString("h", schema, source.getString("h", schema));
  copy.setByteArray("i", schema, source.getByteArray("i", schema));
  copy.setString("j", schema, source.getString("j", schema));

  Assert.assertTrue(HCatDataCheckUtil.recordsEqual(copy, source));
}
/**
 * Test type specific get/set methods on HCatRecord types added in Hive 13
 * (decimal, char, varchar, date, timestamp).
 *
 * @throws HCatException on schema or field access failure
 */
public void testGetSetByType3() throws HCatException {
  HCatRecord inpRec = getHCat13TypesRecord();
  HCatRecord newRec = new DefaultHCatRecord(inpRec.size());
  HCatSchema hsch = HCatSchemaUtils.getHCatSchema(
      "a:decimal(5,2),b:char(10),c:varchar(20),d:date,e:timestamp");
  newRec.setDecimal("a", hsch, inpRec.getDecimal("a", hsch));
  newRec.setChar("b", hsch, inpRec.getChar("b", hsch));
  newRec.setVarchar("c", hsch, inpRec.getVarchar("c", hsch));
  newRec.setDate("d", hsch, inpRec.getDate("d", hsch));
  newRec.setTimestamp("e", hsch, inpRec.getTimestamp("e", hsch));
  // The original test set the fields but never verified anything; check that
  // every value written via the typed setters reads back unchanged.
  Assert.assertEquals(inpRec.getDecimal("a", hsch), newRec.getDecimal("a", hsch));
  Assert.assertEquals(inpRec.getChar("b", hsch), newRec.getChar("b", hsch));
  Assert.assertEquals(inpRec.getVarchar("c", hsch), newRec.getVarchar("c", hsch));
  Assert.assertEquals(inpRec.getDate("d", hsch), newRec.getDate("d", hsch));
  Assert.assertEquals(inpRec.getTimestamp("e", hsch), newRec.getTimestamp("e", hsch));
}
/**
 * Round-trips the complex-typed fields (binary, map, array, struct) through
 * the type-specific getters and setters and checks the copy equals the source.
 *
 * @throws HCatException on schema or field access failure
 */
public void testGetSetByType2() throws HCatException {
  HCatRecord source = getGetSet2InpRec();
  HCatRecord copy = new DefaultHCatRecord(source.size());
  HCatSchema schema =
      HCatSchemaUtils.getHCatSchema("a:binary,b:map<string,string>,c:array<int>,d:struct<i:int>");

  // Copy each field using its typed accessor pair.
  copy.setByteArray("a", schema, source.getByteArray("a", schema));
  copy.setMap("b", schema, source.getMap("b", schema));
  copy.setList("c", schema, source.getList("c", schema));
  copy.setStruct("d", schema, source.getStruct("d", schema));

  Assert.assertTrue(HCatDataCheckUtil.recordsEqual(copy, source));
}
// NOTE(review): fragment — the enclosing loop and method are not visible in
// this view; braces are intentionally unbalanced here.
// Captures the requested field of the record whose "id" column matches the
// target id, then stops scanning.
if (rec.getInteger("id", schema).equals(id)) { actualVal = rec.get(fieldName, schema); break;
/**
 * LazyHCatRecord is read-only: remove() must signal
 * {@link UnsupportedOperationException}.
 */
@Test
public void testRemove() throws Exception {
  HCatRecord record = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
  try {
    record.remove(0);
    Assert.assertTrue("Expected UnsupportedOperationException from remove()", false);
  } catch (UnsupportedOperationException expected) {
    // Expected: the lazy record rejects mutation.
  }
}
/**
 * The lazy record reports the field count of its backing record.
 */
@Test
public void testSize() throws Exception {
  HCatRecord record = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
  // The fixture record from getHCatRecord() carries four fields.
  Assert.assertEquals(4, record.size());
}
/**
 * Orders records by their integer "id" field. Records whose ids cannot be
 * read are treated as equal (the failure is logged).
 */
@Override
public int compare(HCatRecord hr1, HCatRecord hr2) {
  try {
    // Integer.compare avoids the overflow that plain subtraction can hit
    // when ids have opposite signs near Integer.MIN/MAX_VALUE.
    return Integer.compare(hr1.getInteger("id", schema), hr2.getInteger("id", schema));
  } catch (Exception e) {
    // Pass the throwable to the logger so the stack trace is preserved
    // instead of only the concatenated toString().
    LOG.warn("Exception caught while sorting hcat records", e);
  }
  return 0;
} });
@Override public void write(WritableComparable<?> key, HCatRecord value) throws IOException, InterruptedException { LocalFileWriter localFileWriter = getLocalFileWriter(value); RecordWriter localWriter = localFileWriter.getLocalWriter(); ObjectInspector localObjectInspector = localFileWriter.getLocalObjectInspector(); SerDe localSerDe = localFileWriter.getLocalSerDe(); OutputJobInfo localJobInfo = localFileWriter.getLocalJobInfo(); for (Integer colToDel : partColsToDel) { value.remove(colToDel); } // The key given by user is ignored try { localWriter.write(NullWritable.get(), localSerDe.serialize(value.getAll(), localObjectInspector)); } catch (SerDeException e) { throw new IOException("Failed to serialize object", e); } }
private void runsInSlave(ReaderContext cntxt, int slaveNum) throws HCatException { HCatReader reader = DataTransferFactory.getHCatReader(cntxt, slaveNum); Iterator<HCatRecord> itr = reader.read(); int i = 1; while (itr.hasNext()) { HCatRecord read = itr.next(); HCatRecord written = getRecord(i++); // Argh, HCatRecord doesnt implement equals() Assert.assertTrue("Read: " + read.get(0) + "Written: " + written.get(0), written.get(0).equals(read.get(0))); Assert.assertTrue("Read: " + read.get(1) + "Written: " + written.get(1), written.get(1).equals(read.get(1))); Assert.assertEquals(2, read.size()); } //Assert.assertFalse(itr.hasNext()); }
/**
 * Replaces this record's contents with the field list of another record.
 *
 * NOTE(review): this stores the other record's backing list directly rather
 * than copying it — after copy(), mutation through either record is visible
 * in both. Confirm callers expect this shallow aliasing before changing it
 * to a defensive copy.
 */
@Override
public void copy(HCatRecord r) throws HCatException {
  this.contents = r.getAll();
}
/**
 * Reads a CHAR(n) field from this record.
 *
 * @param fieldName    name of the field to read
 * @param recordSchema schema describing this record's layout
 * @return the field value as a {@link HiveChar}; may be null
 * @throws HCatException if the field cannot be resolved against the schema
 */
public HiveChar getChar(String fieldName, HCatSchema recordSchema) throws HCatException {
  return (HiveChar) get(fieldName, recordSchema, HiveChar.class);
}

// NOTE(review): the body of setVarchar is cut off in this view; only its
// signature is visible here.
public void setVarchar(String fieldName, HCatSchema recordSchema, HiveVarchar value)
/**
 * Stores a CHAR(n) value into the named field of this record.
 *
 * @param fieldName    name of the field to write
 * @param recordSchema schema describing this record's layout
 * @param value        the char value to store
 * @throws HCatException if the field cannot be resolved against the schema
 */
public void setChar(String fieldName, HCatSchema recordSchema, HiveChar value) throws HCatException {
  set(fieldName, recordSchema, value);
}

// NOTE(review): the body of getChar is cut off in this view; only its
// signature is visible here.
public HiveChar getChar(String fieldName, HCatSchema recordSchema) throws HCatException {
@Override public void write(WritableComparable<?> key, HCatRecord value) throws IOException, InterruptedException { LocalFileWriter localFileWriter = getLocalFileWriter(value); RecordWriter localWriter = localFileWriter.getLocalWriter(); ObjectInspector localObjectInspector = localFileWriter.getLocalObjectInspector(); AbstractSerDe localSerDe = localFileWriter.getLocalSerDe(); OutputJobInfo localJobInfo = localFileWriter.getLocalJobInfo(); for (Integer colToDel : partColsToDel) { value.remove(colToDel); } try { // The key given by user is ignored - in case of Parquet we need to supply null Object keyToWrite = localWriter instanceof ParquetRecordWriterWrapper ? null : NullWritable.get(); localWriter.write(keyToWrite, localSerDe.serialize(value.getAll(), localObjectInspector)); } catch (SerDeException e) { throw new IOException("Failed to serialize object", e); } }
/**
 * Appends the string form of every field in the record to {@code rowValues}.
 * Null fields are appended as null (not the string "null").
 *
 * @param record    record whose fields are rendered
 * @param rowValues destination list, mutated in place
 * @return the same {@code rowValues} list, for call chaining
 */
public static List<String> getRowAsList(HCatRecord record, List<String> rowValues) {
  for (Object field : record.getAll()) {
    rowValues.add(field == null ? null : field.toString());
  }
  return rowValues;
}