/**
 * Gets the table schema.
 *
 * @return the schema covering all columns of the table backing this split
 */
public HCatSchema getTableSchema() {
  assert this.partitionInfo.getTableInfo() != null
      : "TableInfo should have been set at this point.";
  return this.partitionInfo.getTableInfo().getAllColumns();
}
/**
 * Deserialize this object, decompressing the partitions which can exceed the
 * allowed jobConf size.
 * @see <a href="https://issues.apache.org/jira/browse/HCATALOG-453">HCATALOG-453</a>
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream ois)
    throws IOException, ClassNotFoundException {
  // Restore all non-transient fields written by defaultWriteObject().
  ois.defaultReadObject();
  //Next object in the stream will be a byte array of partition information which is compressed
  // NOTE(review): the intermediate ObjectInputStream over the byte array presumably
  // mirrors the block-data framing produced by the corresponding writeObject —
  // verify against that method; the layering order here must not be changed.
  ObjectInputStream pis = new ObjectInputStream(new ByteArrayInputStream(
      (byte[])ois.readObject()));
  ObjectInputStream partInfoReader =
      new ObjectInputStream(new InflaterInputStream(pis));
  partitions = (List<PartInfo>)partInfoReader.readObject();
  if (partitions != null) {
    for (PartInfo partInfo : partitions) {
      if (partInfo.getTableInfo() == null) {
        // Re-attach the shared table metadata stripped before serialization.
        partInfo.setTableInfo(this.tableInfo);
      }
    }
  }
  //Closing only the reader used for decompression byte stream
  // Safe to close here: the chain bottoms out in the in-memory byte array,
  // so closing does not cascade to the caller-owned ois.
  partInfoReader.close();
}
// end of enclosing class (declaration lies outside this view)
}
/**
 * Instantiates the SerDe declared by the storage handler and initializes it
 * for the given split's table and partition schema.
 *
 * @param hcatSplit the split whose partition info drives initialization
 * @param storageHandler supplies the SerDe class and its configuration
 * @param taskContext current task context, used for the Hadoop configuration
 * @throws IOException if the SerDe cannot be initialized
 */
private void createDeserializer(HCatSplit hcatSplit, HiveStorageHandler storageHandler,
    TaskAttemptContext taskContext) throws IOException {
  deserializer = ReflectionUtils.newInstance(
      storageHandler.getSerDeClass(), taskContext.getConfiguration());
  try {
    InternalUtil.initializeDeserializer(
        deserializer,
        storageHandler.getConf(),
        hcatSplit.getPartitionInfo().getTableInfo(),
        hcatSplit.getPartitionInfo().getPartitionSchema());
  } catch (SerDeException e) {
    // Wrap but keep the cause so the failing SerDe is identifiable upstream.
    throw new IOException("Failed initializing deserializer "
        + storageHandler.getSerDeClass().getName(), e);
  }
}
// Fragment (truncated in this view; the closing brace of the if lies outside it).
// Backfills the split's PartInfo with table metadata when it was omitted from
// the serialized split — presumably to keep split payloads small; TODO confirm.
PartInfo partitionInfo = hcatSplit.getPartitionInfo();
if (partitionInfo.getTableInfo() == null) {
  partitionInfo.setTableInfo(
      HCatUtil.getLastInputJobInfosFromConf(taskContext.getConfiguration()).getTableInfo());
/**
 * Gets the table schema.
 *
 * @return the schema covering all columns of the table backing this split
 */
public HCatSchema getTableSchema() {
  assert this.partitionInfo.getTableInfo() != null
      : "TableInfo should have been set at this point.";
  return this.partitionInfo.getTableInfo().getAllColumns();
}
/**
 * Gets the table schema.
 *
 * @return the schema covering all columns of the table backing this split
 */
public HCatSchema getTableSchema() {
  assert this.partitionInfo.getTableInfo() != null
      : "TableInfo should have been set at this point.";
  return this.partitionInfo.getTableInfo().getAllColumns();
}
/**
 * Gets the table schema.
 *
 * @return the schema covering all columns of the table backing this split
 */
public HCatSchema getTableSchema() {
  assert this.partitionInfo.getTableInfo() != null
      : "TableInfo should have been set at this point.";
  return this.partitionInfo.getTableInfo().getAllColumns();
}
/**
 * Gets the table schema.
 *
 * @return the schema covering all columns of the table backing this split
 */
public HCatSchema getTableSchema() {
  assert this.partitionInfo.getTableInfo() != null
      : "TableInfo should have been set at this point.";
  return this.partitionInfo.getTableInfo().getAllColumns();
}
/**
 * Deserialize this object, decompressing the partitions which can exceed the
 * allowed jobConf size.
 *
 * <p>The partition list is read through an {@link InflaterInputStream} layered
 * over the default serialization stream, mirroring the compression done on
 * write.</p>
 *
 * @param ois the stream this object is being restored from
 * @throws IOException if the underlying stream cannot be read
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 * @see <a href="https://issues.apache.org/jira/browse/HCATALOG-453">HCATALOG-453</a>
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream ois)
    throws IOException, ClassNotFoundException {
  ois.defaultReadObject();
  ObjectInputStream partInfoReader =
      new ObjectInputStream(new InflaterInputStream(ois));
  partitions = (List<PartInfo>)partInfoReader.readObject();
  // Guard against a null list in the stream; iterating it directly would NPE.
  if (partitions != null) {
    for (PartInfo partInfo : partitions) {
      if (partInfo.getTableInfo() == null) {
        // Re-attach the shared table metadata stripped before serialization.
        partInfo.setTableInfo(this.tableInfo);
      }
    }
  }
  // Deliberately not closing partInfoReader: closing would cascade through the
  // InflaterInputStream to the caller-owned ois.
}
}
/**
 * Deserialize this object, decompressing the partitions which can exceed the
 * allowed jobConf size.
 *
 * <p>The partition list is read through an {@link InflaterInputStream} layered
 * over the default serialization stream, mirroring the compression done on
 * write.</p>
 *
 * @param ois the stream this object is being restored from
 * @throws IOException if the underlying stream cannot be read
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 * @see <a href="https://issues.apache.org/jira/browse/HCATALOG-453">HCATALOG-453</a>
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream ois)
    throws IOException, ClassNotFoundException {
  ois.defaultReadObject();
  ObjectInputStream partInfoReader =
      new ObjectInputStream(new InflaterInputStream(ois));
  partitions = (List<PartInfo>)partInfoReader.readObject();
  // Guard against a null list in the stream; iterating it directly would NPE.
  if (partitions != null) {
    for (PartInfo partInfo : partitions) {
      if (partInfo.getTableInfo() == null) {
        // Re-attach the shared table metadata stripped before serialization.
        partInfo.setTableInfo(this.tableInfo);
      }
    }
  }
  // Deliberately not closing partInfoReader: closing would cascade through the
  // InflaterInputStream to the caller-owned ois.
}
}
/**
 * Deserialize this object, decompressing the partitions which can exceed the
 * allowed jobConf size.
 *
 * <p>The partition list is read through an {@link InflaterInputStream} layered
 * over the default serialization stream, mirroring the compression done on
 * write.</p>
 *
 * @param ois the stream this object is being restored from
 * @throws IOException if the underlying stream cannot be read
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 * @see <a href="https://issues.apache.org/jira/browse/HCATALOG-453">HCATALOG-453</a>
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream ois)
    throws IOException, ClassNotFoundException {
  ois.defaultReadObject();
  ObjectInputStream partInfoReader =
      new ObjectInputStream(new InflaterInputStream(ois));
  partitions = (List<PartInfo>)partInfoReader.readObject();
  // Guard against a null list in the stream; iterating it directly would NPE.
  if (partitions != null) {
    for (PartInfo partInfo : partitions) {
      if (partInfo.getTableInfo() == null) {
        // Re-attach the shared table metadata stripped before serialization.
        partInfo.setTableInfo(this.tableInfo);
      }
    }
  }
  // Deliberately not closing partInfoReader: closing would cascade through the
  // InflaterInputStream to the caller-owned ois.
}
}
/**
 * Deserialize this object, decompressing the partitions which can exceed the
 * allowed jobConf size.
 *
 * @param ois the stream this object is being restored from
 * @throws IOException if the underlying stream cannot be read
 * @throws ClassNotFoundException if a serialized class cannot be resolved
 * @see <a href="https://issues.apache.org/jira/browse/HCATALOG-453">HCATALOG-453</a>
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream ois)
    throws IOException, ClassNotFoundException {
  ois.defaultReadObject();
  // The partition list was compressed on write; inflate it back out of the
  // default serialization stream.
  ObjectInputStream inflatedStream =
      new ObjectInputStream(new InflaterInputStream(ois));
  partitions = (List<PartInfo>)inflatedStream.readObject();
  if (partitions != null) {
    for (PartInfo part : partitions) {
      // Re-attach the shared table metadata stripped before serialization.
      if (part.getTableInfo() == null) {
        part.setTableInfo(this.tableInfo);
      }
    }
  }
}
// end of enclosing class (declaration lies outside this view)
}
/**
 * Instantiates the SerDe declared by the storage handler and initializes it
 * for the given split's table and partition schema.
 *
 * @param hcatSplit the split whose partition info drives initialization
 * @param storageHandler supplies the SerDe class and its configuration
 * @param taskContext current task context, used for the Hadoop configuration
 * @throws IOException if the SerDe cannot be initialized
 */
private void createDeserializer(HCatSplit hcatSplit, HiveStorageHandler storageHandler,
    TaskAttemptContext taskContext) throws IOException {
  deserializer = ReflectionUtils.newInstance(
      storageHandler.getSerDeClass(), taskContext.getConfiguration());
  try {
    InternalUtil.initializeDeserializer(
        deserializer,
        storageHandler.getConf(),
        hcatSplit.getPartitionInfo().getTableInfo(),
        hcatSplit.getPartitionInfo().getPartitionSchema());
  } catch (SerDeException e) {
    // Wrap but keep the cause so the failing SerDe is identifiable upstream.
    throw new IOException("Failed initializing deserializer "
        + storageHandler.getSerDeClass().getName(), e);
  }
}
/**
 * Instantiates the SerDe declared by the storage handler and initializes it
 * for the given split's table and partition schema.
 *
 * @param hcatSplit the split whose partition info drives initialization
 * @param storageHandler supplies the SerDe class and its configuration
 * @param taskContext current task context, used for the Hadoop configuration
 * @throws IOException if the SerDe cannot be initialized
 */
private void createDeserializer(HCatSplit hcatSplit, HiveStorageHandler storageHandler,
    TaskAttemptContext taskContext) throws IOException {
  deserializer = ReflectionUtils.newInstance(
      storageHandler.getSerDeClass(), taskContext.getConfiguration());
  try {
    InternalUtil.initializeDeserializer(
        deserializer,
        storageHandler.getConf(),
        hcatSplit.getPartitionInfo().getTableInfo(),
        hcatSplit.getPartitionInfo().getPartitionSchema());
  } catch (SerDeException e) {
    // Wrap but keep the cause so the failing SerDe is identifiable upstream.
    throw new IOException("Failed initializing deserializer "
        + storageHandler.getSerDeClass().getName(), e);
  }
}
/**
 * Instantiates the SerDe declared by the storage handler and initializes it
 * for the given split's table and partition schema.
 *
 * @param hcatSplit the split whose partition info drives initialization
 * @param storageHandler supplies the SerDe class and its configuration
 * @param taskContext current task context, used for the Hadoop configuration
 * @throws IOException if the SerDe cannot be initialized
 */
private void createDeserializer(HCatSplit hcatSplit, HiveStorageHandler storageHandler,
    TaskAttemptContext taskContext) throws IOException {
  deserializer = ReflectionUtils.newInstance(
      storageHandler.getSerDeClass(), taskContext.getConfiguration());
  try {
    InternalUtil.initializeDeserializer(
        deserializer,
        storageHandler.getConf(),
        hcatSplit.getPartitionInfo().getTableInfo(),
        hcatSplit.getPartitionInfo().getPartitionSchema());
  } catch (SerDeException e) {
    // Wrap but keep the cause so the failing SerDe is identifiable upstream.
    throw new IOException("Failed initializing deserializer "
        + storageHandler.getSerDeClass().getName(), e);
  }
}
/**
 * Instantiates the SerDe declared by the storage handler and initializes it
 * for the given split's table and partition schema.
 *
 * @param hcatSplit the split whose partition info drives initialization
 * @param storageHandler supplies the SerDe class and its configuration
 * @param taskContext current task context, used for the Hadoop configuration
 * @throws IOException if the SerDe cannot be initialized
 */
private void createDeserializer(HCatSplit hcatSplit, HiveStorageHandler storageHandler,
    TaskAttemptContext taskContext) throws IOException {
  deserializer = ReflectionUtils.newInstance(
      storageHandler.getSerDeClass(), taskContext.getConfiguration());
  try {
    InternalUtil.initializeDeserializer(
        deserializer,
        storageHandler.getConf(),
        hcatSplit.getPartitionInfo().getTableInfo(),
        hcatSplit.getPartitionInfo().getPartitionSchema());
  } catch (SerDeException e) {
    // Wrap but keep the cause so the failing SerDe is identifiable upstream.
    throw new IOException("Failed initializing deserializer "
        + storageHandler.getSerDeClass().getName(), e);
  }
}
// Fragment (truncated in this view; closing parens/braces lie outside it).
// Backfills the split's PartInfo with table metadata recovered by deserializing
// the job info stashed in the configuration under HCAT_KEY_JOB_INFO —
// presumably because TableInfo is omitted from serialized splits; TODO confirm.
PartInfo partitionInfo = hcatSplit.getPartitionInfo();
if (partitionInfo.getTableInfo() == null) {
  partitionInfo.setTableInfo(((InputJobInfo)HCatUtil.deserialize(
      taskContext.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO)
// Fragment (truncated in this view; closing parens/braces lie outside it).
// Backfills the split's PartInfo with table metadata recovered by deserializing
// the job info stashed in the configuration under HCAT_KEY_JOB_INFO —
// presumably because TableInfo is omitted from serialized splits; TODO confirm.
PartInfo partitionInfo = hcatSplit.getPartitionInfo();
if (partitionInfo.getTableInfo() == null) {
  partitionInfo.setTableInfo(((InputJobInfo)HCatUtil.deserialize(
      taskContext.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO)
// Fragment (truncated in this view; closing parens/braces lie outside it).
// Backfills the split's PartInfo with table metadata recovered by deserializing
// the job info stashed in the configuration under HCAT_KEY_JOB_INFO —
// presumably because TableInfo is omitted from serialized splits; TODO confirm.
PartInfo partitionInfo = hcatSplit.getPartitionInfo();
if (partitionInfo.getTableInfo() == null) {
  partitionInfo.setTableInfo(((InputJobInfo)HCatUtil.deserialize(
      taskContext.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO)
// Fragment (truncated in this view; closing parens/braces lie outside it).
// Backfills the split's PartInfo with table metadata recovered by deserializing
// the job info stashed in the configuration under HCAT_KEY_JOB_INFO —
// presumably because TableInfo is omitted from serialized splits; TODO confirm.
PartInfo partitionInfo = hcatSplit.getPartitionInfo();
if (partitionInfo.getTableInfo() == null) {
  partitionInfo.setTableInfo(((InputJobInfo)HCatUtil.deserialize(
      taskContext.getConfiguration().get(HCatConstants.HCAT_KEY_JOB_INFO)