/**
 * Enqueue the most recent record read and dequeue the earliest result in the
 * circular buffer.
 *
 * @param job          current job configuration
 * @param recordreader record reader supplying the next record
 * @param key          key of the current reading record (receives the dequeued key)
 * @param value        value of the current reading record (receives the dequeued value)
 * @return false if the end of file has been reached, true otherwise
 * @throws IOException if the underlying reader fails
 */
public boolean updateBuffer(JobConf job, RecordReader recordreader,
    WritableComparable key, Writable value) throws IOException {
  // Copy the oldest buffered pair out into the caller-supplied key/value
  // objects (ReflectionUtils.copy populates its last argument).
  key = ReflectionUtils.copy(job, (WritableComparable) buffer.get(cur).getFirst(), key);
  value = ReflectionUtils.copy(job, (Writable) buffer.get(cur).getSecond(), value);
  // Reuse the just-drained slot to hold the next record from the reader.
  boolean hasMore = recordreader.next(buffer.get(cur).getFirst(),
      buffer.get(cur).getSecond());
  if (hasMore) {
    // Advance the circular cursor only when a fresh record was actually read.
    cur = (cur + 1) % buffer.size();
  }
  return hasMore;
}
/**
 * Enqueue most recent record read, and dequeue earliest result in the queue.
 *
 * @param job
 *          Current job configuration.
 *
 * @param recordreader
 *          Record reader.
 *
 * @param key
 *          Key of current reading record.
 *
 * @param value
 *          Value of current reading record.
 *
 * @return Return false if reaches the end of file, otherwise return true.
 */
public boolean updateBuffer(JobConf job, RecordReader recordreader,
    WritableComparable key, Writable value) throws IOException {
  // Copy the oldest buffered pair into the caller-supplied key/value objects;
  // ReflectionUtils.copy fills its last argument and returns it.
  key = ReflectionUtils.copy(job, (WritableComparable) buffer.get(cur).getFirst(), key);
  value = ReflectionUtils.copy(job, (Writable) buffer.get(cur).getSecond(), value);
  // Overwrite the drained slot with the next record from the reader.
  boolean notEOF = recordreader.next(buffer.get(cur).getFirst(),
      buffer.get(cur).getSecond());
  if (notEOF) {
    // Advance the circular cursor only when a new record was read; the
    // pre-increment's side effect is overwritten by the modulo assignment,
    // so this is equivalent to (cur + 1) % buffer.size().
    cur = (++cur) % buffer.size();
  }
  return notEOF;
}
/**
 * Evaluates an expression against a partition's column values and converts
 * the result to a plain Java object.
 *
 * @param pair          the inspector/evaluator pair: the inspector converts
 *                      the evaluated result, the evaluator computes it
 * @param partColValues the partition column values to evaluate against
 * @return the evaluated result as a primitive Java object
 * @throws HiveException if evaluation fails
 */
static synchronized public Object evaluateExprOnPart(
    ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> pair,
    Object partColValues) throws HiveException {
  PrimitiveObjectInspector inspector = pair.getFirst();
  ExprNodeEvaluator evaluator = pair.getSecond();
  Object evaluated = evaluator.evaluate(partColValues);
  return inspector.getPrimitiveJavaObject(evaluated);
}
}
/**
 * Evaluates an expression against a partition's column values and converts
 * the result to a plain Java object.
 *
 * @param pair          the inspector/evaluator pair: first converts the
 *                      evaluated result, second computes it
 * @param partColValues the partition column values to evaluate against
 * @return the evaluated result as a primitive Java object
 * @throws HiveException if evaluation fails
 */
static synchronized public Object evaluateExprOnPart(
    ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> pair,
    Object partColValues) throws HiveException {
  return pair.getFirst().getPrimitiveJavaObject(
      pair.getSecond().evaluate(partColValues));
}
}
/**
 * Analyzes one conjunct of a subquery predicate. Only a top-level equality
 * comparison is decomposed into separately analyzed left/right sides; every
 * other expression is analyzed as a whole.
 *
 * Fix: the previous check (childCount == 2) decomposed ANY two-child node —
 * e.g. '+', '>', 'AND' — as if it were an equality join conjunct. Restrict
 * the decomposition to EQUAL, matching the sibling implementation of this
 * method elsewhere in this file.
 *
 * @param conjunct the conjunct AST node
 * @return the analyzed {@link Conjunct}
 * @throws SemanticException if expression analysis fails
 */
Conjunct analyzeConjunct(ASTNode conjunct) throws SemanticException {
  int type = conjunct.getType();
  if (type == HiveParser.EQUAL) {
    ASTNode left = (ASTNode) conjunct.getChild(0);
    ASTNode right = (ASTNode) conjunct.getChild(1);
    ObjectPair<ExprType, ColumnInfo> leftInfo = analyzeExpr(left);
    ObjectPair<ExprType, ColumnInfo> rightInfo = analyzeExpr(right);
    return new Conjunct(left, right,
        leftInfo.getFirst(), rightInfo.getFirst(),
        leftInfo.getSecond(), rightInfo.getSecond());
  } else {
    // Non-equality conjunct: analyze the whole expression as one side.
    ObjectPair<ExprType, ColumnInfo> sqExprInfo = analyzeExpr(conjunct);
    return new Conjunct(conjunct, null,
        sqExprInfo.getFirst(), null,
        sqExprInfo.getSecond(), sqExprInfo.getSecond());
  }
}
/**
 * Analyzes one conjunct of a subquery predicate. A top-level equality is
 * decomposed into separately analyzed left/right sides; any other expression
 * is analyzed as a whole.
 *
 * @param conjunct the conjunct AST node
 * @return the analyzed {@link Conjunct}
 * @throws SemanticException if expression analysis fails
 */
Conjunct analyzeConjunct(ASTNode conjunct) throws SemanticException {
  if (conjunct.getType() == HiveParser.EQUAL) {
    ASTNode lhs = (ASTNode) conjunct.getChild(0);
    ASTNode rhs = (ASTNode) conjunct.getChild(1);
    ObjectPair<ExprType, ColumnInfo> lhsInfo = analyzeExpr(lhs);
    ObjectPair<ExprType, ColumnInfo> rhsInfo = analyzeExpr(rhs);
    return new Conjunct(lhs, rhs,
        lhsInfo.getFirst(), rhsInfo.getFirst(),
        lhsInfo.getSecond(), rhsInfo.getSecond());
  }
  // Non-equality conjunct: treat the whole expression as a single side.
  ObjectPair<ExprType, ColumnInfo> wholeInfo = analyzeExpr(conjunct);
  return new Conjunct(conjunct, null,
      wholeInfo.getFirst(), null,
      wholeInfo.getSecond(), wholeInfo.getSecond());
}
// Copy the incoming key/value into the reusable pair (ReflectionUtils.copy
// populates its last argument and returns it) and append it to the buffer.
// NOTE(review): fragment — 'tem', 'job', 'key', 'value' and 'buffer' are
// declared outside this view.
tem.setFirst(ReflectionUtils.copy(job, key, tem.getFirst()));
tem.setSecond(ReflectionUtils.copy(job, value, tem.getSecond()));
buffer.add(tem);
// NOTE(review): fragment — the call's result ('grpByExprsGroupingSets',
// presumably an ObjectPair) is assigned on a line outside this view.
getGroupByGroupingSetsForClause(parseInfo, dest);
// Unpack the pair: group-by expressions and their grouping-set masks.
List<ASTNode> grpByExprs = grpByExprsGroupingSets.getFirst();
List<Long> groupingSets = grpByExprsGroupingSets.getSecond();
// Copy the incoming key/value into the reusable pair (ReflectionUtils.copy
// populates its last argument and returns it) and append it to the buffer.
// NOTE(review): fragment — 'tem', 'job', 'key', 'value' and 'buffer' are
// declared outside this view.
tem.setFirst(ReflectionUtils.copy(job, key, tem.getFirst()));
tem.setSecond(ReflectionUtils.copy(job, value, tem.getSecond()));
buffer.add(tem);
// NOTE(review): fragment — this is the tail of an enhanced-for header; the
// opening tokens (presumably 'for (ObjectPair<SelectOperator,') are outside
// this view.
org.apache.hadoop.hive.ql.metadata.Table> pair: finalSelOps.values()) {
  // Result schema of the compiled plan, used below for the output columns.
  List<FieldSchema> fieldSchemas = plan.getResultSchema().getFieldSchemas();
  SelectOperator finalSelOp = pair.getFirst();
  org.apache.hadoop.hive.ql.metadata.Table t = pair.getSecond();
  String destTableName = null;
public synchronized void add(HiveKey key, BytesWritable value) { if (writeCursor >= IN_MEMORY_NUM_ROWS) { // Write buffer is full if (!readBufferUsed) { // Read buffer isn't used, switch buffer switchBufferAndResetCursor(); } else { // Need to spill from write buffer to disk try { if (output == null) { setupOutput(); } for (int i = 0; i < IN_MEMORY_NUM_ROWS; i++) { ObjectPair<HiveKey, BytesWritable> pair = writeBuffer[i]; writeHiveKey(output, pair.getFirst()); writeValue(output, pair.getSecond()); pair.setFirst(null); pair.setSecond(null); } writeCursor = 0; } catch (Exception e) { clear(); // Clean up the cache throw new RuntimeException("Failed to spill rows to disk", e); } } } ObjectPair<HiveKey, BytesWritable> pair = writeBuffer[writeCursor++]; pair.setFirst(key); pair.setSecond(value); }
/**
 * Appends a key/value row to the in-memory write buffer. When the write
 * buffer is full, either swaps with the unused read buffer or spills the
 * whole write buffer to disk.
 *
 * @param key   the row key to store
 * @param value the row value to store
 * @throws RuntimeException if spilling rows to disk fails
 */
public synchronized void add(HiveKey key, BytesWritable value) {
  if (writeCursor >= IN_MEMORY_NUM_ROWS) { // Write buffer is full
    if (!readBufferUsed) { // Read buffer isn't used, switch buffer
      switchBufferAndResetCursor();
    } else {
      // Need to spill from write buffer to disk
      try {
        if (output == null) {
          setupOutput();
        }
        for (int i = 0; i < IN_MEMORY_NUM_ROWS; i++) {
          ObjectPair<HiveKey, BytesWritable> pair = writeBuffer[i];
          writeHiveKey(output, pair.getFirst());
          writeValue(output, pair.getSecond());
          // Null out references so spilled rows can be garbage collected.
          pair.setFirst(null);
          pair.setSecond(null);
        }
        writeCursor = 0;
      } catch (Exception e) {
        clear(); // Clean up the cache
        throw new RuntimeException("Failed to spill rows to disk", e);
      }
    }
  }
  ObjectPair<HiveKey, BytesWritable> pair = writeBuffer[writeCursor++];
  pair.setFirst(key);
  pair.setSecond(value);
}
// Drain the key/value container, feeding each buffered pair to the write
// helper. NOTE(review): fragment — the loop's closing brace and any code
// after setKeyValue are outside this view.
while (kvContainer.hasNext()) {
  ObjectPair<HiveKey, BytesWritable> pair = kvContainer.next();
  Writable key = pair.getFirst();
  Writable val = pair.getSecond();
  writeHelper.setKeyValue(key, val);
/**
 * Recursively checks whether the subquery expressions in a search condition
 * are acceptable: at most one subquery expression may appear once an OR has
 * been seen anywhere above or at this node.
 *
 * @param searchCond the search-condition AST subtree to inspect
 * @param subqInfo   accumulator: first = whether an OR was encountered,
 *                   second = number of subquery expressions seen so far
 * @return false if the subquery usage is invalid, true otherwise
 */
private boolean topLevelConjunctCheck(ASTNode searchCond,
    ObjectPair<Boolean, Integer> subqInfo) {
  int nodeType = searchCond.getType();
  if (nodeType == HiveParser.KW_OR) {
    // Record that a disjunction encloses everything below this node.
    subqInfo.setFirst(Boolean.TRUE);
    if (subqInfo.getSecond() > 1) {
      return false;
    }
  }
  if (nodeType == HiveParser.TOK_SUBQUERY_EXPR) {
    int subQueryCount = subqInfo.getSecond() + 1;
    subqInfo.setSecond(subQueryCount);
    // A second subquery combined with an OR is not supported.
    return !(subQueryCount > 1 && subqInfo.getFirst());
  }
  for (int child = 0; child < searchCond.getChildCount(); child++) {
    if (!topLevelConjunctCheck((ASTNode) searchCond.getChild(child), subqInfo)) {
      return false;
    }
  }
  return true;
}
/**
 * Classifies an expression by what it references: the parent query, the
 * subquery, or neither. Where the expression resolves to a parent-query
 * column, that column's info is returned alongside the classification.
 *
 * @param expr the expression AST node
 * @return a pair of (classification, parent column info or null)
 */
private ObjectPair<ExprType, ColumnInfo> analyzeExpr(ASTNode expr) {
  // For HAVING, first try to resolve the whole expression against the
  // parent query's row resolver.
  if (forHavingClause) {
    try {
      ColumnInfo parentCol = parentQueryRR.getExpression(expr);
      if (parentCol != null) {
        return ObjectPair.create(ExprType.REFERS_PARENT, parentCol);
      }
    } catch (SemanticException ignored) {
      // Resolution failure only means this is not a parent-query column;
      // fall through to the structural analysis below.
    }
  }
  int nodeType = expr.getType();
  if (nodeType == HiveParser.DOT) {
    // A qualified reference: resolvable against the parent means it refers
    // to the parent query, otherwise it belongs to the subquery.
    ColumnInfo dotCol = resolveDot(firstDot(expr));
    if (dotCol != null) {
      return ObjectPair.create(ExprType.REFERS_PARENT, dotCol);
    }
    return ObjectPair.create(ExprType.REFERS_SUBQUERY, null);
  }
  if (nodeType == HiveParser.TOK_TABLE_OR_COL) {
    // An unqualified column reference is attributed to the subquery.
    return ObjectPair.create(ExprType.REFERS_SUBQUERY, null);
  }
  // Composite expression: combine the classification of every child.
  ExprType combined = ExprType.REFERS_NONE;
  for (int i = 0; i < expr.getChildCount(); i++) {
    combined = combined.combine(analyzeExpr((ASTNode) expr.getChild(i)).getFirst());
  }
  return ObjectPair.create(combined, null);
}
/**
 * Classifies an expression by what it references: the parent query, the
 * subquery, or neither. Where the expression resolves to a parent-query
 * column, that column's info is returned alongside the classification.
 *
 * @param expr the expression AST node
 * @return a pair of (classification, parent column info or null)
 */
private ObjectPair<ExprType, ColumnInfo> analyzeExpr(ASTNode expr) {
  ColumnInfo cInfo = null;
  // For HAVING, first try to resolve the whole expression against the
  // parent query's row resolver.
  if ( forHavingClause ) {
    try {
      cInfo = parentQueryRR.getExpression(expr);
      if ( cInfo != null) {
        return ObjectPair.create(ExprType.REFERS_PARENT, cInfo);
      }
    } catch(SemanticException se) {
      // Deliberately ignored: failure to resolve only means this is not a
      // parent-query column; fall through to the structural analysis below.
    }
  }
  if ( expr.getType() == HiveParser.DOT) {
    // Qualified reference: resolvable against the parent means it refers to
    // the parent query, otherwise it belongs to the subquery.
    ASTNode dot = firstDot(expr);
    cInfo = resolveDot(dot);
    if ( cInfo != null ) {
      return ObjectPair.create(ExprType.REFERS_PARENT, cInfo);
    }
    return ObjectPair.create(ExprType.REFERS_SUBQUERY, null);
  } else if ( expr.getType() == HiveParser.TOK_TABLE_OR_COL ) {
    // Unqualified column reference is attributed to the subquery.
    return ObjectPair.create(ExprType.REFERS_SUBQUERY, null);
  } else {
    // Composite expression: combine the classification of every child.
    ExprType exprType = ExprType.REFERS_NONE;
    int cnt = expr.getChildCount();
    for(int i=0; i < cnt; i++) {
      ASTNode child = (ASTNode) expr.getChild(i);
      exprType = exprType.combine(analyzeExpr(child).getFirst());
    }
    return ObjectPair.create(exprType, null);
  }
}