SequenceManager.<init>

How to use the org.apache.phoenix.compile.SequenceManager constructor

Best Java code snippets using org.apache.phoenix.compile.SequenceManager.<init> (Showing top 20 results out of 315)
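Before the extracted snippets, here is a minimal, self-contained sketch of the pattern they all share: create a SequenceManager for a PhoenixStatement and hand it to the StatementContext that compiles the statement. The JDBC URL is a placeholder, and the null ColumnResolver mirrors the test snippet further down; adjust both for a real deployment.

import java.sql.DriverManager;
import java.util.Properties;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.phoenix.compile.SequenceManager;
import org.apache.phoenix.compile.StatementContext;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixStatement;

public class SequenceManagerExample {
  public static void main(String[] args) throws Exception {
    // Placeholder URL; point this at your own cluster.
    try (PhoenixConnection conn = DriverManager
        .getConnection("jdbc:phoenix:localhost", new Properties())
        .unwrap(PhoenixConnection.class)) {
      PhoenixStatement statement = new PhoenixStatement(conn);
      // One SequenceManager per statement; it tracks the sequences the
      // statement references during compilation.
      SequenceManager sequenceManager = new SequenceManager(statement);
      StatementContext context =
          new StatementContext(statement, null, new Scan(), sequenceManager);
      System.out.println("sequences referenced so far: "
          + context.getSequenceManager().getSequenceCount());
    }
  }
}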

origin: apache/phoenix

public StatementContext(PhoenixStatement statement, Scan scan) {
  this(statement, FromCompiler.EMPTY_TABLE_RESOLVER, scan, new SequenceManager(statement));
}
origin: apache/phoenix

/**
 *  Constructor that lets you override whether or not to collect request level metrics.
 */
public StatementContext(PhoenixStatement statement, boolean collectRequestLevelMetrics) {
  this(statement, FromCompiler.EMPTY_TABLE_RESOLVER, new Scan(), new SequenceManager(statement), collectRequestLevelMetrics);
}
origin: apache/phoenix

public StatementContext(PhoenixStatement statement, ColumnResolver resolver) {
  this (statement, resolver, new Scan(), new SequenceManager(statement));
}
origin: apache/phoenix

public QueryCompiler(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver, boolean projectTuples, boolean optimizeSubquery, Map<TableRef, QueryPlan> dataPlans) throws SQLException {
  this(statement, select, resolver, Collections.<PDatum>emptyList(), null, new SequenceManager(statement), projectTuples, optimizeSubquery, dataPlans);
}
origin: apache/phoenix

public QueryPlan optimize(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver, List<? extends PDatum> targetColumns, ParallelIteratorFactory parallelIteratorFactory) throws SQLException {
  QueryCompiler compiler = new QueryCompiler(statement, select, resolver, targetColumns, parallelIteratorFactory, new SequenceManager(statement));
  QueryPlan dataPlan = compiler.compile();
  return optimize(dataPlan, statement, targetColumns, parallelIteratorFactory);
}

origin: apache/phoenix

public MutationPlan compile(final List<TableRef> tableRefs, final byte[] emptyCF, final List<byte[]> projectCFs, final List<PColumn> deleteList,
    final long timestamp) throws SQLException {
  PhoenixStatement statement = new PhoenixStatement(connection);
  final StatementContext context = new StatementContext(
      statement,
      new MultipleTableRefColumnResolver(tableRefs),
      scan,
      new SequenceManager(statement));
  return new PostDDLMutationPlan(context, tableRefs, timestamp, emptyCF, deleteList, projectCFs);
}
origin: apache/phoenix

private boolean getKeyExpressionCombinations(Pair<Expression, Expression> combination, StatementContext context, SelectStatement select, TableRef table, JoinType type, final List<Expression> joinExpressions, final List<Expression> hashExpressions) throws SQLException {
  if ((type != JoinType.Inner && type != JoinType.Semi) || this.noChildParentJoinOptimization)
    return false;
  Scan scanCopy = ScanUtil.newScan(context.getScan());
  StatementContext contextCopy = new StatementContext(statement, context.getResolver(), scanCopy, new SequenceManager(statement));
  contextCopy.setCurrentTable(table);
  List<Expression> lhsCombination = Lists.<Expression> newArrayList();
  boolean complete = WhereOptimizer.getKeyExpressionCombination(lhsCombination, contextCopy, select, joinExpressions);
  if (lhsCombination.isEmpty())
    return false;
  List<Expression> rhsCombination = Lists.newArrayListWithExpectedSize(lhsCombination.size());
  for (int i = 0; i < lhsCombination.size(); i++) {
    Expression lhs = lhsCombination.get(i);
    for (int j = 0; j < joinExpressions.size(); j++) {
      if (lhs == joinExpressions.get(j)) {
        rhsCombination.add(hashExpressions.get(j));
        break;
      }
    }
  }
  if (lhsCombination.size() == 1) {
    combination.setFirst(lhsCombination.get(0));
    combination.setSecond(rhsCombination.get(0));
  } else {
    combination.setFirst(new RowValueConstructorExpression(lhsCombination, false));
    combination.setSecond(new RowValueConstructorExpression(rhsCombination, false));
  }
  return type == JoinType.Semi && complete;
}
origin: apache/phoenix

@SuppressWarnings("unchecked")
@Override
public QueryPlan compilePlan(PhoenixStatement stmt, Sequence.ValueOp seqAction) throws SQLException {
  if(!getUdfParseNodes().isEmpty()) {
    stmt.throwIfUnallowedUserDefinedFunctions(getUdfParseNodes());
  }
  SelectStatement select = SubselectRewriter.flatten(this, stmt.getConnection());
  ColumnResolver resolver = FromCompiler.getResolverForQuery(select, stmt.getConnection());
  select = StatementNormalizer.normalize(select, resolver);
  SelectStatement transformedSelect = SubqueryRewriter.transform(select, resolver, stmt.getConnection());
  if (transformedSelect != select) {
    resolver = FromCompiler.getResolverForQuery(transformedSelect, stmt.getConnection());
    select = StatementNormalizer.normalize(transformedSelect, resolver);
  }
  QueryPlan plan = new QueryCompiler(stmt, select, resolver, Collections.<PDatum>emptyList(), stmt.getConnection().getIteratorFactory(), new SequenceManager(stmt), true, false, null).compile();
  plan.getContext().getSequenceManager().validateSequences(seqAction);
  return plan;
}
origin: apache/phoenix

final ColumnResolver resolver = FromCompiler.getResolver(create, connection, create.getUdfParseNodes());
Scan scan = new Scan();
final StatementContext context = new StatementContext(statement, resolver, scan, new SequenceManager(statement));
ExpressionCompiler expressionCompiler = new ExpressionCompiler(context);
List<ParseNode> splitNodes = create.getSplitNodes();
origin: apache/phoenix

public static ClientAggregators getSingleSumAggregator(String url, Properties props) throws SQLException {
  try (PhoenixConnection pconn = DriverManager.getConnection(url, props).unwrap(PhoenixConnection.class)) {
    PhoenixStatement statement = new PhoenixStatement(pconn);
    StatementContext context = new StatementContext(statement, null, new Scan(), new SequenceManager(statement));
    AggregationManager aggregationManager = context.getAggregationManager();
    SumAggregateFunction func = new SumAggregateFunction(Arrays.<Expression>asList(new KeyValueColumnExpression(new PLongColumn() {
origin: apache/phoenix

StatementContext context = new StatementContext(statement, resolver, new Scan(), new SequenceManager(statement));
ParseNode dummyWhere = GenSubqueryParamValuesRewriter.replaceWithDummyValues(stmt.getWhere(), context);
stmt = FACTORY.select(stmt, dummyWhere);
origin: apache/phoenix

QueryCompiler compiler = new QueryCompiler(statement, select, resolverToBe, Collections.<PColumn>emptyList(), parallelIteratorFactoryToBe, new SequenceManager(statement));
final QueryPlan dataPlan = compiler.compile();
origin: apache/phoenix

ViewType viewTypeToBe = null;
Scan scan = new Scan();
final StatementContext context = new StatementContext(statement, resolver, scan, new SequenceManager(statement));
origin: apache/phoenix

PhoenixConnection connection = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
final PhoenixStatement statement = new PhoenixStatement(connection);
final StatementContext context = new StatementContext(statement, resolver, scan, new SequenceManager(statement));
context.setScanRanges(scanRanges);
ParallelIterators parallelIterators = new ParallelIterators(new QueryPlan() {
origin: apache/phoenix

  JoinSpec joinSpec = joinSpecs.get(i);
  Scan subScan = ScanUtil.newScan(originalScan);
  subContexts[i] = new StatementContext(statement, context.getResolver(), subScan, new SequenceManager(statement));
  subPlans[i] = compileJoinQuery(subContexts[i], binds, joinSpec.getJoinTable(), true, true, null);
  boolean hasPostReference = joinSpec.getJoinTable().hasPostReference();
JoinTable lhsJoin = joinTable.getSubJoinTableWithoutPostFilters();
Scan subScan = ScanUtil.newScan(originalScan);
StatementContext lhsCtx = new StatementContext(statement, context.getResolver(), subScan, new SequenceManager(statement));
QueryPlan lhsPlan = compileJoinQuery(lhsCtx, binds, lhsJoin, true, true, null);
PTable rhsProjTable;
StatementContext lhsCtx = new StatementContext(statement, context.getResolver(), lhsScan, new SequenceManager(statement));
boolean preserveRowkey = !projectPKColumns && type != JoinType.Full;
QueryPlan lhsPlan = compileJoinQuery(lhsCtx, binds, lhsJoin, true, !preserveRowkey, lhsOrderBy);
StatementContext rhsCtx = new StatementContext(statement, context.getResolver(), rhsScan, new SequenceManager(statement));
QueryPlan rhsPlan = compileJoinQuery(rhsCtx, binds, rhsJoin, true, true, rhsOrderBy);
PTable rhsProjTable = rhsCtx.getResolver().getTables().get(0).getTable();
StatementContext subCtx = new StatementContext(statement, resolver, ScanUtil.newScan(originalScan), new SequenceManager(statement));
subCtx.setCurrentTable(tableRef);
QueryPlan innerPlan = new SortMergeJoinPlan(subCtx, joinTable.getStatement(), tableRef, type == JoinType.Right ? JoinType.Left : type, lhsPlan, rhsPlan, lhsKeyExpressions, rhsKeyExpressions, projectedTable, lhsProjTable, needsMerge ? rhsProjTable : null, fieldPosition, lastJoinSpec.isSingleValueOnly());
origin: apache/phoenix

StatementContext context = new StatementContext(statement, resolver, scan, new SequenceManager(statement));
long ts = timestamp;
origin: apache/phoenix

  QueryCompiler compiler = new QueryCompiler(statement, select, selectResolver, targetColumns, parallelIteratorFactoryToBe, new SequenceManager(statement), true, false, null);
  queryPlanToBe = compiler.compile();
final StatementContext context = new StatementContext(statement, resolver, new Scan(), new SequenceManager(statement));
UpsertValuesCompiler expressionBuilder = new UpsertValuesCompiler(context);
final List<Expression> constantExpressions = Lists.newArrayListWithExpectedSize(valueNodes.size());
origin: org.apache.phoenix/phoenix-core

public QueryPlan optimize(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver, List<? extends PDatum> targetColumns, ParallelIteratorFactory parallelIteratorFactory) throws SQLException {
  QueryCompiler compiler = new QueryCompiler(statement, select, resolver, targetColumns, parallelIteratorFactory, new SequenceManager(statement));
  QueryPlan dataPlan = compiler.compile();
  return optimize(dataPlan, statement, targetColumns, parallelIteratorFactory);
}

origin: com.aliyun.phoenix/ali-phoenix-core

public QueryPlan optimize(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver, List<? extends PDatum> targetColumns, ParallelIteratorFactory parallelIteratorFactory) throws SQLException {
  QueryCompiler compiler = new QueryCompiler(statement, select, resolver, targetColumns, parallelIteratorFactory, new SequenceManager(statement));
  QueryPlan dataPlan = compiler.compile();
  return optimize(dataPlan, statement, targetColumns, parallelIteratorFactory);
}

origin: com.aliyun.phoenix/ali-phoenix-core

public MutationPlan compile(final List<TableRef> tableRefs, final byte[] emptyCF, final List<byte[]> projectCFs, final List<PColumn> deleteList,
    final long timestamp) throws SQLException {
  PhoenixStatement statement = new PhoenixStatement(connection);
  final StatementContext context = new StatementContext(
      statement,
      new MultipleTableRefColumnResolver(tableRefs),
      scan,
      new SequenceManager(statement));
  return new PostDDLMutationPlan(context, tableRefs, timestamp, emptyCF, deleteList, projectCFs);
}
org.apache.phoenix.compile.SequenceManager.<init>

Popular methods of SequenceManager

  • getSequenceCount
  • determineNumToAllocate
    If the caller used a NEXT VALUES FOR expression then numToAllocate has already been set.
  • newSequenceReference
  • newSequenceTuple
  • setSequenceValues
  • validateSequences (see the sketch after this list)
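
The pattern that ties these methods together appears in the compilePlan snippet above. The sketch below, assuming the same Phoenix compile-package imports as that snippet, compiles a query with a fresh SequenceManager and then asks it to validate the referenced sequences before the plan is executed.

QueryPlan compileAndValidate(PhoenixStatement stmt, SelectStatement select,
    ColumnResolver resolver, Sequence.ValueOp seqAction) throws SQLException {
  // A fresh SequenceManager is created per statement and travels with the
  // compiled plan's StatementContext.
  QueryPlan plan = new QueryCompiler(stmt, select, resolver,
      Collections.<PDatum>emptyList(), stmt.getConnection().getIteratorFactory(),
      new SequenceManager(stmt), true, false, null).compile();
  // validateSequences resolves every sequence referenced by the statement and
  // applies the caller-supplied ValueOp before execution.
  plan.getContext().getSequenceManager().validateSequences(seqAction);
  return plan;
}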
