Tabnine Logo
PeriodGranularity.<init>
Code Index · Add Tabnine to your IDE (free)

How to use
org.apache.druid.java.util.common.granularity.PeriodGranularity
constructor

Best Java code snippets using org.apache.druid.java.util.common.granularity.PeriodGranularity.<init> (Showing top 20 results out of 315)

origin: apache/incubator-druid

/**
 * Builds a {@link Granularity} for this type's period, optionally anchored at an origin
 * and/or localized to a time zone.
 *
 * @param origin bucket origin time, may be null
 * @param tz     time zone, may be null
 *
 * @return a new {@code PeriodGranularity} when customization is needed; otherwise the
 *         cached default granularity (also used for the All/None case, where period is null)
 */
Granularity create(DateTime origin, DateTimeZone tz)
{
 final boolean needsCustom = period != null && (origin != null || tz != null);
 if (!needsCustom) {
  // All or None granularity, or no origin/tz customization: reuse the cached instance.
  return defaultGranularity;
 }
 return new PeriodGranularity(period, origin, tz);
}
origin: apache/incubator-druid

GranularityType(
  final String hiveFormat,
  final String lowerDefaultFormat,
  final String defaultFormat,
  final int dateValuePositions,
  final String period
)
{
 // Formatting templates for rendering bucket paths in the various supported styles.
 this.hiveFormat = hiveFormat;
 this.lowerDefaultFormat = lowerDefaultFormat;
 this.defaultFormat = defaultFormat;
 // Number of date components (e.g. y/m/d/H/M/S) significant at this granularity.
 this.dateValuePositions = dateValuePositions;
 // Parse the ISO-8601 period string (e.g. "P1D") once, up front.
 this.period = new Period(period);
 // Cache an origin-less, zone-less granularity for the common (uncustomized) case.
 this.defaultGranularity = new PeriodGranularity(this.period, null, null);
}
origin: apache/incubator-druid

 /**
  * Returns the Druid QueryGranularity corresponding to a Calcite TimeUnitRange, or null if there is none.
  *
  * @param timeUnitRange time unit
  * @param timeZone      session time zone
  *
  * @return queryGranularity, or null
  */
 public static PeriodGranularity toQueryGranularity(final TimeUnitRange timeUnitRange, final DateTimeZone timeZone)
 {
  // Unmapped time units (no Druid equivalent) yield null rather than throwing.
  final Period mapped = PERIOD_MAP.get(timeUnitRange);
  return mapped == null ? null : new PeriodGranularity(mapped, null, timeZone);
 }
}
origin: apache/incubator-druid

/**
 * Splits {@code interval} into consecutive sub-intervals aligned to {@code period}
 * boundaries, clipping the first and last pieces to the interval's own endpoints.
 *
 * @param interval interval to split
 * @param period   alignment period
 *
 * @return the ordered sub-intervals; a zero-length interval is returned unchanged
 */
private static Iterable<Interval> splitInterval(Interval interval, Period period)
{
 // A zero-length interval cannot be split further.
 if (interval.getStartMillis() == interval.getEndMillis()) {
  return Collections.singletonList(interval);
 }
 final List<Interval> result = new ArrayList<>();
 final Iterator<Interval> buckets =
   new PeriodGranularity(period, null, null).getIterable(interval).iterator();
 // The first bucket may begin before the interval does; clip to the later of the two.
 DateTime cursor = DateTimes.max(buckets.next().getStart(), interval.getStart());
 while (buckets.hasNext()) {
  final DateTime next = buckets.next().getStart();
  result.add(new Interval(cursor, next));
  cursor = next;
 }
 // Append the trailing partial interval, if anything remains past the last bucket start.
 if (cursor.compareTo(interval.getEnd()) < 0) {
  result.add(new Interval(cursor, interval.getEnd()));
 }
 return result;
}
origin: apache/incubator-druid

@Test
public void testIterableMonth()
{
 // Month boundaries in Los Angeles straddle the Nov 2012 DST fall-back (-07:00 -> -08:00).
 final DateTimeZone tz = DateTimes.inferTzFromString("America/Los_Angeles");
 final DateTime baseTime = new DateTime("2012-11-03T10:00:00", tz);
 final PeriodGranularity monthly = new PeriodGranularity(new Period("P1M"), null, tz);
 final Interval window = new Interval(baseTime, baseTime.plus(Months.months(3)));
 assertSameInterval(
   Lists.newArrayList(
     new DateTime("2012-11-01T00:00:00.000-07:00", tz),
     new DateTime("2012-12-01T00:00:00.000-08:00", tz),
     new DateTime("2013-01-01T00:00:00.000-08:00", tz),
     new DateTime("2013-02-01T00:00:00.000-08:00", tz)
   ),
   monthly.getIterable(window)
 );
}
origin: apache/incubator-druid

@Test
public void testTruncateDhaka()
{
 // Asia/Dhaka sits at a whole-hour offset (+06:00); truncation should land on local boundaries.
 final DateTimeZone tz = DateTimeZone.forTimeZone(TimeZone.getTimeZone("Asia/Dhaka"));
 final DateTime date = new DateTime("2011-03-15T21:42:23.898+06:00", tz);
 final PeriodGranularity yearly = new PeriodGranularity(new Period("P1Y"), null, tz);
 final PeriodGranularity hourly = new PeriodGranularity(new Period("PT1H"), null, tz);
 final PeriodGranularity biHourly = new PeriodGranularity(new Period("PT2H"), null, tz);
 final DateTime yearStart = yearly.toDateTime(yearly.bucketStart(date).getMillis());
 final DateTime hourStart = hourly.toDateTime(hourly.bucketStart(date).getMillis());
 final DateTime twoHourStart = biHourly.toDateTime(biHourly.bucketStart(date).getMillis());
 Assert.assertEquals(new DateTime("2011-01-01T00:00:00.000+06:00", tz), yearStart);
 Assert.assertEquals(new DateTime("2011-03-15T21:00:00.000+06:00", tz), hourStart);
 // 21:xx floors to 20:00 on the even-hour (PT2H) boundary.
 Assert.assertEquals(new DateTime("2011-03-15T20:00:00.000+06:00", tz), twoHourStart);
}

origin: apache/incubator-druid

@Test
public void testTruncateKathmandu()
{
 // Asia/Kathmandu has an unusual 45-minute offset (+05:45); truncation should still
 // land on local wall-clock boundaries.
 final DateTimeZone tz = DateTimeZone.forTimeZone(TimeZone.getTimeZone("Asia/Kathmandu"));
 final DateTime date = new DateTime("2011-03-15T21:42:23.898+05:45", tz);
 final PeriodGranularity yearly = new PeriodGranularity(new Period("P1Y"), null, tz);
 final PeriodGranularity hourly = new PeriodGranularity(new Period("PT1H"), null, tz);
 final PeriodGranularity biHourly = new PeriodGranularity(new Period("PT2H"), null, tz);
 final DateTime yearStart = yearly.toDateTime(yearly.bucketStart(date).getMillis());
 final DateTime hourStart = hourly.toDateTime(hourly.bucketStart(date).getMillis());
 final DateTime twoHourStart = biHourly.toDateTime(biHourly.bucketStart(date).getMillis());
 Assert.assertEquals(new DateTime("2011-01-01T00:00:00.000+05:45", tz), yearStart);
 Assert.assertEquals(new DateTime("2011-03-15T21:00:00.000+05:45", tz), hourStart);
 Assert.assertEquals(new DateTime("2011-03-15T20:00:00.000+05:45", tz), twoHourStart);
}

origin: apache/incubator-druid

@Test
public void testIterableWeek()
{
 // Weekly buckets around the Nov 2012 Los Angeles DST fall-back, both with and without an origin.
 final DateTimeZone tz = DateTimes.inferTzFromString("America/Los_Angeles");
 final DateTime baseTime = new DateTime("2012-11-03T10:00:00", tz);
 final Interval window = new Interval(baseTime, baseTime.plus(Weeks.weeks(3)));
 // Without an origin, weeks align to Monday-midnight boundaries (2012-10-29 is a Monday).
 assertSameInterval(
   Lists.newArrayList(
     new DateTime("2012-10-29T00:00:00.000-07:00", tz),
     new DateTime("2012-11-05T00:00:00.000-08:00", tz),
     new DateTime("2012-11-12T00:00:00.000-08:00", tz),
     new DateTime("2012-11-19T00:00:00.000-08:00", tz)
   ),
   new PeriodGranularity(new Period("P1W"), null, tz).getIterable(window)
 );
 // With baseTime as the origin, buckets are anchored to its local wall-clock time instead.
 assertSameInterval(
   Lists.newArrayList(
     new DateTime("2012-11-03T10:00:00.000-07:00", tz),
     new DateTime("2012-11-10T10:00:00.000-08:00", tz),
     new DateTime("2012-11-17T10:00:00.000-08:00", tz)
   ),
   new PeriodGranularity(new Period("P1W"), baseTime, tz).getIterable(window)
 );
}
origin: apache/incubator-druid

@Test
public void testCustomPeriodToDate()
{
 // Every path encodes S=43; with a PT2S granularity the expected bucket start is the
 // even second 42 — presumably checkToDate() floors the parsed path date. All three
 // checks share the same expected instant and differ only in path prefix.
 final DateTime expected = new DateTime(2011, 3, 15, 20, 50, 42, 0, ISOChronology.getInstanceUTC());
 PathDate[] customChecks = {
   new PathDate(expected, null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"),
   new PathDate(expected, null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"),
   new PathDate(expected, null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1")
 };
 checkToDate(
   new PeriodGranularity(new Period("PT2S"), null, DateTimeZone.UTC),
   Granularity.Formatter.DEFAULT,
   customChecks
 );
}
origin: apache/incubator-druid

/**
 * Converts a string-typed expression to a DATE or TIMESTAMP Druid expression by parsing
 * it from SQL format in the session time zone.
 *
 * @param plannerContext planner context (supplies time zone and macro table)
 * @param operand        string expression to cast
 * @param toType         target type; must be DATE or TIMESTAMP
 *
 * @return the datetime expression, floored to day precision for DATE
 *
 * @throws ISE if {@code toType} is neither DATE nor TIMESTAMP
 */
private static DruidExpression castCharToDateTime(
  final PlannerContext plannerContext,
  final DruidExpression operand,
  final SqlTypeName toType
)
{
 // Cast strings to datetimes by parsing them from SQL format (null format = default),
 // interpreted in the session time zone.
 final DruidExpression parsed = DruidExpression.fromFunctionCall(
   "timestamp_parse",
   ImmutableList.of(
     operand,
     DruidExpression.fromExpression(DruidExpression.nullLiteral()),
     DruidExpression.fromExpression(DruidExpression.stringLiteral(plannerContext.getTimeZone().getID()))
   )
 );
 switch (toType) {
  case DATE:
   // DATE has day precision: floor the parsed timestamp to the start of its day.
   return TimeFloorOperatorConversion.applyTimestampFloor(
     parsed,
     new PeriodGranularity(Period.days(1), null, plannerContext.getTimeZone()),
     plannerContext.getExprMacroTable()
   );
  case TIMESTAMP:
   return parsed;
  default:
   throw new ISE("Unsupported DateTime type[%s]", toType);
 }
}
origin: apache/incubator-druid

@Test(expected = IllegalArgumentException.class)
public void testMergeResultsWithNegativeLimit()
{
 // Building a groupBy query with a negative limit must fail at build() time.
 GroupByQuery
   .builder()
   .setDataSource(QueryRunnerTestHelper.dataSource)
   .setInterval("2011-04-02/2011-04-04")
   .setDimensions(new DefaultDimensionSpec("quality", "alias"))
   .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index"))
   .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
   .setLimit(-1)
   .build();
}
origin: apache/incubator-druid

PeriodGranularity periodOrigin = new PeriodGranularity(new Period("PT15M"), origin, null);
assertSameDateTime(
  Lists.newArrayList(
PeriodGranularity periodNoOrigin = new PeriodGranularity(new Period("PT15M"), null, null);
assertSameDateTime(
  Lists.newArrayList(
origin: apache/incubator-druid

@Test
public void testCompoundPeriodMillisTruncate()
{
 // Compound period PT12H5M anchored at an explicit origin: each bucket advances by
 // 12 hours 5 minutes from the origin, so bucket starts drift relative to wall-clock time.
 final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00");
 final PeriodGranularity granularity = new PeriodGranularity(
   new Period("PT12H5M"),
   origin,
   DateTimeZone.UTC
 );
 assertSameDateTime(
   Lists.newArrayList(
     DateTimes.of("2012-01-01T04:50:00.000-08:00"),
     DateTimes.of("2012-01-02T05:00:00.000-08:00"),
     DateTimes.of("2012-01-02T17:05:00.000-08:00"),
     DateTimes.of("2012-02-03T22:25:00.000-08:00")
   ),
   Lists.newArrayList(
     granularity.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")),
     granularity.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")),
     granularity.bucketStart(DateTimes.of("2012-01-03T00:20:04.123-08:00")),
     // A time exactly on a bucket boundary truncates to itself.
     granularity.bucketStart(DateTimes.of("2012-02-03T22:25:00.000-08:00"))
   )
 );
}
origin: apache/incubator-druid

@Test
public void testGroupByWithRegEx()
{
 // Regex-filter "quality" with auto.* so only the "automotive" dimension value survives.
 final GroupByQuery query = GroupByQuery
   .builder()
   .setDataSource(QueryRunnerTestHelper.dataSource)
   .setInterval("2011-04-02/2011-04-04")
   .setDimFilter(new RegexDimFilter("quality", "auto.*", null))
   .setDimensions(new DefaultDimensionSpec("quality", "quality"))
   .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount)
   .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
   .build();
 final List<Row> expectedResults = Collections.singletonList(
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "quality", "automotive", "rows", 2L)
 );
 final QueryRunner<Row> mergeRunner = factory.getToolchest().mergeResults(runner);
 final Map<String, Object> context = new HashMap<>();
 TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query), context), "no-limit");
}
origin: apache/incubator-druid

@Test
public void testTimeseriesUsingFloorPlusCastAsDate() throws Exception
{
 // CAST(FLOOR(__time TO QUARTER) AS DATE) should plan as a timeseries query whose
 // granularity is a three-month (quarter) PeriodGranularity in UTC.
 testQuery(
   "SELECT SUM(cnt), dt FROM (\n"
   + "  SELECT CAST(FLOOR(__time TO QUARTER) AS DATE) AS dt,\n"
   + "  cnt FROM druid.foo\n"
   + ") AS x\n"
   + "GROUP BY dt\n"
   + "ORDER BY dt",
   ImmutableList.of(
     Druids.newTimeseriesQueryBuilder()
        .dataSource(CalciteTests.DATASOURCE1)
        .intervals(QSS(Filtration.eternity()))
        // QUARTER maps to Period.months(3); no origin, UTC zone.
        .granularity(new PeriodGranularity(Period.months(3), null, DateTimeZone.UTC))
        .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
        .context(TIMESERIES_CONTEXT_DEFAULT)
        .build()
   ),
   // Expected rows: (SUM(cnt), quarter start) — both years' rows land in the Q1 bucket.
   ImmutableList.of(
     new Object[]{3L, D("2000-01-01")},
     new Object[]{3L, D("2001-01-01")}
   )
 );
}
origin: apache/incubator-druid

// Shared driver: builds a month-granularity groupBy with the given (valid, non-negative)
// limit and asserts that mergeResults returns only the first `limit` expected rows.
private void doTestMergeResultsWithValidLimit(final int limit)
{
 GroupByQuery.Builder builder = GroupByQuery
   .builder()
   .setDataSource(QueryRunnerTestHelper.dataSource)
   .setInterval("2011-04-02/2011-04-04")
   .setDimensions(new DefaultDimensionSpec("quality", "alias"))
   .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index"))
   .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
   .setLimit(limit);
 final GroupByQuery fullQuery = builder.build();
 // Full (unlimited) expected result set; the assertion below truncates it to `limit` rows.
 List<Row> expectedResults = Arrays.asList(
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 2L, "idx", 269L),
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 2L, "idx", 217L),
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 2L, "idx", 319L),
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "health", "rows", 2L, "idx", 216L),
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L),
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 2L, "idx", 221L),
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L),
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "technology", "rows", 2L, "idx", 177L),
   GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L)
 );
 QueryRunner<Row> mergeRunner = factory.getToolchest().mergeResults(runner);
 Map<String, Object> context = new HashMap<>();
 // Only the first `limit` of the expected rows should come back.
 TestHelper.assertExpectedObjects(
   Iterables.limit(expectedResults, limit),
   mergeRunner.run(QueryPlus.wrap(fullQuery), context),
   StringUtils.format("limit: %d", limit)
 );
}
origin: apache/incubator-druid

@Test
public void testTimeseriesLosAngelesUsingTimeFloorConnectionUtc() throws Exception
{
 // TIME_FLOOR(__time, 'P1M', NULL, 'America/Los_Angeles') should plan as a timeseries
 // query with a one-month PeriodGranularity in the Los Angeles zone, even though the
 // connection itself uses the default (UTC) context.
 testQuery(
   "SELECT SUM(cnt), gran FROM (\n"
   + "  SELECT TIME_FLOOR(__time, 'P1M', CAST(NULL AS TIMESTAMP), 'America/Los_Angeles') AS gran,\n"
   + "  cnt FROM druid.foo\n"
   + ") AS x\n"
   + "GROUP BY gran\n"
   + "ORDER BY gran",
   ImmutableList.of(
     Druids.newTimeseriesQueryBuilder()
        .dataSource(CalciteTests.DATASOURCE1)
        .intervals(QSS(Filtration.eternity()))
        .granularity(new PeriodGranularity(Period.months(1), null, DateTimes.inferTzFromString(LOS_ANGELES)))
        .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
        .context(TIMESERIES_CONTEXT_DEFAULT)
        .build()
   ),
   // Month starts are floored in LA time; the expected T08 hour reflects the LA->UTC
   // offset on the UTC connection — NOTE(review): confirm T() renders in UTC here.
   ImmutableList.of(
     new Object[]{1L, T("1999-12-01T08")},
     new Object[]{2L, T("2000-01-01T08")},
     new Object[]{1L, T("2000-12-01T08")},
     new Object[]{2L, T("2001-01-01T08")}
   )
 );
}
origin: apache/incubator-druid

@Test
public void testTimeseriesLosAngelesViaQueryContext() throws Exception
{
 // With the Los Angeles time zone supplied via the query context, a plain
 // FLOOR(__time TO MONTH) should plan with a one-month PeriodGranularity in LA time.
 testQuery(
   PLANNER_CONFIG_DEFAULT,
   QUERY_CONTEXT_LOS_ANGELES,
   "SELECT SUM(cnt), gran FROM (\n"
   + "  SELECT FLOOR(__time TO MONTH) AS gran,\n"
   + "  cnt FROM druid.foo\n"
   + ") AS x\n"
   + "GROUP BY gran\n"
   + "ORDER BY gran",
   CalciteTests.REGULAR_USER_AUTH_RESULT,
   ImmutableList.of(
     Druids.newTimeseriesQueryBuilder()
        .dataSource(CalciteTests.DATASOURCE1)
        .intervals(QSS(Filtration.eternity()))
        .granularity(new PeriodGranularity(Period.months(1), null, DateTimes.inferTzFromString(LOS_ANGELES)))
        .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
        .context(TIMESERIES_CONTEXT_LOS_ANGELES)
        .build()
   ),
   // Expected rows: (SUM(cnt), LA-local month start), rendered in the LA zone.
   ImmutableList.of(
     new Object[]{1L, T("1999-12-01", LOS_ANGELES)},
     new Object[]{2L, T("2000-01-01", LOS_ANGELES)},
     new Object[]{1L, T("2000-12-01", LOS_ANGELES)},
     new Object[]{2L, T("2001-01-01", LOS_ANGELES)}
   )
 );
}
origin: apache/incubator-druid

@Test
public void testTimeseriesLosAngelesUsingTimeFloorConnectionLosAngeles() throws Exception
{
 // TIME_FLOOR with no explicit zone argument should pick up the Los Angeles zone from
 // the connection context, planning a one-month PeriodGranularity in LA time.
 testQuery(
   PLANNER_CONFIG_DEFAULT,
   QUERY_CONTEXT_LOS_ANGELES,
   "SELECT SUM(cnt), gran FROM (\n"
   + "  SELECT TIME_FLOOR(__time, 'P1M') AS gran,\n"
   + "  cnt FROM druid.foo\n"
   + ") AS x\n"
   + "GROUP BY gran\n"
   + "ORDER BY gran",
   CalciteTests.REGULAR_USER_AUTH_RESULT,
   ImmutableList.of(
     Druids.newTimeseriesQueryBuilder()
        .dataSource(CalciteTests.DATASOURCE1)
        .intervals(QSS(Filtration.eternity()))
        .granularity(new PeriodGranularity(Period.months(1), null, DateTimes.inferTzFromString(LOS_ANGELES)))
        .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
        .context(TIMESERIES_CONTEXT_LOS_ANGELES)
        .build()
   ),
   // Same expectations as the explicit-zone variant: LA-local month starts.
   ImmutableList.of(
     new Object[]{1L, T("1999-12-01", LOS_ANGELES)},
     new Object[]{2L, T("2000-01-01", LOS_ANGELES)},
     new Object[]{1L, T("2000-12-01", LOS_ANGELES)},
     new Object[]{2L, T("2001-01-01", LOS_ANGELES)}
   )
 );
}
origin: apache/incubator-druid

@Test
public void testTimeseriesUsingCastAsDate() throws Exception
{
 // CAST(__time AS DATE) should plan as a timeseries query with a one-day
 // PeriodGranularity in UTC (DATE has day precision).
 testQuery(
   "SELECT SUM(cnt), dt FROM (\n"
   + "  SELECT CAST(__time AS DATE) AS dt,\n"
   + "  cnt FROM druid.foo\n"
   + ") AS x\n"
   + "GROUP BY dt\n"
   + "ORDER BY dt",
   ImmutableList.of(
     Druids.newTimeseriesQueryBuilder()
        .dataSource(CalciteTests.DATASOURCE1)
        .intervals(QSS(Filtration.eternity()))
        .granularity(new PeriodGranularity(Period.days(1), null, DateTimeZone.UTC))
        .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt")))
        .context(TIMESERIES_CONTEXT_DEFAULT)
        .build()
   ),
   // One row (cnt=1) per distinct day in the test datasource.
   ImmutableList.of(
     new Object[]{1L, D("2000-01-01")},
     new Object[]{1L, D("2000-01-02")},
     new Object[]{1L, D("2000-01-03")},
     new Object[]{1L, D("2001-01-01")},
     new Object[]{1L, D("2001-01-02")},
     new Object[]{1L, D("2001-01-03")}
   )
 );
}
org.apache.druid.java.util.common.granularity.PeriodGranularity.<init>

Popular methods of PeriodGranularity

  • bucketStart
  • getPeriod
  • getTimeZone
  • getOrigin
  • bucketEnd
  • getDateValues
  • getIterable
  • increment
  • isCompoundPeriod
  • serialize
  • truncate
  • truncateCompoundPeriod
  • truncateMillisPeriod
  • toDateTime

Popular in Java

  • Reading from database using SQL prepared statement
  • orElseThrow (Optional)
    Return the contained value, if present, otherwise throw an exception to be created by the provided s
  • setScale (BigDecimal)
  • findViewById (Activity)
  • Menu (java.awt)
  • Collections (java.util)
    This class consists exclusively of static methods that operate on or return collections. It contains
  • Dictionary (java.util)
    Note: Do not use this class since it is obsolete. Please use the Map interface for new implementatio
  • GregorianCalendar (java.util)
    GregorianCalendar is a concrete subclass of Calendarand provides the standard calendar used by most
  • TreeMap (java.util)
    Walk the nodes of the tree left-to-right or right-to-left. Note that in descending iterations, next
  • Executors (java.util.concurrent)
    Factory and utility methods for Executor, ExecutorService, ScheduledExecutorService, ThreadFactory,
  • CodeWhisperer alternatives
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now