ql/src/test/results/clientpositive/druid_timeseries.q.out (hive.git, commit 785cbd2114dd6dee345809f288a1cecc8cdf2c06)
PREHOOK: query: CREATE EXTERNAL TABLE druid_table_1
STORED BY 'org.apache.hadoop.hive.druid.QTestDruidStorageHandler'
TBLPROPERTIES ("druid.datasource" = "wikipedia")
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@druid_table_1
POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_1
STORED BY 'org.apache.hadoop.hive.druid.QTestDruidStorageHandler'
TBLPROPERTIES ("druid.datasource" = "wikipedia")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@druid_table_1
PREHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` >= '2010-01-01 00:00:00 UTC' AND  `__time` <= '2012-03-01 00:00:00 UTC' OR  added <= 0
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` >= '2010-01-01 00:00:00 UTC' AND  `__time` <= '2012-03-01 00:00:00 UTC' OR  added <= 0
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: druid_table_1
            filterExpr: (((__time >= 2009-12-31 16:00:00.0 US/Pacific) and (__time <= 2012-02-29 16:00:00.0 US/Pacific)) or (added <= 0)) (type: boolean)
            properties:
              druid.query.json {"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":["robot","namespace","anonymous","unpatrolled","page","language","newpage","user"],"metrics":["count","added","delta","variation","deleted"],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
              druid.query.type select
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            Filter Operator
              predicate: (((__time >= 2009-12-31 16:00:00.0 US/Pacific) and (__time <= 2012-02-29 16:00:00.0 US/Pacific)) or (added <= 0)) (type: boolean)
              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
              Select Operator
                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                Group By Operator
                  aggregations: count()
                  mode: hash
                  outputColumnNames: _col0
                  Statistics: Num rows: 1 Data size: 8 Basic stats: PARTIAL Column stats: NONE
                  Reduce Output Operator
                    sort order: 
                    Statistics: Num rows: 1 Data size: 8 Basic stats: PARTIAL Column stats: NONE
                    value expressions: _col0 (type: bigint)
      Reduce Operator Tree:
        Group By Operator
          aggregations: count(VALUE._col0)
          mode: mergepartial
          outputColumnNames: _col0
          Statistics: Num rows: 1 Data size: 8 Basic stats: PARTIAL Column stats: NONE
          File Output Operator
            compressed: false
            Statistics: Num rows: 1 Data size: 8 Basic stats: PARTIAL Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        ListSink

PREHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` <= '2010-01-01 00:00:00 UTC'
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` <= '2010-01-01 00:00:00 UTC'
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"all","aggregations":[{"type":"count","name":"$f0"}],"intervals":["1900-01-01T00:00:00.000Z/2010-01-01T00:00:00.001Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: $f0 (type: bigint)
            outputColumnNames: _col0
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT max(added), sum(variation)
FROM druid_table_1
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT max(added), sum(variation)
FROM druid_table_1
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"all","aggregations":[{"type":"doubleMax","name":"$f0","fieldName":"added"},{"type":"doubleSum","name":"$f1","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: $f0 (type: float), $f1 (type: float)
            outputColumnNames: _col0, _col1
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT `__time`, max(added), sum(variation)
FROM druid_table_1
GROUP BY `__time`
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT `__time`, max(added), sum(variation)
FROM druid_table_1
GROUP BY `__time`
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"extraction","dimension":"__time","outputName":"extract","extractionFn":{"type":"timeFormat","format":"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'","timeZone":"US/Pacific"}}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"]}
            druid.query.type groupBy
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: extract (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_year(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_year(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_year(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_year(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"year","aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_quarter(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_quarter(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_quarter(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_quarter(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"quarter","aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_month(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_month(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_month(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_month(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"month","aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_week(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_week(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_week(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_week(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"week","aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_day(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_day(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_day(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_day(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"day","aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_hour(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_hour(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_hour(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_hour(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"hour","aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_minute(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_minute(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_minute(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_minute(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"minute","aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_second(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_second(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_second(`__time`), max(added), sum(variation)
FROM druid_table_1
GROUP BY floor_second(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"second","aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_hour(`__time`), max(added), sum(variation)
FROM druid_table_1
WHERE robot='1'
GROUP BY floor_hour(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_hour(`__time`), max(added), sum(variation)
FROM druid_table_1
WHERE robot='1'
GROUP BY floor_hour(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"hour","filter":{"type":"selector","dimension":"robot","value":"1"},"aggregations":[{"type":"doubleMax","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"variation"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: __time (type: timestamp with local time zone), $f1 (type: float), $f2 (type: float)
            outputColumnNames: _col0, _col1, _col2
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN
SELECT floor_hour(`__time`), max(added), sum(variation)
FROM druid_table_1
WHERE floor_hour(`__time`)
    BETWEEN CAST('2010-01-01 00:00:00' AS TIMESTAMP WITH LOCAL TIME ZONE)
        AND CAST('2014-01-01 00:00:00' AS TIMESTAMP WITH LOCAL TIME ZONE)
GROUP BY floor_hour(`__time`)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT floor_hour(`__time`), max(added), sum(variation)
FROM druid_table_1
WHERE floor_hour(`__time`)
    BETWEEN CAST('2010-01-01 00:00:00' AS TIMESTAMP WITH LOCAL TIME ZONE)
        AND CAST('2014-01-01 00:00:00' AS TIMESTAMP WITH LOCAL TIME ZONE)
GROUP BY floor_hour(`__time`)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: druid_table_1
            filterExpr: floor_hour(__time) BETWEEN 2010-01-01 00:00:00.0 US/Pacific AND 2014-01-01 00:00:00.0 US/Pacific (type: boolean)
            properties:
              druid.query.json {"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":[],"metrics":["added","variation"],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
              druid.query.type select
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            Filter Operator
              predicate: floor_hour(__time) BETWEEN 2010-01-01 00:00:00.0 US/Pacific AND 2014-01-01 00:00:00.0 US/Pacific (type: boolean)
              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
              Select Operator
                expressions: floor_hour(__time) (type: timestamp with local time zone), added (type: float), variation (type: float)
                outputColumnNames: _col0, _col1, _col2
                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                Group By Operator
                  aggregations: max(_col1), sum(_col2)
                  keys: _col0 (type: timestamp with local time zone)
                  mode: hash
                  outputColumnNames: _col0, _col1, _col2
                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: timestamp with local time zone)
                    sort order: +
                    Map-reduce partition columns: _col0 (type: timestamp with local time zone)
                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                    value expressions: _col1 (type: float), _col2 (type: double)
      Reduce Operator Tree:
        Group By Operator
          aggregations: max(VALUE._col0), sum(VALUE._col1)
          keys: KEY._col0 (type: timestamp with local time zone)
          mode: mergepartial
          outputColumnNames: _col0, _col1, _col2
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          File Output Operator
            compressed: false
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        ListSink

PREHOOK: query: EXPLAIN
SELECT subq.h, subq.m, subq.s
FROM
(
  SELECT floor_hour(`__time`) as h, max(added) as m, sum(variation) as s
  FROM druid_table_1
  GROUP BY floor_hour(`__time`)
) subq
WHERE subq.h BETWEEN CAST('2010-01-01 00:00:00' AS TIMESTAMP WITH LOCAL TIME ZONE)
        AND CAST('2014-01-01 00:00:00' AS TIMESTAMP WITH LOCAL TIME ZONE)
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
SELECT subq.h, subq.m, subq.s
FROM
(
  SELECT floor_hour(`__time`) as h, max(added) as m, sum(variation) as s
  FROM druid_table_1
  GROUP BY floor_hour(`__time`)
) subq
WHERE subq.h BETWEEN CAST('2010-01-01 00:00:00' AS TIMESTAMP WITH LOCAL TIME ZONE)
        AND CAST('2014-01-01 00:00:00' AS TIMESTAMP WITH LOCAL TIME ZONE)
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Map Operator Tree:
          TableScan
            alias: druid_table_1
            filterExpr: floor_hour(__time) BETWEEN 2010-01-01 00:00:00.0 US/Pacific AND 2014-01-01 00:00:00.0 US/Pacific (type: boolean)
            properties:
              druid.query.json {"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":[],"metrics":["added","variation"],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
              druid.query.type select
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            Filter Operator
              predicate: floor_hour(__time) BETWEEN 2010-01-01 00:00:00.0 US/Pacific AND 2014-01-01 00:00:00.0 US/Pacific (type: boolean)
              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
              Select Operator
                expressions: floor_hour(__time) (type: timestamp with local time zone), added (type: float), variation (type: float)
                outputColumnNames: _col0, _col1, _col2
                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                Group By Operator
                  aggregations: max(_col1), sum(_col2)
                  keys: _col0 (type: timestamp with local time zone)
                  mode: hash
                  outputColumnNames: _col0, _col1, _col2
                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: timestamp with local time zone)
                    sort order: +
                    Map-reduce partition columns: _col0 (type: timestamp with local time zone)
                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
                    value expressions: _col1 (type: float), _col2 (type: double)
      Reduce Operator Tree:
        Group By Operator
          aggregations: max(VALUE._col0), sum(VALUE._col1)
          keys: KEY._col0 (type: timestamp with local time zone)
          mode: mergepartial
          outputColumnNames: _col0, _col1, _col2
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          File Output Operator
            compressed: false
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        ListSink

PREHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"all","aggregations":[{"type":"count","name":"$f0"}],"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: $f0 (type: bigint)
            outputColumnNames: _col0
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` <= '2010-01-01 00:00:00 UTC'
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` <= '2010-01-01 00:00:00 UTC'
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"all","aggregations":[{"type":"count","name":"$f0"}],"intervals":["1900-01-01T00:00:00.000Z/2010-01-01T00:00:00.001Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: $f0 (type: bigint)
            outputColumnNames: _col0
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` >= '2010-01-01 00:00:00'
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` >= '2010-01-01 00:00:00'
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"all","aggregations":[{"type":"count","name":"$f0"}],"intervals":["2010-01-01T08:00:00.000Z/3000-01-01T00:00:00.000Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: $f0 (type: bigint)
            outputColumnNames: _col0
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink

PREHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` <= '2010-01-01 00:00:00' OR  `__time` <= '2012-03-01 00:00:00'
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT count(`__time`) from druid_table_1 where `__time` <= '2010-01-01 00:00:00' OR  `__time` <= '2012-03-01 00:00:00'
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
  Stage-0 is a root stage

STAGE PLANS:
  Stage: Stage-0
    Fetch Operator
      limit: -1
      Processor Tree:
        TableScan
          alias: druid_table_1
          properties:
            druid.query.json {"queryType":"timeseries","dataSource":"wikipedia","descending":false,"granularity":"all","aggregations":[{"type":"count","name":"$f0"}],"intervals":["1900-01-01T00:00:00.000Z/2012-03-01T08:00:00.001Z"],"context":{"skipEmptyBuckets":true}}
            druid.query.type timeseries
          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
          Select Operator
            expressions: $f0 (type: bigint)
            outputColumnNames: _col0
            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
            ListSink
