public class org.apache.hadoop.hive.druid.TestHiveDruidQueryBasedInputFormat extends java.lang.Object
{
private static final java.lang.String TIMESERIES_QUERY;
private static final java.lang.String TIMESERIES_QUERY_SPLIT;
private static final java.lang.String TOPN_QUERY;
private static final java.lang.String TOPN_QUERY_SPLIT;
private static final java.lang.String GROUP_BY_QUERY;
private static final java.lang.String GROUP_BY_QUERY_SPLIT;
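// NOTE: the initializers for these compile-time constants are not shown in this
// listing; javac inlines static final String constants at their use sites, which
// is presumably why the query and expected-split values appear as inline
// literals in testTimeZone() below.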
public void <init>()
{
org.apache.hadoop.hive.druid.TestHiveDruidQueryBasedInputFormat thisLocal;
thisLocal := @this: org.apache.hadoop.hive.druid.TestHiveDruidQueryBasedInputFormat;
specialinvoke thisLocal.<java.lang.Object: void <init>()>();
return;
}
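// Exercises the private DruidQueryBasedInputFormat.getInputSplits(Configuration)
// method via reflection for three Druid query types (timeseries, topN, groupBy),
// asserting that intervals written with -08:00/-07:00 offsets come back
// normalized to UTC in the serialized HiveDruidSplits.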
public void testTimeZone() throws java.lang.Exception
{
java.lang.Object[] invokeArgs, splits;
org.apache.hadoop.conf.Configuration conf;
java.lang.String actual;
java.lang.reflect.Method method;
org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat inputFormat;
java.lang.Class[] paramTypes;
org.apache.hadoop.hive.druid.TestHiveDruidQueryBasedInputFormat thisLocal;
java.lang.Class clazz;
java.lang.Object rawSplits;
thisLocal := @this: org.apache.hadoop.hive.druid.TestHiveDruidQueryBasedInputFormat;
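// Instantiate the input format and make the private
// getInputSplits(Configuration) method callable via reflection.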
inputFormat = new org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat;
specialinvoke inputFormat.<org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat: void <init>()>();
clazz = class "Lorg/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat;";
paramTypes = newarray (java.lang.Class)[1];
paramTypes[0] = class "Lorg/apache/hadoop/conf/Configuration;";
method = virtualinvoke clazz.<java.lang.Class: java.lang.reflect.Method getDeclaredMethod(java.lang.String,java.lang.Class[])>("getInputSplits", paramTypes);
virtualinvoke method.<java.lang.reflect.Method: void setAccessible(boolean)>(1);
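// Case 1: timeseries query written with -08:00 interval offsets; the split
// must serialize the intervals in UTC.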
conf = staticinvoke <org.apache.hadoop.hive.druid.TestHiveDruidQueryBasedInputFormat: org.apache.hadoop.conf.Configuration createPropertiesQuery(java.lang.String,java.lang.String,java.lang.String)>("sample_datasource", "timeseries", "{  \"queryType\": \"timeseries\",  \"dataSource\": \"sample_datasource\",  \"granularity\": \"DAY\",  \"descending\": \"true\",  \"intervals\": [ \"2012-01-01T00:00:00.000-08:00/2012-01-03T00:00:00.000-08:00\" ]}");
invokeArgs = newarray (java.lang.Object)[1];
invokeArgs[0] = conf;
rawSplits = virtualinvoke method.<java.lang.reflect.Method: java.lang.Object invoke(java.lang.Object,java.lang.Object[])>(inputFormat, invokeArgs);
splits = (java.lang.Object[]) rawSplits;
actual = staticinvoke <java.util.Arrays: java.lang.String toString(java.lang.Object[])>(splits);
staticinvoke <org.junit.Assert: void assertEquals(java.lang.Object,java.lang.Object)>("[HiveDruidSplit{{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"sample_datasource\"},\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2012-01-01T08:00:00.000Z/2012-01-03T08:00:00.000Z\"]},\"descending\":true,\"virtualColumns\":[],\"filter\":null,\"granularity\":\"DAY\",\"aggregations\":[],\"postAggregations\":[],\"limit\":2147483647,\"context\":{\"queryId\":\"\"}}, [localhost:8082]}]", actual);
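// Case 2: topN query written with -07:00 interval offsets; same UTC check.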
conf = staticinvoke <org.apache.hadoop.hive.druid.TestHiveDruidQueryBasedInputFormat: org.apache.hadoop.conf.Configuration createPropertiesQuery(java.lang.String,java.lang.String,java.lang.String)>("sample_datasource", "topN", "{  \"queryType\": \"topN\",  \"dataSource\": \"sample_data\",  \"dimension\": \"sample_dim\",  \"threshold\": 5,  \"metric\": \"count\",  \"aggregations\": [    {      \"type\": \"longSum\",      \"name\": \"count\",      \"fieldName\": \"count\"    },    {      \"type\": \"doubleSum\",      \"name\": \"some_metric\",      \"fieldName\": \"some_metric\"    }  ],  \"granularity\": \"all\",  \"intervals\": [    \"2013-08-31T00:00:00.000-07:00/2013-09-03T00:00:00.000-07:00\"  ]}");
invokeArgs = newarray (java.lang.Object)[1];
invokeArgs[0] = conf;
rawSplits = virtualinvoke method.<java.lang.reflect.Method: java.lang.Object invoke(java.lang.Object,java.lang.Object[])>(inputFormat, invokeArgs);
splits = (java.lang.Object[]) rawSplits;
actual = staticinvoke <java.util.Arrays: java.lang.String toString(java.lang.Object[])>(splits);
staticinvoke <org.junit.Assert: void assertEquals(java.lang.Object,java.lang.Object)>("[HiveDruidSplit{{\"queryType\":\"topN\",\"dataSource\":{\"type\":\"table\",\"name\":\"sample_data\"},\"virtualColumns\":[],\"dimension\":{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"sample_dim\",\"outputName\":\"sample_dim\",\"outputType\":\"STRING\"},\"metric\":{\"type\":\"LegacyTopNMetricSpec\",\"metric\":\"count\"},\"threshold\":5,\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2013-08-31T07:00:00.000Z/2013-09-03T07:00:00.000Z\"]},\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"longSum\",\"name\":\"count\",\"fieldName\":\"count\",\"expression\":null},{\"type\":\"doubleSum\",\"name\":\"some_metric\",\"fieldName\":\"some_metric\",\"expression\":null}],\"postAggregations\":[],\"context\":{\"queryId\":\"\"},\"descending\":false}, [localhost:8082]}]", actual);
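// Case 3: groupBy query; checks the default limitSpec ordering alongside the
// UTC normalization of the intervals.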
conf = staticinvoke <org.apache.hadoop.hive.druid.TestHiveDruidQueryBasedInputFormat: org.apache.hadoop.conf.Configuration createPropertiesQuery(java.lang.String,java.lang.String,java.lang.String)>("sample_datasource", "groupBy", "{  \"queryType\": \"groupBy\",  \"dataSource\": \"sample_datasource\",  \"granularity\": \"day\",  \"dimensions\": [\"country\", \"device\"],  \"limitSpec\": { \"type\": \"default\", \"limit\": 5000, \"columns\": [\"country\", \"data_transfer\"] },  \"aggregations\": [    { \"type\": \"longSum\", \"name\": \"total_usage\", \"fieldName\": \"user_count\" },    { \"type\": \"doubleSum\", \"name\": \"data_transfer\", \"fieldName\": \"data_transfer\" }  ],  \"intervals\": [ \"2012-01-01T00:00:00.000-08:00/2012-01-03T00:00:00.000-08:00\" ] }");
invokeArgs = newarray (java.lang.Object)[1];
invokeArgs[0] = conf;
rawSplits = virtualinvoke method.<java.lang.reflect.Method: java.lang.Object invoke(java.lang.Object,java.lang.Object[])>(inputFormat, invokeArgs);
splits = (java.lang.Object[]) rawSplits;
actual = staticinvoke <java.util.Arrays: java.lang.String toString(java.lang.Object[])>(splits);
staticinvoke <org.junit.Assert: void assertEquals(java.lang.Object,java.lang.Object)>("[HiveDruidSplit{{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"sample_datasource\"},\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2012-01-01T08:00:00.000Z/2012-01-03T08:00:00.000Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":\"DAY\",\"dimensions\":[{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"country\",\"outputName\":\"country\",\"outputType\":\"STRING\"},{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"device\",\"outputName\":\"device\",\"outputType\":\"STRING\"}],\"aggregations\":[{\"type\":\"longSum\",\"name\":\"total_usage\",\"fieldName\":\"user_count\",\"expression\":null},{\"type\":\"doubleSum\",\"name\":\"data_transfer\",\"fieldName\":\"data_transfer\",\"expression\":null}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"default\",\"columns\":[{\"dimension\":\"country\",\"direction\":\"ascending\",\"dimensionOrder\":{\"type\":\"lexicographic\"}},{\"dimension\":\"data_transfer\",\"direction\":\"ascending\",\"dimensionOrder\":{\"type\":\"lexicographic\"}}],\"limit\":5000},\"context\":{\"queryId\":\"\"},\"descending\":false}, [localhost:8082]}]", actual);
return;
}
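// Builds a Configuration carrying the Druid datasource, query type, and raw
// query JSON, pointing at a dummy input dir and a fixed broker address so the
// generated splits are deterministic.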
private static org.apache.hadoop.conf.Configuration createPropertiesQuery(java.lang.String, java.lang.String, java.lang.String)
{
org.apache.hadoop.hive.conf.HiveConf$ConfVars brokerConfVar;
org.apache.hadoop.conf.Configuration conf;
java.lang.String dataSource, queryType, jsonQuery, brokerConfKey;
dataSource := @parameter0: java.lang.String;
queryType := @parameter1: java.lang.String;
jsonQuery := @parameter2: java.lang.String;
conf = new org.apache.hadoop.conf.Configuration;
specialinvoke conf.<org.apache.hadoop.conf.Configuration: void <init>()>();
virtualinvoke conf.<org.apache.hadoop.conf.Configuration: void set(java.lang.String,java.lang.String)>("mapreduce.input.fileinputformat.inputdir", "/my/dir");
// Fix the broker address so the expected "[localhost:8082]" split locations match.
brokerConfVar = <org.apache.hadoop.hive.conf.HiveConf$ConfVars: org.apache.hadoop.hive.conf.HiveConf$ConfVars HIVE_DRUID_BROKER_DEFAULT_ADDRESS>;
brokerConfKey = brokerConfVar.<org.apache.hadoop.hive.conf.HiveConf$ConfVars: java.lang.String varname>;
virtualinvoke conf.<org.apache.hadoop.conf.Configuration: void set(java.lang.String,java.lang.String)>(brokerConfKey, "localhost:8082");
virtualinvoke conf.<org.apache.hadoop.conf.Configuration: void set(java.lang.String,java.lang.String)>("druid.datasource", dataSource);
virtualinvoke conf.<org.apache.hadoop.conf.Configuration: void set(java.lang.String,java.lang.String)>("druid.query.json", jsonQuery);
virtualinvoke conf.<org.apache.hadoop.conf.Configuration: void set(java.lang.String,java.lang.String)>("druid.query.type", queryType);
return conf;
}
}
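// For readability, the block comment below sketches the Java source this Jimple
// plausibly corresponds to. It is a reconstruction, not the original file: the
// local names, the @Test annotation, and the use of the declared String
// constants in place of the inlined literals are assumptions.
/*
import java.lang.reflect.Method;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat;
import org.junit.Assert;
import org.junit.Test;

public class TestHiveDruidQueryBasedInputFormat {
  // Constant values are the JSON and expected-split literals shown inline above.
  private static final String TIMESERIES_QUERY = "...";
  private static final String TIMESERIES_QUERY_SPLIT = "...";

  private static Configuration createPropertiesQuery(String dataSource,
      String queryType, String jsonQuery) {
    Configuration conf = new Configuration();
    conf.set("mapreduce.input.fileinputformat.inputdir", "/my/dir");
    conf.set(HiveConf.ConfVars.HIVE_DRUID_BROKER_DEFAULT_ADDRESS.varname, "localhost:8082");
    conf.set("druid.datasource", dataSource);
    conf.set("druid.query.json", jsonQuery);
    conf.set("druid.query.type", queryType);
    return conf;
  }

  @Test
  public void testTimeZone() throws Exception {
    DruidQueryBasedInputFormat inputFormat = new DruidQueryBasedInputFormat();
    Method method = DruidQueryBasedInputFormat.class
        .getDeclaredMethod("getInputSplits", Configuration.class);
    method.setAccessible(true);

    // Shown for the timeseries case; topN and groupBy follow the same pattern.
    Configuration conf =
        createPropertiesQuery("sample_datasource", "timeseries", TIMESERIES_QUERY);
    Object[] splits = (Object[]) method.invoke(inputFormat, conf);
    Assert.assertEquals(TIMESERIES_QUERY_SPLIT, Arrays.toString(splits));
  }
}
*/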