package org.apache.hadoop.hive.ql.io;

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils;
import org.apache.hadoop.hive.druid.io.DruidRecordWriter;
import org.apache.hadoop.hive.druid.serde.DruidWritable;
import org.apache.hive.druid.com.fasterxml.jackson.core.type.TypeReference;
import org.apache.hive.druid.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hive.druid.com.google.common.collect.ImmutableList;
import org.apache.hive.druid.com.google.common.collect.ImmutableMap;
import org.apache.hive.druid.com.google.common.collect.Lists;
import org.apache.hive.druid.org.apache.druid.data.input.Firehose;
import org.apache.hive.druid.org.apache.druid.data.input.InputRow;
import org.apache.hive.druid.org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.hive.druid.org.apache.druid.data.input.impl.MapInputRowParser;
import org.apache.hive.druid.org.apache.druid.data.input.impl.StringDimensionSchema;
import org.apache.hive.druid.org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
import org.apache.hive.druid.org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.hive.druid.org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.hive.druid.org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.hive.druid.org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.hive.druid.org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import org.apache.hive.druid.org.apache.druid.segment.IndexSpec;
import org.apache.hive.druid.org.apache.druid.segment.QueryableIndex;
import org.apache.hive.druid.org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.hive.druid.org.apache.druid.segment.data.RoaringBitmapSerdeFactory;
import org.apache.hive.druid.org.apache.druid.segment.indexing.DataSchema;
import org.apache.hive.druid.org.apache.druid.segment.indexing.RealtimeTuningConfig;
import org.apache.hive.druid.org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.hive.druid.org.apache.druid.segment.loading.DataSegmentPusher;
import org.apache.hive.druid.org.apache.druid.segment.loading.LocalDataSegmentPuller;
import org.apache.hive.druid.org.apache.druid.segment.loading.LocalDataSegmentPusher;
import org.apache.hive.druid.org.apache.druid.segment.loading.LocalDataSegmentPusherConfig;
import org.apache.hive.druid.org.apache.druid.segment.loading.SegmentLoadingException;
import org.apache.hive.druid.org.apache.druid.segment.realtime.firehose.IngestSegmentFirehose;
import org.apache.hive.druid.org.apache.druid.segment.realtime.firehose.WindowedStorageAdapter;
import org.apache.hive.druid.org.apache.druid.timeline.DataSegment;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * Tests for the Hive {@code DruidRecordWriter}: writes rows into a local Druid
 * segment, reads them back through an {@code IngestSegmentFirehose}, and checks
 * segment-descriptor deserialization. Local variable names, generic types, and
 * the JUnit annotations ({@code @Test}, {@code @Rule}) are inferred from the
 * data flow; they are not preserved in the compiled form this listing was
 * recovered from.
 */
public class TestDruidRecordWriter {

  private static final Interval INTERVAL_FULL = new Interval("2014-10-22T00:00:00Z/P1D");

  private final ObjectMapper objectMapper = DruidStorageHandlerUtils.JSON_MAPPER;

  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();
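  // Fixture data: three pre-aggregated hourly rows for 2014-10-22, each carrying
  // a "__time" timestamp in millis, a single "host" dimension value, a
  // "visited_sum" long metric, and a "unique_hosts" sketch whose estimated
  // cardinality is expected to be 1.0.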
  final List<Map<String, Object>> expectedRows = ImmutableList.of(
      ImmutableMap.<String, Object>of(
          "__time", DateTime.parse("2014-10-22T00:00:00.000Z").getMillis(),
          "host", ImmutableList.of("a.example.com"),
          "visited_sum", 190L,
          "unique_hosts", 1.0d),
      ImmutableMap.<String, Object>of(
          "__time", DateTime.parse("2014-10-22T01:00:00.000Z").getMillis(),
          "host", ImmutableList.of("b.example.com"),
          "visited_sum", 175L,
          "unique_hosts", 1.0d),
      ImmutableMap.<String, Object>of(
          "__time", DateTime.parse("2014-10-22T02:00:00.000Z").getMillis(),
          "host", ImmutableList.of("c.example.com"),
          "visited_sum", 270L,
          "unique_hosts", 1.0d));
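  /**
   * Guards the Druid timestamp column name. Both operands below were
   * compile-time constants and were inlined by javac; the original source
   * presumably compared the Hive-side and Druid-side timestamp-column constants
   * rather than two identical literals.
   */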
  @Test
  public void testTimeStampColumnName() {
    Assert.assertEquals("Time column name needs to match to ensure serde/deser compatibility",
        "__time", "__time");
  }
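  /**
   * End-to-end round trip: writes the expected rows through DruidRecordWriter
   * into a local segment, asserts that exactly one segment descriptor was
   * produced, pulls and loads the segment, re-reads it with an
   * IngestSegmentFirehose, and compares the rows read back against the fixture.
   */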
  @Test
  public void testWrite() throws IOException, SegmentLoadingException {
    final File segmentOutputDir = temporaryFolder.newFolder();
    final File workingDir = temporaryFolder.newFolder();
    final Configuration config = new Configuration();

    // Parser for map-shaped rows: "__time" is the timestamp, "host" the only dimension.
    final MapInputRowParser inputRowParser = new MapInputRowParser(
        new TimeAndDimsParseSpec(
            new TimestampSpec("__time", "auto", null),
            new DimensionsSpec(
                ImmutableList.of(new StringDimensionSchema("host")), null, null)));
    final Map<String, Object> parserMap = objectMapper.convertValue(
        inputRowParser, new TypeReference<Map<String, Object>>() {
        });

    final DataSchema dataSchema = new DataSchema(
        "testDataSource",
        parserMap,
        new AggregatorFactory[] {
            new LongSumAggregatorFactory("visited_sum", "visited_sum"),
            new HyperUniquesAggregatorFactory("unique_hosts", "unique_hosts")
        },
        new UniformGranularitySpec(
            Granularities.DAY, Granularities.NONE, ImmutableList.of(INTERVAL_FULL)),
        null,
        objectMapper);

    final IndexSpec indexSpec = new IndexSpec(
        new RoaringBitmapSerdeFactory(true), null, null, null);
    final RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig(
        null, null, null, null,
        temporaryFolder.newFolder(), // basePersistDirectory
        null, null, null, null,
        indexSpec, null, null,
        0, 0, null, null,
        0L, // alertTimeout
        null, null);

    final LocalFileSystem localFileSystem = FileSystem.getLocal(config);
    // The anonymous config subclass was compiled into TestDruidRecordWriter$2,
    // whose body is not part of this listing; it captures segmentOutputDir and
    // presumably points the pusher's storage directory at it, as sketched here.
    final DataSegmentPusher dataSegmentPusher = new LocalDataSegmentPusher(
        new LocalDataSegmentPusherConfig() {
          @Override
          public File getStorageDirectory() {
            return segmentOutputDir;
          }
        });
    final Path segmentDescriptorPath =
        new Path(workingDir.getAbsolutePath(), "segmentsDescriptorDir");
    final DruidRecordWriter druidRecordWriter = new DruidRecordWriter(
        dataSchema, tuningConfig, dataSegmentPusher, 20, segmentDescriptorPath,
        localFileSystem);

    // The mapping lambda was compiled into a synthetic method
    // (lambda_testWrite_0__9) whose body is not included in this listing; it
    // turns each expected row map into a DruidWritable. toDruidWritable below is
    // a placeholder name for that elided method.
    final List<DruidWritable> druidWritables = expectedRows.stream()
        .map(TestDruidRecordWriter::toDruidWritable)
        .collect(Collectors.toList());
    for (DruidWritable druidWritable : druidWritables) {
      druidRecordWriter.write(druidWritable);
    }
    druidRecordWriter.close(false);

    final List<DataSegment> dataSegmentList =
        DruidStorageHandlerUtils.getCreatedSegments(segmentDescriptorPath, config);
    Assert.assertEquals(1, dataSegmentList.size());

    final File unzippedSegmentDir = temporaryFolder.newFolder();
    new LocalDataSegmentPuller().getSegmentFiles(dataSegmentList.get(0), unzippedSegmentDir);
    final QueryableIndex queryableIndex =
        DruidStorageHandlerUtils.INDEX_IO.loadIndex(unzippedSegmentDir);
    final QueryableIndexStorageAdapter adapter =
        new QueryableIndexStorageAdapter(queryableIndex);
    final Firehose firehose = new IngestSegmentFirehose(
        ImmutableList.of(new WindowedStorageAdapter(adapter, adapter.getInterval())),
        null,
        ImmutableList.of("host"),
        ImmutableList.of("visited_sum", "unique_hosts"),
        null);

    final List<InputRow> rows = Lists.newArrayList();
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
    verifyRows(expectedRows, rows);
  }
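  /**
   * Compares rows read back from the segment against the expected fixture maps:
   * the dimension list, the timestamp, the "host" dimension, the "visited_sum"
   * metric, and the estimated cardinality of the "unique_hosts" HyperLogLog
   * sketch (to within 0.001).
   */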
  private void verifyRows(List<Map<String, Object>> expectedRows, List<InputRow> actualRows) {
    System.out.println("actualRows = " + actualRows);
    Assert.assertEquals(expectedRows.size(), actualRows.size());
    for (int i = 0; i < expectedRows.size(); i++) {
      final Map<String, Object> expected = expectedRows.get(i);
      final InputRow actual = actualRows.get(i);
      Assert.assertEquals(ImmutableList.of("host"), actual.getDimensions());
      Assert.assertEquals(expected.get("__time"), actual.getTimestamp().getMillis());
      Assert.assertEquals(expected.get("host"), actual.getDimension("host"));
      Assert.assertEquals(expected.get("visited_sum"), actual.getMetric("visited_sum"));
      final double expectedCardinality = (Double) expected.get("unique_hosts");
      final double actualCardinality = (Double) HyperUniquesAggregatorFactory
          .estimateCardinality(actual.getRaw("unique_hosts"), false);
      Assert.assertEquals(expectedCardinality, actualCardinality, 0.001);
    }
  }
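  /**
   * Deserializes a captured segment-descriptor JSON (an HDFS load spec recorded
   * from a real Hive-to-Druid ingestion) into a DataSegment and checks that the
   * parsed data source name matches.
   */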
  @Test
  public void testSerDesr() throws IOException {
    final String segmentDescriptorJson = "{\"dataSource\":\"datasource2015\","
        + "\"interval\":\"2015-06-01T00:00:00.000-04:00/2015-06-02T00:00:00.000-04:00\","
        + "\"version\":\"2016-11-04T19:24:01.732-04:00\","
        + "\"loadSpec\":{\"type\":\"hdfs\",\"path\":\"hdfs://cn105-10.l42scl.hortonworks.com:8020/apps/hive/warehouse/druid.db/.hive-staging_hive_2016-11-04_19-23-50_168_1550339856804207572-1/_task_tmp.-ext-10002/_tmp.000000_0/datasource2015/20150601T000000.000-0400_20150602T000000.000-0400/2016-11-04T19_24_01.732-04_00/0/index.zip\"},"
        + "\"dimensions\":\"dimension1\",\"metrics\":\"bigint\","
        + "\"shardSpec\":{\"type\":\"linear\",\"partitionNum\":0},"
        + "\"binaryVersion\":9,\"size\":1765,"
        + "\"identifier\":\"datasource2015_2015-06-01T00:00:00.000-04:00_2015-06-02T00:00:00.000-04:00_2016-11-04T19:24:01.732-04:00\"}";
    final DataSegment dataSegment = objectMapper.readerFor(DataSegment.class)
        .readValue(segmentDescriptorJson);
    Assert.assertEquals("datasource2015", dataSegment.getDataSource());
  }
}