public class org.apache.drill.exec.store.hdf.HDF5BatchReader extends java.lang.Object implements org.apache.drill.exec.physical.impl.scan.framework.ManagedReader
{
private static final org.slf4j.Logger logger;
private static final java.lang.String PATH_COLUMN_NAME;
private static final java.lang.String DATA_TYPE_COLUMN_NAME;
private static final java.lang.String IS_LINK_COLUMN_NAME;
private static final java.lang.String FILE_NAME_COLUMN_NAME;
private static final java.lang.String INT_COLUMN_PREFIX;
private static final java.lang.String LONG_COLUMN_PREFIX;
private static final java.lang.String FLOAT_COLUMN_PREFIX;
private static final java.lang.String DOUBLE_COLUMN_PREFIX;
private static final java.lang.String INT_COLUMN_NAME;
private static final java.lang.String FLOAT_COLUMN_NAME;
private static final java.lang.String DOUBLE_COLUMN_NAME;
private static final java.lang.String LONG_COLUMN_NAME;
private static final java.lang.String DATA_SIZE_COLUMN_NAME;
private static final java.lang.String ELEMENT_COUNT_NAME;
private static final java.lang.String DATASET_DATA_TYPE_NAME;
private static final java.lang.String DIMENSIONS_FIELD_NAME;
private static final int PREVIEW_ROW_LIMIT;
private static final int PREVIEW_COL_LIMIT;
private static final int MAX_DATASET_SIZE;
private final org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig;
private final java.util.List dataWriters;
private final int maxRecords;
private java.lang.String fileName;
private org.apache.hadoop.mapred.FileSplit split;
private io.jhdf.HdfFile hdfFile;
private java.io.BufferedReader reader;
private org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter;
private org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec;
private java.util.Iterator metadataIterator;
private org.apache.drill.exec.vector.accessor.ScalarWriter pathWriter;
private org.apache.drill.exec.vector.accessor.ScalarWriter dataTypeWriter;
private org.apache.drill.exec.vector.accessor.ScalarWriter fileNameWriter;
private org.apache.drill.exec.vector.accessor.ScalarWriter linkWriter;
private org.apache.drill.exec.vector.accessor.ScalarWriter dataSizeWriter;
private org.apache.drill.exec.vector.accessor.ScalarWriter elementCountWriter;
private org.apache.drill.exec.vector.accessor.ScalarWriter datasetTypeWriter;
private org.apache.drill.exec.vector.accessor.ScalarWriter dimensionsWriter;
private org.apache.drill.common.exceptions.CustomErrorContext errorContext;
private boolean showMetadataPreview;
private int[] dimensions;
static final boolean $assertionsDisabled;
public void <init>(org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig, int)
{
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
int v;
org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig v;
org.apache.drill.exec.store.hdf.HDF5FormatConfig v;
boolean v;
java.util.ArrayList v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig;
v := @parameter: int;
specialinvoke v.<java.lang.Object: void <init>()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig> = v;
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int maxRecords> = v;
v = new java.util.ArrayList;
specialinvoke v.<java.util.ArrayList: void <init>()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: org.apache.drill.exec.store.hdf.HDF5FormatConfig formatConfig>;
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5FormatConfig: boolean showPreview()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: boolean showMetadataPreview> = v;
return;
}
public boolean open(org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator)
{
org.apache.drill.common.exceptions.UserException v;
org.apache.drill.exec.record.metadata.SchemaBuilder v, v, v, v, v, v, v, v, v;
org.apache.hadoop.mapred.FileSplit v, v, v;
io.jhdf.api.Dataset v;
io.jhdf.HdfFile v, v;
java.lang.Object[] v;
org.apache.drill.common.exceptions.UserException$Builder v, v, v;
java.util.ArrayList v;
org.apache.drill.exec.vector.accessor.ScalarWriter v, v, v, v, v, v, v, v;
java.lang.String v, v, v, v;
java.io.IOException v;
org.apache.drill.exec.store.hdf.writers.WriterSpec v;
org.apache.drill.common.types.TypeProtos$MinorType v, v, v, v, v, v, v, v;
int[] v, v, v, v;
org.apache.hadoop.fs.Path v, v;
org.apache.drill.exec.record.metadata.TupleMetadata v, v;
org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator v;
java.util.List v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig v, v, v;
org.apache.drill.common.exceptions.CustomErrorContext v, v, v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v, v, v, v, v, v, v, v, v, v, v;
int v, v;
org.slf4j.Logger v;
java.util.Iterator v;
org.apache.drill.exec.physical.resultSet.ResultSetLoader v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator;
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator: org.apache.hadoop.mapred.FileSplit split()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.hadoop.mapred.FileSplit split> = v;
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator: org.apache.drill.common.exceptions.CustomErrorContext parentErrorContext()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.common.exceptions.CustomErrorContext errorContext> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.hadoop.mapred.FileSplit split>;
v = virtualinvoke v.<org.apache.hadoop.mapred.FileSplit: org.apache.hadoop.fs.Path getPath()>();
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String getName()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.lang.String fileName> = v;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void openFile(org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator)>(v);
label:
goto label;
label:
v := @caughtexception;
v = staticinvoke <org.apache.drill.common.exceptions.UserException: org.apache.drill.common.exceptions.UserException$Builder dataReadError(java.lang.Throwable)>(v);
v = newarray (java.lang.Object)[1];
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.hadoop.mapred.FileSplit split>;
v = virtualinvoke v.<org.apache.hadoop.mapred.FileSplit: org.apache.hadoop.fs.Path getPath()>();
v[0] = v;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException$Builder addContext(java.lang.String,java.lang.Object[])>("Failed to close input file: %s", v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.common.exceptions.CustomErrorContext errorContext>;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException$Builder addContext(org.apache.drill.common.exceptions.CustomErrorContext)>(v);
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException build(org.slf4j.Logger)>(v);
throw v;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
if v != null goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = new java.util.ArrayList;
specialinvoke v.<java.util.ArrayList: void <init>()>();
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List getFileMetadata(io.jhdf.api.Group,java.util.List)>(v, v);
v = interfaceinvoke v.<java.util.List: java.util.Iterator iterator()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.Iterator metadataIterator> = v;
v = new org.apache.drill.exec.record.metadata.SchemaBuilder;
specialinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: void <init>()>();
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("path", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("data_type", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("file_name", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType BIGINT>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("data_size", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType BIT>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("is_link", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType BIGINT>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("element_count", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("dataset_data_type", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("dimensions", v);
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.TupleMetadata buildSchema()>();
interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator: void tableSchema(org.apache.drill.exec.record.metadata.TupleMetadata,boolean)>(v, 0);
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator: org.apache.drill.exec.physical.resultSet.ResultSetLoader build()>();
v = newarray (int)[0];
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int[] dimensions> = v;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.ResultSetLoader: org.apache.drill.exec.physical.resultSet.RowSetLoader writer()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter> = v;
goto label;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
v = virtualinvoke v.<io.jhdf.HdfFile: io.jhdf.api.Dataset getDatasetByPath(java.lang.String)>(v);
v = interfaceinvoke v.<io.jhdf.api.Dataset: int[] getDimensions()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int[] dimensions> = v;
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator: org.apache.drill.exec.physical.resultSet.ResultSetLoader build()>();
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.ResultSetLoader: org.apache.drill.exec.physical.resultSet.RowSetLoader writer()>();
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter> = v;
v = new org.apache.drill.exec.store.hdf.writers.WriterSpec;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator: org.apache.drill.exec.record.metadata.TupleMetadata providedSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator: org.apache.drill.common.exceptions.CustomErrorContext parentErrorContext()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.WriterSpec: void <init>(org.apache.drill.exec.vector.accessor.TupleWriter,org.apache.drill.exec.record.metadata.TupleMetadata,org.apache.drill.common.exceptions.CustomErrorContext)>(v, v, v);
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int[] dimensions>;
v = lengthof v;
if v > 1 goto label;
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void buildSchemaFor1DimensionalDataset(io.jhdf.api.Dataset)>(v);
goto label;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int[] dimensions>;
v = lengthof v;
if v != 2 goto label;
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void buildSchemaFor2DimensionalDataset(io.jhdf.api.Dataset)>(v);
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void buildSchemaFor2DimensionalDataset(io.jhdf.api.Dataset)>(v);
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
if v != null goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("path");
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter pathWriter> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("data_type");
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter dataTypeWriter> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("file_name");
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter fileNameWriter> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("data_size");
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter dataSizeWriter> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("is_link");
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter linkWriter> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("element_count");
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter elementCountWriter> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("dataset_data_type");
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter datasetTypeWriter> = v;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("dimensions");
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter dimensionsWriter> = v;
label:
return 1;
catch java.io.IOException from label to label with label;
}
private void buildSchemaFor1DimensionalDataset(io.jhdf.api.Dataset)
{
org.slf4j.Logger v;
io.jhdf.object.datatype.DataType v;
org.apache.drill.common.types.TypeProtos$MinorType v;
java.util.List v;
org.apache.drill.exec.store.hdf.writers.HDF5DataWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
io.jhdf.api.Dataset v;
java.lang.Class v;
java.lang.String v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: io.jhdf.api.Dataset;
v = interfaceinvoke v.<io.jhdf.api.Dataset: io.jhdf.object.datatype.DataType getDataType()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: org.apache.drill.common.types.TypeProtos$MinorType getDataType(io.jhdf.object.datatype.DataType)>(v);
if v != null goto label;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Class getJavaType()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Couldn\'t add {}", v);
return;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.HDF5DataWriter buildWriter(org.apache.drill.common.types.TypeProtos$MinorType)>(v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
return;
}
private org.apache.drill.exec.store.hdf.writers.HDF5DataWriter buildWriter(org.apache.drill.common.types.TypeProtos$MinorType)
{
org.apache.drill.exec.store.hdf.writers.HDF5MapDataWriter v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.store.hdf.writers.HDF5FloatDataWriter v;
int[] v;
org.apache.drill.exec.store.hdf.writers.HDF5LongDataWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
io.jhdf.HdfFile v, v, v, v, v, v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig v, v, v, v, v, v, v;
org.apache.drill.exec.store.hdf.writers.HDF5StringDataWriter v;
org.apache.drill.exec.store.hdf.writers.HDF5IntDataWriter v;
int v, v;
java.lang.String v, v, v, v, v, v, v, v;
java.lang.UnsupportedOperationException v;
org.apache.drill.exec.store.hdf.writers.HDF5TimestampDataWriter v;
org.apache.drill.exec.store.hdf.writers.WriterSpec v, v, v, v, v, v, v;
org.apache.drill.exec.store.hdf.writers.HDF5DoubleDataWriter v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.common.types.TypeProtos$MinorType;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader$1: int[] $SwitchMap$org$apache$drill$common$types$TypeProtos$MinorType>;
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 1: goto label;
case 2: goto label;
case 3: goto label;
case 4: goto label;
case 5: goto label;
case 6: goto label;
case 7: goto label;
default: goto label;
};
label:
v = new org.apache.drill.exec.store.hdf.writers.HDF5StringDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5StringDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String)>(v, v, v);
return v;
label:
v = new org.apache.drill.exec.store.hdf.writers.HDF5TimestampDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5TimestampDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String)>(v, v, v);
return v;
label:
v = new org.apache.drill.exec.store.hdf.writers.HDF5IntDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5IntDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String)>(v, v, v);
return v;
label:
v = new org.apache.drill.exec.store.hdf.writers.HDF5LongDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5LongDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String)>(v, v, v);
return v;
label:
v = new org.apache.drill.exec.store.hdf.writers.HDF5DoubleDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5DoubleDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String)>(v, v, v);
return v;
label:
v = new org.apache.drill.exec.store.hdf.writers.HDF5FloatDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5FloatDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String)>(v, v, v);
return v;
label:
v = new org.apache.drill.exec.store.hdf.writers.HDF5MapDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5MapDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String)>(v, v, v);
return v;
label:
v = new java.lang.UnsupportedOperationException;
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: java.lang.String name()>();
specialinvoke v.<java.lang.UnsupportedOperationException: void <init>(java.lang.String)>(v);
throw v;
}
private void buildSchemaFor2DimensionalDataset(io.jhdf.api.Dataset)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.store.hdf.writers.HDF5FloatDataWriter v;
byte v;
int[] v, v;
io.jhdf.api.Dataset v;
java.util.List v, v, v, v;
org.apache.drill.exec.store.hdf.writers.HDF5LongDataWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
io.jhdf.HdfFile v, v, v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig v, v, v, v;
org.apache.drill.exec.store.hdf.writers.HDF5IntDataWriter v;
int v, v, v, v;
java.lang.String v, v, v, v, v, v, v, v, v, v;
java.lang.UnsupportedOperationException v;
org.slf4j.Logger v;
io.jhdf.object.datatype.DataType v;
org.apache.drill.exec.store.hdf.writers.WriterSpec v, v, v, v;
java.lang.Class v;
org.apache.drill.exec.store.hdf.writers.HDF5DoubleDataWriter v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: io.jhdf.api.Dataset;
v = interfaceinvoke v.<io.jhdf.api.Dataset: io.jhdf.object.datatype.DataType getDataType()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: org.apache.drill.common.types.TypeProtos$MinorType getDataType(io.jhdf.object.datatype.DataType)>(v);
if v != null goto label;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Class getJavaType()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Couldn\'t add {}", v);
return;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int[] dimensions>;
v = v[1];
v = 0;
label:
v = v cmp v;
if v >= 0 goto label;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader$1: int[] $SwitchMap$org$apache$drill$common$types$TypeProtos$MinorType>;
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 3: goto label;
case 4: goto label;
case 5: goto label;
case 6: goto label;
default: goto label;
};
label:
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("int_col_\u0001");
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = new org.apache.drill.exec.store.hdf.writers.HDF5IntDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5IntDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String,java.lang.String,int)>(v, v, v, v, v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
goto label;
label:
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("long_col_\u0001");
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = new org.apache.drill.exec.store.hdf.writers.HDF5LongDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5LongDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String,java.lang.String,int)>(v, v, v, v, v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
goto label;
label:
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("double_col_\u0001");
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = new org.apache.drill.exec.store.hdf.writers.HDF5DoubleDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5DoubleDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String,java.lang.String,int)>(v, v, v, v, v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
goto label;
label:
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("float_col_\u0001");
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = new org.apache.drill.exec.store.hdf.writers.HDF5FloatDataWriter;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.writers.WriterSpec writerSpec>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
specialinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5FloatDataWriter: void <init>(io.jhdf.HdfFile,org.apache.drill.exec.store.hdf.writers.WriterSpec,java.lang.String,java.lang.String,int)>(v, v, v, v, v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
goto label;
label:
v = new java.lang.UnsupportedOperationException;
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: java.lang.String name()>();
specialinvoke v.<java.lang.UnsupportedOperationException: void <init>(java.lang.String)>(v);
throw v;
label:
v = v + 1;
goto label;
label:
return;
}
private void openFile(org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator) throws java.io.IOException
{
java.lang.Object[] v;
org.apache.drill.common.exceptions.CustomErrorContext v;
org.apache.drill.common.exceptions.UserException$Builder v, v, v;
org.apache.drill.common.exceptions.UserException v;
org.apache.drill.exec.store.dfs.DrillFileSystem v;
org.apache.hadoop.mapred.FileSplit v, v;
org.apache.hadoop.fs.Path v, v;
org.slf4j.Logger v;
java.io.InputStreamReader v;
java.lang.Exception v;
org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
io.jhdf.HdfFile v;
java.io.BufferedReader v;
java.io.InputStream v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator;
v = null;
label:
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.file.FileScanFramework$FileSchemaNegotiator: org.apache.drill.exec.store.dfs.DrillFileSystem fileSystem()>();
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.hadoop.mapred.FileSplit split>;
v = virtualinvoke v.<org.apache.hadoop.mapred.FileSplit: org.apache.hadoop.fs.Path getPath()>();
v = virtualinvoke v.<org.apache.drill.exec.store.dfs.DrillFileSystem: java.io.InputStream openPossiblyCompressedStream(org.apache.hadoop.fs.Path)>(v);
v = staticinvoke <io.jhdf.HdfFile: io.jhdf.HdfFile fromInputStream(java.io.InputStream)>(v);
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile> = v;
label:
goto label;
label:
v := @caughtexception;
if v == null goto label;
virtualinvoke v.<java.io.InputStream: void close()>();
label:
v = staticinvoke <org.apache.drill.common.exceptions.UserException: org.apache.drill.common.exceptions.UserException$Builder dataReadError(java.lang.Throwable)>(v);
v = newarray (java.lang.Object)[1];
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.hadoop.mapred.FileSplit split>;
v = virtualinvoke v.<org.apache.hadoop.mapred.FileSplit: org.apache.hadoop.fs.Path getPath()>();
v[0] = v;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException$Builder message(java.lang.String,java.lang.Object[])>("Failed to open input file: %s", v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.common.exceptions.CustomErrorContext errorContext>;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException$Builder addContext(org.apache.drill.common.exceptions.CustomErrorContext)>(v);
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException build(org.slf4j.Logger)>(v);
throw v;
label:
v = new java.io.BufferedReader;
v = new java.io.InputStreamReader;
specialinvoke v.<java.io.InputStreamReader: void <init>(java.io.InputStream)>(v);
specialinvoke v.<java.io.BufferedReader: void <init>(java.io.Reader)>(v);
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.io.BufferedReader reader> = v;
return;
catch java.lang.Exception from label to label with label;
}
public boolean next()
{
int[] v, v, v, v;
boolean v, v, v, v, v, v, v;
java.util.List v, v, v, v, v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig v, v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v, v, v, v, v, v, v, v, v;
int v, v, v, v, v, v, v;
java.lang.String v, v;
java.util.Iterator v;
java.lang.Object v, v, v, v, v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int maxRecords>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean limitReached(int)>(v);
if v == 0 goto label;
return 0;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean isFull()>();
if v != 0 goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
if v == null goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
v = virtualinvoke v.<java.lang.String: boolean isEmpty()>();
if v == 0 goto label;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.Iterator metadataIterator>;
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v != 0 goto label;
return 0;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void projectMetadataRow(org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v);
goto label;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int[] dimensions>;
v = lengthof v;
if v > 1 goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(0);
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5DataWriter: boolean isCompound()>();
if v == 0 goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(0);
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5DataWriter: boolean hasNext()>();
if v != 0 goto label;
return 0;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean start()>();
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(0);
virtualinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5DataWriter: boolean write()>();
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: void save()>();
goto label;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int[] dimensions>;
v = lengthof v;
if v > 1 goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(0);
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5DataWriter: boolean hasNext()>();
if v != 0 goto label;
return 0;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean start()>();
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(0);
virtualinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5DataWriter: boolean write()>();
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: void save()>();
goto label;
label:
v = 0;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean start()>();
v = 0;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int[] dimensions>;
v = v[1];
if v >= v goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List dataWriters>;
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(v);
virtualinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5DataWriter: boolean write()>();
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.writers.HDF5DataWriter: int currentRowCount()>();
v = v + 1;
goto label;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: void save()>();
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: int[] dimensions>;
v = v[0];
if v < v goto label;
return 0;
label:
return 1;
}
private void projectMetadataRow(org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
int[] v;
java.util.Map v;
io.jhdf.api.Dataset v;
boolean v, v, v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
io.jhdf.HdfFile v;
long v, v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
org.apache.drill.exec.vector.accessor.ScalarWriter v, v, v, v, v, v, v, v;
int v;
java.lang.String v, v, v, v, v, v, v, v;
java.util.Iterator v;
java.lang.Class v;
java.lang.Object v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.Iterator metadataIterator>;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean start()>();
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter pathWriter>;
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: java.lang.String getPath()>();
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter dataTypeWriter>;
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: java.lang.String getDataType()>();
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter fileNameWriter>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.lang.String fileName>;
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter linkWriter>;
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: boolean isLink()>();
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setBoolean(boolean)>(v);
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: java.util.Map getAttributes()>();
v = interfaceinvoke v.<java.util.Map: int size()>();
if v <= 0 goto label;
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeAttributes(org.apache.drill.exec.vector.accessor.TupleWriter,org.apache.drill.exec.store.hdf.HDF5DrillMetadata)>(v, v);
label:
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: java.lang.String getDataType()>();
v = virtualinvoke v.<java.lang.String: boolean equalsIgnoreCase(java.lang.String)>("DATASET");
if v == 0 goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: java.lang.String getPath()>();
v = virtualinvoke v.<io.jhdf.HdfFile: io.jhdf.api.Dataset getDatasetByPath(java.lang.String)>(v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter dataSizeWriter>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: long getSizeInBytes()>();
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setLong(long)>(v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter elementCountWriter>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: long getSize()>();
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setLong(long)>(v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter datasetTypeWriter>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Class getJavaType()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter dimensionsWriter>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: int[] getDimensions()>();
v = staticinvoke <java.util.Arrays: java.lang.String toString(int[])>(v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: boolean isLink()>();
if v != 0 goto label;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: boolean showMetadataPreview>;
if v == 0 goto label;
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: java.lang.String getPath()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void projectDataset(org.apache.drill.exec.physical.resultSet.RowSetLoader,java.lang.String)>(v, v);
label:
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: void save()>();
return;
}
private java.util.List getFileMetadata(io.jhdf.api.Group, java.util.List)
{
java.lang.Throwable v;
io.jhdf.api.Node v;
int[] v;
java.util.Map v, v;
int v, v;
java.lang.String v, v, v, v, v, v;
boolean v, v;
org.apache.drill.exec.store.hdf.HDF5DrillMetadata v;
org.slf4j.Logger v, v;
java.util.Iterator v;
java.util.List v;
io.jhdf.api.NodeType v, v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
java.lang.Object v;
io.jhdf.api.Group v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: io.jhdf.api.Group;
v := @parameter: java.util.List;
label:
interfaceinvoke v.<io.jhdf.api.Group: java.util.Map getChildren()>();
label:
goto label;
label:
v := @caughtexception;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = virtualinvoke v.<io.jhdf.exceptions.HdfException: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String)>(v);
return v;
label:
v = interfaceinvoke v.<io.jhdf.api.Group: java.util.Iterator iterator()>();
label:
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v == 0 goto label;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = new org.apache.drill.exec.store.hdf.HDF5DrillMetadata;
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: void <init>()>();
v = interfaceinvoke v.<io.jhdf.api.Node: boolean isLink()>();
if v == 0 goto label;
v = virtualinvoke v.<io.jhdf.links.SoftLink: java.lang.String getTargetPath()>();
virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: void setPath(java.lang.String)>(v);
virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: void setLink(boolean)>(1);
goto label;
label:
v = interfaceinvoke v.<io.jhdf.api.Node: java.lang.String getPath()>();
virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: void setPath(java.lang.String)>(v);
v = interfaceinvoke v.<io.jhdf.api.Node: io.jhdf.api.NodeType getType()>();
v = virtualinvoke v.<io.jhdf.api.NodeType: java.lang.String name()>();
virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: void setDataType(java.lang.String)>(v);
virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: void setLink(boolean)>(0);
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader$1: int[] $SwitchMap$io$jhdf$api$NodeType>;
v = interfaceinvoke v.<io.jhdf.api.Node: io.jhdf.api.NodeType getType()>();
v = virtualinvoke v.<io.jhdf.api.NodeType: int ordinal()>();
v = v[v];
lookupswitch(v)
{
case 1: goto label;
case 2: goto label;
default: goto label;
};
label:
v = interfaceinvoke v.<io.jhdf.api.Node: java.lang.String getPath()>();
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.Map getAttributes(java.lang.String)>(v);
virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: void setAttributes(java.util.Map)>(v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
goto label;
label:
v = interfaceinvoke v.<io.jhdf.api.Node: java.lang.String getPath()>();
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.Map getAttributes(java.lang.String)>(v);
virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: void setAttributes(java.util.Map)>(v);
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.List getFileMetadata(io.jhdf.api.Group,java.util.List)>(v, v);
goto label;
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = interfaceinvoke v.<io.jhdf.api.Node: io.jhdf.api.NodeType getType()>();
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Unknown data type: {}", v);
goto label;
label:
return v;
catch io.jhdf.exceptions.HdfException from label to label with label;
}
private java.util.Map getAttributes(java.lang.String)
{
java.lang.Throwable v;
java.util.HashMap v;
java.lang.Integer v;
io.jhdf.api.Node v;
java.util.Map v;
int v, v, v;
java.lang.String v, v, v, v;
boolean v, v, v;
org.apache.drill.exec.store.hdf.HDF5Attribute v;
org.slf4j.Logger v, v, v, v, v, v;
java.util.Iterator v;
java.util.Set v;
java.lang.Exception v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
io.jhdf.HdfFile v, v;
java.lang.Object v, v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: java.lang.String;
v = virtualinvoke v.<java.lang.String: boolean endsWith(java.lang.String)>("/");
if v == 0 goto label;
v = virtualinvoke v.<java.lang.String: int length()>();
v = v - 1;
v = virtualinvoke v.<java.lang.String: java.lang.String substring(int,int)>(0, v);
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Object)>("Getting attributes for {}", v);
v = new java.util.HashMap;
specialinvoke v.<java.util.HashMap: void <init>()>();
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = virtualinvoke v.<io.jhdf.HdfFile: io.jhdf.api.Node getByPath(java.lang.String)>(v);
label:
goto label;
label:
v := @caughtexception;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Object)>("Couldn\'t get attributes for path: {}", v);
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Object)>("Error: {}", v);
return v;
label:
v = interfaceinvoke v.<io.jhdf.api.Node: java.util.Map getAttributes()>();
label:
goto label;
label:
v := @caughtexception;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Unable to get attributes for {}: Only Huge objects BTrees with 1 record are currently supported.", v);
return v;
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = interfaceinvoke v.<java.util.Map: int size()>();
v = staticinvoke <java.lang.Integer: java.lang.Integer valueOf(int)>(v);
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Object,java.lang.Object)>("Found {} attribtutes for {}", v, v);
v = interfaceinvoke v.<java.util.Map: java.util.Set entrySet()>();
v = interfaceinvoke v.<java.util.Set: java.util.Iterator iterator()>();
label:
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v == 0 goto label;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = interfaceinvoke v.<java.util.Map$Entry: java.lang.Object getKey()>();
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: org.apache.drill.exec.store.hdf.HDF5Attribute getAttribute(java.lang.String,java.lang.String,io.jhdf.HdfFile)>(v, v, v);
if v == null goto label;
v = interfaceinvoke v.<java.util.Map$Entry: java.lang.Object getValue()>();
v = interfaceinvoke v.<io.jhdf.api.Attribute: boolean isScalar()>();
if v == 0 goto label;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.String getKey()>();
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Object,java.lang.Object)>("Adding {} to attribute list for {}", v, v);
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.String getKey()>();
interfaceinvoke v.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>(v, v);
goto label;
label:
return v;
catch java.lang.Exception from label to label with label;
catch io.jhdf.exceptions.HdfException from label to label with label;
}
private void projectDataset(org.apache.drill.exec.physical.resultSet.RowSetLoader, java.lang.String)
{
org.apache.drill.common.exceptions.UserException v;
byte v;
io.jhdf.api.Dataset v;
java.lang.Exception v, v;
io.jhdf.HdfFile v, v;
double[][] v;
java.lang.Object[] v;
org.apache.drill.common.exceptions.UserException$Builder v, v, v;
long v;
java.lang.AssertionError v;
java.lang.String v, v, v, v, v;
long[][] v;
io.jhdf.object.datatype.DataType v, v, v, v, v;
int[][] v;
org.apache.drill.common.types.TypeProtos$MinorType v, v, v, v, v;
int[] v, v, v, v;
boolean v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.common.exceptions.CustomErrorContext v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
int v, v, v, v, v, v, v, v, v, v, v, v, v, v;
float[][] v;
org.slf4j.Logger v, v, v, v, v, v, v;
java.lang.Object v, v, v, v, v, v, v, v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
v := @parameter: java.lang.String;
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: java.lang.String getNameFromPath(java.lang.String)>(v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = virtualinvoke v.<io.jhdf.HdfFile: io.jhdf.api.Dataset getDatasetByPath(java.lang.String)>(v);
v = interfaceinvoke v.<io.jhdf.api.Dataset: long getSizeInBytes()>();
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: int MAX_DATASET_SIZE>;
v = v cmp v;
if v <= 0 goto label;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Dataset {} is greater than 16MB.  Data will be truncated in Metadata view.", v);
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: int[] getDimensions()>();
v = lengthof v;
if v != 1 goto label;
v = interfaceinvoke v.<io.jhdf.api.Dataset: io.jhdf.object.datatype.DataType getDataType()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: org.apache.drill.common.types.TypeProtos$MinorType getDataType(io.jhdf.object.datatype.DataType)>(v);
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
label:
goto label;
label:
v := @caughtexception;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Object)>("Error reading {}", v);
return;
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: boolean $assertionsDisabled>;
if v != 0 goto label;
if v != null goto label;
v = new java.lang.AssertionError;
specialinvoke v.<java.lang.AssertionError: void <init>()>();
throw v;
label:
if v != null goto label;
return;
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader$1: int[] $SwitchMap$org$apache$drill$common$types$TypeProtos$MinorType>;
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 1: goto label;
case 2: goto label;
case 3: goto label;
case 4: goto label;
case 5: goto label;
case 6: goto label;
case 7: goto label;
case 8: goto label;
case 9: goto label;
case 10: goto label;
default: goto label;
};
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Couldn\'t read {}", v);
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeStringListColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,java.lang.String[])>(v, v, v);
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeTimestampListColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,long[])>(v, v, v);
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeIntListColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,int[])>(v, v, v);
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeSmallIntColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,short[])>(v, v, v);
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeByteListColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,byte[])>(v, v, v);
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeFloat4ListColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,float[])>(v, v, v);
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeFloat8ListColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,double[])>(v, v, v);
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeLongListColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,long[])>(v, v, v);
goto label;
label:
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void getAndMapCompoundData(java.lang.String,io.jhdf.HdfFile,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v);
label:
goto label;
label:
v := @caughtexception;
v = staticinvoke <org.apache.drill.common.exceptions.UserException: org.apache.drill.common.exceptions.UserException$Builder dataReadError()>();
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Error writing Compound Field: \u0001");
v = newarray (java.lang.Object)[0];
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException$Builder message(java.lang.String,java.lang.Object[])>(v, v);
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.common.exceptions.CustomErrorContext errorContext>;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException$Builder addContext(org.apache.drill.common.exceptions.CustomErrorContext)>(v);
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException build(org.slf4j.Logger)>(v);
throw v;
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: java.lang.String name()>();
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("{} not implemented.", v);
goto label;
label:
v = lengthof v;
if v != 2 goto label;
v = v[1];
v = v[0];
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader$1: int[] $SwitchMap$org$apache$drill$common$types$TypeProtos$MinorType>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: io.jhdf.object.datatype.DataType getDataType()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: org.apache.drill.common.types.TypeProtos$MinorType getDataType(io.jhdf.object.datatype.DataType)>(v);
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 3: goto label;
case 4: goto label;
case 5: goto label;
case 6: goto label;
default: goto label;
};
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void mapIntMatrixField(int[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
goto label;
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void mapFloatMatrixField(float[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
goto label;
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void mapDoubleMatrixField(double[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
goto label;
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void mapBigIntMatrixField(long[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
goto label;
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: io.jhdf.object.datatype.DataType getDataType()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: org.apache.drill.common.types.TypeProtos$MinorType getDataType(io.jhdf.object.datatype.DataType)>(v);
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("{} not implemented.", v);
goto label;
label:
v = lengthof v;
if v <= 2 goto label;
v = v[1];
v = v[0];
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader$1: int[] $SwitchMap$org$apache$drill$common$types$TypeProtos$MinorType>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: io.jhdf.object.datatype.DataType getDataType()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: org.apache.drill.common.types.TypeProtos$MinorType getDataType(io.jhdf.object.datatype.DataType)>(v);
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 3: goto label;
case 4: goto label;
case 5: goto label;
case 6: goto label;
default: goto label;
};
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: int[][] toIntMatrix(java.lang.Object[])>(v);
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void mapIntMatrixField(int[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
goto label;
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: float[][] toFloatMatrix(java.lang.Object[])>(v);
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void mapFloatMatrixField(float[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
goto label;
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: double[][] toDoubleMatrix(java.lang.Object[])>(v);
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void mapDoubleMatrixField(double[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
goto label;
label:
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: long[][] toLongMatrix(java.lang.Object[])>(v);
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void mapBigIntMatrixField(long[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
goto label;
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = interfaceinvoke v.<io.jhdf.api.Dataset: io.jhdf.object.datatype.DataType getDataType()>();
v = staticinvoke <org.apache.drill.exec.store.hdf.HDF5Utils: org.apache.drill.common.types.TypeProtos$MinorType getDataType(io.jhdf.object.datatype.DataType)>(v);
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("{} not implemented.", v);
label:
return;
catch java.lang.Exception from label to label with label;
catch java.lang.Exception from label to label with label;
}
private void writeBooleanColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, int)
{
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
int v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
java.lang.String v;
boolean v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: int;
if v == 0 goto label;
v = 1;
goto label;
label:
v = 0;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeBooleanColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,boolean)>(v, v, v);
return;
}
private void writeBooleanColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, boolean)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
java.lang.String v;
boolean v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: boolean;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType BIT>;
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>(v, v, v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setBoolean(boolean)>(v);
return;
}
private void writeSmallIntColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, short)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
short v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
java.lang.String v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: short;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType SMALLINT>;
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>(v, v, v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
return;
}
private void writeSmallIntColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, short[])
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
short v;
org.apache.drill.common.types.TypeProtos$DataMode v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.ArrayWriter v;
short[] v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: short[];
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType SMALLINT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = 0;
label:
if v >= v goto label;
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
v = v + 1;
goto label;
label:
return;
}
private void writeByteColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, byte)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
byte v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
java.lang.String v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: byte;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType TINYINT>;
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>(v, v, v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
return;
}
private void writeByteListColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, byte[])
{
byte[] v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
byte v;
org.apache.drill.common.types.TypeProtos$DataMode v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.ArrayWriter v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: byte[];
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType TINYINT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = 0;
label:
if v >= v goto label;
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
v = v + 1;
goto label;
label:
return;
}
private void writeIntColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, int)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
int v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
java.lang.String v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: int;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType INT>;
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>(v, v, v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
return;
}
private void writeIntListColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, int[])
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.common.types.TypeProtos$DataMode v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int[] v;
int v, v, v, v, v, v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.ArrayWriter v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: int[];
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType INT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = 0;
label:
if v >= v goto label;
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
v = v + 1;
goto label;
label:
return;
}
private void mapIntMatrixField(int[][], int, int, org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
int[][] v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
int[] v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
int v, v, v, v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig v;
java.lang.String v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: int[][];
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
if v == null goto label;
v = 0;
label:
if v >= v goto label;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean start()>();
v = 0;
label:
if v >= v goto label;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("int_col_\u0001");
v = v[v];
v = v[v];
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeIntColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,int)>(v, v, v);
v = v + 1;
goto label;
label:
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: void save()>();
v = v + 1;
goto label;
label:
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void intMatrixHelper(int[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
label:
return;
}
private void intMatrixHelper(int[][], int, int, org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
int[][] v;
org.apache.drill.exec.record.metadata.SchemaBuilder v, v;
org.apache.drill.exec.record.MaterializedField v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int[] v;
int v, v, v, v, v, v, v, v;
org.apache.drill.exec.record.metadata.TupleMetadata v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.record.metadata.RepeatedListBuilder v, v;
org.apache.drill.exec.vector.accessor.ArrayWriter v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: int[][];
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
v = new org.apache.drill.exec.record.metadata.SchemaBuilder;
specialinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: void <init>()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.RepeatedListBuilder addRepeatedList(java.lang.String)>("int_data");
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType INT>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.RepeatedListBuilder: org.apache.drill.exec.record.metadata.RepeatedListBuilder addArray(org.apache.drill.common.types.TypeProtos$MinorType)>(v);
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.RepeatedListBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder resumeSchema()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.TupleMetadata buildSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>("int_data");
v = (int) -1;
if v != v goto label;
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: org.apache.drill.exec.record.MaterializedField column(java.lang.String)>("int_data");
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: int addColumn(org.apache.drill.exec.record.MaterializedField)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = 0;
label:
if v >= v goto label;
v = 0;
label:
if v >= v goto label;
v = v[v];
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
v = v + 1;
goto label;
label:
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: void save()>();
v = v + 1;
goto label;
label:
return;
}
private void writeLongColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, long)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
long v;
java.lang.String v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: long;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType BIGINT>;
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>(v, v, v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setLong(long)>(v);
return;
}
private void writeLongListColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, long[])
{
long v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
long[] v;
org.apache.drill.common.types.TypeProtos$DataMode v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.ArrayWriter v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: long[];
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType BIGINT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = 0;
label:
if v >= v goto label;
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setLong(long)>(v);
v = v + 1;
goto label;
label:
return;
}
private void writeStringColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, java.lang.String)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
java.lang.String v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>(v, v, v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
return;
}
private void writeStringListColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, java.lang.String[])
{
java.lang.String[] v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.common.types.TypeProtos$DataMode v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v, v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.ArrayWriter v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String[];
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = 0;
label:
if v >= v goto label;
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
v = v + 1;
goto label;
label:
return;
}
private void writeFloat8Column(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, double)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
java.lang.String v;
double v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: double;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType FLOAT8>;
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>(v, v, v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setDouble(double)>(v);
return;
}
private void writeFloat8ListColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, double[])
{
double[] v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.common.types.TypeProtos$DataMode v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
double v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.ArrayWriter v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: double[];
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType FLOAT8>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = 0;
label:
if v >= v goto label;
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setDouble(double)>(v);
v = v + 1;
goto label;
label:
return;
}
private void writeFloat4Column(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, float)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
float v;
java.lang.String v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: float;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType FLOAT4>;
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>(v, v, v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setDouble(double)>(v);
return;
}
private void writeFloat4ListColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, float[])
{
float[] v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.common.types.TypeProtos$DataMode v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
float v;
org.apache.drill.exec.vector.accessor.ArrayWriter v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: float[];
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType FLOAT4>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = 0;
label:
if v >= v goto label;
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setDouble(double)>(v);
v = v + 1;
goto label;
label:
return;
}
// Maps a 2-D float dataset into the result set.
// Two modes, selected by readerConfig.defaultPath:
//   - defaultPath set (non-null): flatten the matrix — one Drill column
//     "float_col_<k>" per matrix column, one result row per matrix row,
//     written via writeFloat4Column.
//   - defaultPath null: delegate to floatMatrixHelper, which writes the whole
//     matrix as a single repeated-list column "float_data".
// NOTE(review): all locals are `v` and all branch targets `label` in this
// disassembly; the control-flow description below is reconstructed from the
// statement ordering and should be confirmed against the original source.
private void mapFloatMatrixField(float[][], int, int, org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
float[][] v;
float[] v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
int v, v, v, v;
float v;
org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig v;
java.lang.String v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: float[][];
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
// Load readerConfig.defaultPath to choose flatten-vs-nested mode.
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
// Null defaultPath -> jump to the floatMatrixHelper call below.
if v == null goto label;
// Flatten mode: outer loop over rows (start()/save() brackets each row).
v = 0;
label:
if v >= v goto label;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean start()>();
// Inner loop over columns: build name "float_col_<index>" and write the cell.
v = 0;
label:
if v >= v goto label;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("float_col_\u0001");
// Double array index selects one matrix element for this (row, col) pair.
v = v[v];
v = v[v];
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeFloat4Column(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,float)>(v, v, v);
v = v + 1;
goto label;
label:
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: void save()>();
v = v + 1;
goto label;
label:
// Nested mode: write the matrix as a repeated-list "float_data" column.
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void floatMatrixHelper(float[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
label:
return;
}
// Writes a float matrix as a single nested column "float_data" of type
// repeated-list of FLOAT4 (array of arrays).
// Output is truncated to min(outer length, 20) x min(inner length, 100) —
// presumably the inlined PREVIEW_ROW_LIMIT / PREVIEW_COL_LIMIT constants
// declared on this class; TODO confirm against source.
// NOTE(review): locals/labels are collapsed to `v`/`label` in this dump;
// loop-structure comments below are reconstructed.
private void floatMatrixHelper(float[][], int, int, org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
float[] v, v;
org.apache.drill.exec.record.metadata.SchemaBuilder v, v;
org.apache.drill.exec.record.MaterializedField v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v, v, v, v, v, v;
float[][] v;
org.apache.drill.exec.record.metadata.TupleMetadata v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.record.metadata.RepeatedListBuilder v, v;
float v;
org.apache.drill.exec.vector.accessor.ArrayWriter v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: float[][];
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
// Build the schema fragment: repeated list "float_data" of FLOAT4.
v = new org.apache.drill.exec.record.metadata.SchemaBuilder;
specialinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: void <init>()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.RepeatedListBuilder addRepeatedList(java.lang.String)>("float_data");
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType FLOAT4>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.RepeatedListBuilder: org.apache.drill.exec.record.metadata.RepeatedListBuilder addArray(org.apache.drill.common.types.TypeProtos$MinorType)>(v);
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.RepeatedListBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder resumeSchema()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.TupleMetadata buildSchema()>();
// Add the column to the row writer only if it is not already present
// (index(...) returns -1 for an unknown column).
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>("float_data");
v = (int) -1;
if v != v goto label;
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: org.apache.drill.exec.record.MaterializedField column(java.lang.String)>("float_data");
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: int addColumn(org.apache.drill.exec.record.MaterializedField)>(v);
label:
// Navigate writer: column -> outer array -> inner array -> scalar.
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
// Clamp outer dimension to 20 and inner dimension (length of row 0) to 100.
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = v[0];
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 100);
// Nested loops: write each element via setDouble (float widened to double),
// calling save() on the inner ArrayWriter after each outer iteration.
v = 0;
label:
if v >= v goto label;
v = 0;
label:
if v >= v goto label;
v = v[v];
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setDouble(double)>(v);
v = v + 1;
goto label;
label:
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: void save()>();
v = v + 1;
goto label;
label:
return;
}
// Maps a 2-D double dataset into the result set.
// Mirrors mapFloatMatrixField but with FLOAT8 columns:
//   - readerConfig.defaultPath non-null: flatten into per-column
//     "double_col_<k>" columns via writeFloat8Column.
//   - defaultPath null: delegate to doubleMatrixHelper (nested "double_data").
// NOTE(review): control flow reconstructed from statement order — locals and
// labels are collapsed to `v`/`label` in this disassembly.
private void mapDoubleMatrixField(double[][], int, int, org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
double[] v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
int v, v, v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig v;
java.lang.String v, v;
double[][] v;
double v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: double[][];
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
// Branch on readerConfig.defaultPath: null -> helper path below.
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
if v == null goto label;
// Flatten mode: one result row per matrix row, bracketed by start()/save().
v = 0;
label:
if v >= v goto label;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean start()>();
v = 0;
label:
if v >= v goto label;
// Column name "double_col_<index>".
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("double_col_\u0001");
v = v[v];
v = v[v];
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeFloat8Column(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,double)>(v, v, v);
v = v + 1;
goto label;
label:
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: void save()>();
v = v + 1;
goto label;
label:
// Nested mode: repeated-list "double_data" column.
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void doubleMatrixHelper(double[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
label:
return;
}
// Writes a double matrix as a single nested column "double_data" of type
// repeated-list of FLOAT8. Structure is identical to floatMatrixHelper.
// Output truncated to min(outer, 20) x min(inner, 100) — presumably the
// inlined PREVIEW_ROW_LIMIT / PREVIEW_COL_LIMIT constants; TODO confirm.
// NOTE(review): loop structure reconstructed; locals/labels collapsed in dump.
private void doubleMatrixHelper(double[][], int, int, org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
double[] v, v;
org.apache.drill.exec.record.metadata.SchemaBuilder v, v;
org.apache.drill.exec.record.MaterializedField v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v, v, v, v, v, v;
double v;
org.apache.drill.exec.record.metadata.TupleMetadata v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.record.metadata.RepeatedListBuilder v, v;
org.apache.drill.exec.vector.accessor.ArrayWriter v, v;
double[][] v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: double[][];
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
// Schema fragment: repeated list "double_data" of FLOAT8.
v = new org.apache.drill.exec.record.metadata.SchemaBuilder;
specialinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: void <init>()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.RepeatedListBuilder addRepeatedList(java.lang.String)>("double_data");
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType FLOAT8>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.RepeatedListBuilder: org.apache.drill.exec.record.metadata.RepeatedListBuilder addArray(org.apache.drill.common.types.TypeProtos$MinorType)>(v);
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.RepeatedListBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder resumeSchema()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.TupleMetadata buildSchema()>();
// Add "double_data" only if not already present (index == -1).
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>("double_data");
v = (int) -1;
if v != v goto label;
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: org.apache.drill.exec.record.MaterializedField column(java.lang.String)>("double_data");
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: int addColumn(org.apache.drill.exec.record.MaterializedField)>(v);
label:
// Writer chain: column -> outer array -> inner array -> scalar.
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
// Clamp dimensions: outer to 20, inner (length of row 0) to 100.
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = v[0];
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 100);
// Nested element-write loops; inner ArrayWriter.save() closes each outer row.
v = 0;
label:
if v >= v goto label;
v = 0;
label:
if v >= v goto label;
v = v[v];
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setDouble(double)>(v);
v = v + 1;
goto label;
label:
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: void save()>();
v = v + 1;
goto label;
label:
return;
}
// Maps a 2-D long dataset into the result set.
// Mirrors mapFloatMatrixField but with BIGINT columns:
//   - readerConfig.defaultPath non-null: flatten into per-column
//     "long_col_<k>" columns via writeLongColumn.
//   - defaultPath null: delegate to bigIntMatrixHelper (nested "long_data").
// NOTE(review): control flow reconstructed from statement order — locals and
// labels are collapsed to `v`/`label` in this disassembly.
private void mapBigIntMatrixField(long[][], int, int, org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
long[][] v;
long v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
long[] v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
int v, v, v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig v;
java.lang.String v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: long[][];
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
// Branch on readerConfig.defaultPath: null -> helper path below.
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig readerConfig>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader$HDF5ReaderConfig: java.lang.String defaultPath>;
if v == null goto label;
// Flatten mode: one result row per matrix row, bracketed by start()/save().
v = 0;
label:
if v >= v goto label;
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean start()>();
v = 0;
label:
if v >= v goto label;
// Column name "long_col_<index>".
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (int)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("long_col_\u0001");
v = v[v];
v = v[v];
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeLongColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,long)>(v, v, v);
v = v + 1;
goto label;
label:
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: void save()>();
v = v + 1;
goto label;
label:
// Nested mode: repeated-list "long_data" column.
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void bigIntMatrixHelper(long[][],int,int,org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v, v, v);
label:
return;
}
// Writes a long matrix as a single nested column "long_data" of type
// repeated-list of BIGINT. Structure is identical to floatMatrixHelper except
// elements are written with setLong.
// Output truncated to min(outer, 20) x min(inner, 100) — presumably the
// inlined PREVIEW_ROW_LIMIT / PREVIEW_COL_LIMIT constants; TODO confirm.
// NOTE(review): loop structure reconstructed; locals/labels collapsed in dump.
private void bigIntMatrixHelper(long[][], int, int, org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
long v;
org.apache.drill.exec.record.metadata.SchemaBuilder v, v;
org.apache.drill.exec.record.MaterializedField v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
long[] v, v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v, v, v, v, v, v;
long[][] v;
org.apache.drill.exec.record.metadata.TupleMetadata v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.record.metadata.RepeatedListBuilder v, v;
org.apache.drill.exec.vector.accessor.ArrayWriter v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: long[][];
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
// Schema fragment: repeated list "long_data" of BIGINT.
v = new org.apache.drill.exec.record.metadata.SchemaBuilder;
specialinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: void <init>()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.RepeatedListBuilder addRepeatedList(java.lang.String)>("long_data");
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType BIGINT>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.RepeatedListBuilder: org.apache.drill.exec.record.metadata.RepeatedListBuilder addArray(org.apache.drill.common.types.TypeProtos$MinorType)>(v);
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.RepeatedListBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder resumeSchema()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.TupleMetadata buildSchema()>();
// Add "long_data" only if not already present (index == -1).
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>("long_data");
v = (int) -1;
if v != v goto label;
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: org.apache.drill.exec.record.MaterializedField column(java.lang.String)>("long_data");
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: int addColumn(org.apache.drill.exec.record.MaterializedField)>(v);
label:
// Writer chain: column -> outer array -> inner array -> scalar.
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
// Clamp dimensions: outer to 20, inner (length of row 0) to 100.
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 20);
v = v[0];
v = lengthof v;
v = staticinvoke <java.lang.Math: int min(int,int)>(v, 100);
// Nested element-write loops; inner ArrayWriter.save() closes each outer row.
v = 0;
label:
if v >= v goto label;
v = 0;
label:
if v >= v goto label;
v = v[v];
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setLong(long)>(v);
v = v + 1;
goto label;
label:
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: void save()>();
v = v + 1;
goto label;
label:
return;
}
// Writes one TIMESTAMP value to the named column.
// Converts the long (interpreted as epoch milliseconds, per ofEpochMilli) to
// an Instant, resolves/creates the column writer via getColWriter, and sets it.
private void writeTimestampColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, long)
{
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
long v;
java.lang.String v;
java.time.Instant v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: long;
// Epoch-millis long -> Instant.
v = staticinvoke <java.time.Instant: java.time.Instant ofEpochMilli(long)>(v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType TIMESTAMP>;
// getColWriter adds the column (OPTIONAL TIMESTAMP) if it does not yet exist.
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>(v, v, v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setTimestamp(java.time.Instant)>(v);
return;
}
// Writes an array of TIMESTAMP values to the named column as a REPEATED
// TIMESTAMP. Each long element is interpreted as epoch milliseconds
// (Instant.ofEpochMilli). Adds the column to the tuple schema on first use
// (index(...) == -1), then appends every element via the array's scalar writer.
private void writeTimestampListColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, long[])
{
long v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
long[] v;
org.apache.drill.common.types.TypeProtos$DataMode v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
java.time.Instant v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
org.apache.drill.exec.vector.accessor.ArrayWriter v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: long[];
// Create the column (REPEATED TIMESTAMP) if it is not already in the schema.
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType TIMESTAMP>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
// Writer chain: column -> array -> scalar.
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array()>();
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
// Loop over the full input array (no preview truncation here).
v = lengthof v;
v = 0;
label:
if v >= v goto label;
v = v[v];
v = staticinvoke <java.time.Instant: java.time.Instant ofEpochMilli(long)>(v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setTimestamp(java.time.Instant)>(v);
v = v + 1;
goto label;
label:
return;
}
// Resolves the ScalarWriter for the named column, creating the column on
// first use. If the column is absent from the tuple schema (index == -1), a
// new scalar column of the requested MinorType with OPTIONAL mode is added.
// Returns the scalar writer at the resolved/created index.
private org.apache.drill.exec.vector.accessor.ScalarWriter getColWriter(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, org.apache.drill.common.types.TypeProtos$MinorType)
{
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.common.types.TypeProtos$DataMode v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
int v, v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: org.apache.drill.common.types.TypeProtos$MinorType;
// Look the column up by name; -1 means it must be created.
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
// New columns are always OPTIONAL (nullable) scalars of the caller's type.
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode OPTIONAL>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(int)>(v);
return v;
}
// Writes all HDF5 attributes of the metadata object's path into a nested
// "attributes" MAP column. Iterates the attribute map from getAttributes(path)
// and dispatches on each HDF5Attribute's Drill MinorType via a compiled
// switch (the $SwitchMap$...$MinorType table from the synthetic inner class
// HDF5BatchReader$1), unboxing the attribute value and calling the matching
// typed write helper. Unknown types throw IllegalStateException with the
// type name.
// NOTE(review): the numeric case->MinorType mapping depends on the synthetic
// switch-map array, which is not visible here; the per-case type comments
// below are inferred from the unboxing call in each handler.
private void writeAttributes(org.apache.drill.exec.vector.accessor.TupleWriter, org.apache.drill.exec.store.hdf.HDF5DrillMetadata)
{
java.lang.IllegalStateException v;
org.apache.drill.common.types.TypeProtos$MinorType v, v, v;
byte v;
int[] v;
java.util.Map v;
boolean v, v;
java.util.Set v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
float v;
long v, v;
org.apache.drill.exec.record.MaterializedField v;
short v;
org.apache.drill.common.types.TypeProtos$DataMode v;
int v, v, v, v, v;
java.lang.String v, v, v;
double v;
org.apache.drill.exec.store.hdf.HDF5DrillMetadata v;
java.util.Iterator v;
org.apache.drill.exec.vector.accessor.TupleWriter v, v;
java.lang.Object v, v, v, v, v, v, v, v, v, v, v, v, v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: org.apache.drill.exec.store.hdf.HDF5DrillMetadata;
// Fetch the attribute map for this metadata path and get an entry iterator.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5DrillMetadata: java.lang.String getPath()>();
v = specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.util.Map getAttributes(java.lang.String)>(v);
v = interfaceinvoke v.<java.util.Map: java.util.Set entrySet()>();
v = interfaceinvoke v.<java.util.Set: java.util.Iterator iterator()>();
// Create the "attributes" MAP column (REQUIRED) if not already present.
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>("attributes");
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType MAP>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REQUIRED>;
v = staticinvoke <org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.MaterializedField columnSchema(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>("attributes", v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.MaterializedField)>(v);
label:
// Tuple writer for the nested map; each attribute becomes a member column.
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.TupleWriter tuple(int)>(v);
label:
// Main loop over map entries: key = attribute/column name, value = HDF5Attribute.
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v == 0 goto label;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = interfaceinvoke v.<java.util.Map$Entry: java.lang.Object getKey()>();
v = interfaceinvoke v.<java.util.Map$Entry: java.lang.Object getValue()>();
// Compiled enum switch: map MinorType.ordinal() through the synthetic table.
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader$1: int[] $SwitchMap$org$apache$drill$common$types$TypeProtos$MinorType>;
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: org.apache.drill.common.types.TypeProtos$MinorType getDataType()>();
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 1: goto label;
case 2: goto label;
case 3: goto label;
case 4: goto label;
case 5: goto label;
case 6: goto label;
case 7: goto label;
case 8: goto label;
case 9: goto label;
case 10: goto label;
case 11: goto label;
default: goto label;
};
label:
// Handler: Boolean attribute -> writeBooleanColumn.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
v = virtualinvoke v.<java.lang.Boolean: boolean booleanValue()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeBooleanColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,boolean)>(v, v, v);
goto label;
label:
// Handler: Long attribute -> writeLongColumn.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
v = virtualinvoke v.<java.lang.Long: long longValue()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeLongColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,long)>(v, v, v);
goto label;
label:
// Handler: Integer attribute -> writeIntColumn.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
v = virtualinvoke v.<java.lang.Integer: int intValue()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeIntColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,int)>(v, v, v);
goto label;
label:
// Handler: Short attribute -> writeSmallIntColumn.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
v = virtualinvoke v.<java.lang.Short: short shortValue()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeSmallIntColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,short)>(v, v, v);
goto label;
label:
// Handler: Byte attribute -> writeByteColumn.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
v = virtualinvoke v.<java.lang.Byte: byte byteValue()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeByteColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,byte)>(v, v, v);
goto label;
label:
// Handler: Double attribute -> writeFloat8Column.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
v = virtualinvoke v.<java.lang.Double: double doubleValue()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeFloat8Column(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,double)>(v, v, v);
goto label;
label:
// Handler: Float attribute -> writeFloat4Column.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
v = virtualinvoke v.<java.lang.Float: float floatValue()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeFloat4Column(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,float)>(v, v, v);
goto label;
label:
// Handler: String attribute -> writeStringColumn (value cast, no conversion).
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeStringColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,java.lang.String)>(v, v, v);
goto label;
label:
// Handler: timestamp attribute (boxed Long, epoch millis) -> writeTimestampColumn.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
v = virtualinvoke v.<java.lang.Long: long longValue()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeTimestampColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,long)>(v, v, v);
goto label;
label:
// Handler: fallback object type -> toString() and write as string column.
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: java.lang.Object getValue()>();
v = virtualinvoke v.<java.lang.Object: java.lang.String toString()>();
specialinvoke v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: void writeStringColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,java.lang.String)>(v, v, v);
goto label;
label:
// Default: unsupported attribute type -> IllegalStateException(type name).
v = new java.lang.IllegalStateException;
v = virtualinvoke v.<org.apache.drill.exec.store.hdf.HDF5Attribute: org.apache.drill.common.types.TypeProtos$MinorType getDataType()>();
v = virtualinvoke v.<org.apache.drill.common.types.TypeProtos$MinorType: java.lang.String name()>();
specialinvoke v.<java.lang.IllegalStateException: void <init>(java.lang.String)>(v);
throw v;
label:
return;
}
// NOTE(review): this is decompiled Jimple (Soot IR), not Java source. The
// extraction collapsed every local variable to the single name "v" and every
// branch target to the bare name "label", so exact data flow and branch
// destinations are NOT recoverable from this text. Comments below describe
// only the structure that the visible call sequence establishes; confirm
// details against the original HDF5BatchReader.java.
//
// Purpose (from visible calls): reads the HDF5 compound dataset at the given
// path, builds a Drill map column named "compound_data" with one REPEATED
// child column per compound member (member Java type -> Drill MinorType),
// then writes each member's values through per-member array writers into the
// supplied RowSetLoader.
private void getAndMapCompoundData(java.lang.String, io.jhdf.HdfFile, org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
org.apache.drill.exec.record.metadata.SchemaBuilder v, v;
byte v, v, v;
io.jhdf.api.Dataset v, v, v;
io.jhdf.HdfFile v;
long v;
org.apache.drill.exec.record.MaterializedField v;
org.apache.drill.exec.vector.accessor.ScalarWriter v, v, v, v, v, v, v, v, v;
short v;
org.apache.drill.common.types.TypeProtos$DataMode v, v, v, v, v, v, v, v;
java.lang.String v, v, v, v, v, v, v;
io.jhdf.object.datatype.DataType v, v, v;
org.apache.drill.common.types.TypeProtos$MinorType v, v, v, v, v, v, v, v;
int[] v;
boolean v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v;
org.apache.drill.exec.record.metadata.TupleMetadata v, v;
java.util.List v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
float v;
org.apache.drill.exec.vector.accessor.ArrayWriter v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
org.apache.drill.exec.vector.accessor.ObjectWriter v;
int v, v, v, v, v, v, v;
double v;
org.slf4j.Logger v, v;
java.util.Iterator v, v;
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.MapBuilder v;
java.lang.Class v, v;
java.lang.Object v, v, v, v;
// Bind this and the three parameters (path, file, rowWriter).
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
v := @parameter: java.lang.String;
v := @parameter: io.jhdf.HdfFile;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
// Fetch the dataset by path; its DataType is cast to CompoundDataType to
// obtain the member list.
v = virtualinvoke v.<io.jhdf.HdfFile: io.jhdf.api.Dataset getDatasetByPath(java.lang.String)>(v);
v = interfaceinvoke v.<io.jhdf.api.Dataset: io.jhdf.object.datatype.DataType getDataType()>();
v = virtualinvoke v.<io.jhdf.object.datatype.CompoundDataType: java.util.List getMembers()>();
// Start a new schema containing one map column named "compound_data".
v = new org.apache.drill.exec.record.metadata.SchemaBuilder;
specialinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: void <init>()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.MapBuilder addMap(java.lang.String)>("compound_data");
// PASS 1 — schema build: iterate the compound members, dispatch on each
// member's Java type name, and add a REPEATED child column of the matching
// Drill MinorType to the map.
v = interfaceinvoke v.<java.util.List: java.util.Iterator iterator()>();
label:
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v == 0 goto label;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = virtualinvoke v.<io.jhdf.object.datatype.CompoundDataType$CompoundDataMember: io.jhdf.object.datatype.DataType getDataType()>();
v = virtualinvoke v.<io.jhdf.object.datatype.DataType: java.lang.Class getJavaType()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
v = virtualinvoke v.<io.jhdf.object.datatype.CompoundDataType$CompoundDataMember: java.lang.String getName()>();
// Compiled form of a Java switch on a String: first dispatch on hashCode,
// then confirm each candidate with equals() before selecting a tableswitch
// index (-1 = no match).
v = -1;
v = virtualinvoke v.<java.lang.String: int hashCode()>();
lookupswitch(v)
{
case -1325958191: goto label;
case 104431: goto label;
case 3039496: goto label;
case 3327612: goto label;
case 64711720: goto label;
case 97526364: goto label;
case 109413500: goto label;
case 1195259493: goto label;
default: goto label;
};
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("byte");
if v == 0 goto label;
v = 0;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("short");
if v == 0 goto label;
v = 1;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("int");
if v == 0 goto label;
v = 2;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("double");
if v == 0 goto label;
v = 3;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("float");
if v == 0 goto label;
v = 4;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("long");
if v == 0 goto label;
v = 5;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("boolean");
if v == 0 goto label;
v = 6;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("java.lang.String");
if v == 0 goto label;
v = 7;
// (No goto here: falling straight through into the tableswitch is the
// normal compiled shape for the last string case.)
label:
tableswitch(v)
{
case 0: goto label;
case 1: goto label;
case 2: goto label;
case 3: goto label;
case 4: goto label;
case 5: goto label;
case 6: goto label;
case 7: goto label;
default: goto label;
};
// Arm: "byte" -> TINYINT, REPEATED.
label:
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType TINYINT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
virtualinvoke v.<org.apache.drill.exec.record.metadata.MapBuilder: org.apache.drill.exec.record.metadata.MapBuilder add(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
goto label;
// Arm: "short" -> SMALLINT, REPEATED.
label:
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType SMALLINT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
virtualinvoke v.<org.apache.drill.exec.record.metadata.MapBuilder: org.apache.drill.exec.record.metadata.MapBuilder add(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
goto label;
// Arm: "int" -> INT, REPEATED.
label:
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType INT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
virtualinvoke v.<org.apache.drill.exec.record.metadata.MapBuilder: org.apache.drill.exec.record.metadata.MapBuilder add(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
goto label;
// Arm: "double" -> FLOAT8, REPEATED.
label:
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType FLOAT8>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
virtualinvoke v.<org.apache.drill.exec.record.metadata.MapBuilder: org.apache.drill.exec.record.metadata.MapBuilder add(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
goto label;
// Arm: "float" -> FLOAT4, REPEATED.
label:
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType FLOAT4>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
virtualinvoke v.<org.apache.drill.exec.record.metadata.MapBuilder: org.apache.drill.exec.record.metadata.MapBuilder add(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
goto label;
// Arm: "long" -> BIGINT, REPEATED.
label:
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType BIGINT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
virtualinvoke v.<org.apache.drill.exec.record.metadata.MapBuilder: org.apache.drill.exec.record.metadata.MapBuilder add(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
goto label;
// Arm: "boolean" -> BIT, REPEATED.
label:
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType BIT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
virtualinvoke v.<org.apache.drill.exec.record.metadata.MapBuilder: org.apache.drill.exec.record.metadata.MapBuilder add(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
goto label;
// Arm: "java.lang.String" -> VARCHAR, REPEATED.
label:
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode REPEATED>;
virtualinvoke v.<org.apache.drill.exec.record.metadata.MapBuilder: org.apache.drill.exec.record.metadata.MapBuilder add(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
goto label;
// Default arm: unsupported member type — warn and skip the column.
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Drill cannot process data type {} in compound fields.", v);
goto label;
// End of pass 1: finish the map, build the schema, and look up (or add) the
// "compound_data" column on the row writer. The addColumn call runs only
// when index("compound_data") returned -1, i.e. the column is not already
// present.
label:
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.MapBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder resumeSchema()>();
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.TupleMetadata buildSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>("compound_data");
v = (int) -1;
if v != v goto label;
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: org.apache.drill.exec.record.MaterializedField column(java.lang.String)>("compound_data");
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: int addColumn(org.apache.drill.exec.record.MaterializedField)>(v);
label:
// Obtain the TupleWriter for the map column.
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ObjectWriter column(int)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ObjectWriter: org.apache.drill.exec.vector.accessor.TupleWriter tuple()>();
// PASS 2 — data write: iterate the members again; for each, re-fetch the
// dataset (dimensions and data), pull the member's values out of the data
// object (cast to LinkedHashMap — presumably keyed by member name; TODO
// confirm against jHDF's compound-dataset documentation), and write
// dimensions[0] elements through the member's ArrayWriter.
v = interfaceinvoke v.<java.util.List: java.util.Iterator iterator()>();
label:
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
if v == 0 goto label;
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = virtualinvoke v.<io.jhdf.object.datatype.CompoundDataType$CompoundDataMember: io.jhdf.object.datatype.DataType getDataType()>();
v = virtualinvoke v.<io.jhdf.object.datatype.DataType: java.lang.Class getJavaType()>();
v = virtualinvoke v.<java.lang.Class: java.lang.String getName()>();
v = virtualinvoke v.<io.jhdf.object.datatype.CompoundDataType$CompoundDataMember: java.lang.String getName()>();
// NOTE(review): the dataset is looked up twice here (once for dimensions,
// once for data) per member — possibly redundant work, but cannot be safely
// altered in IR form.
v = virtualinvoke v.<io.jhdf.HdfFile: io.jhdf.api.Dataset getDatasetByPath(java.lang.String)>(v);
v = interfaceinvoke v.<io.jhdf.api.Dataset: int[] getDimensions()>();
v = virtualinvoke v.<io.jhdf.HdfFile: io.jhdf.api.Dataset getDatasetByPath(java.lang.String)>(v);
v = interfaceinvoke v.<io.jhdf.api.Dataset: java.lang.Object getData()>();
v = virtualinvoke v.<java.util.LinkedHashMap: java.lang.Object get(java.lang.Object)>(v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ArrayWriter array(java.lang.String)>(v);
// Element loop: index from 0 to dimensions[0] (exclusive).
v = 0;
label:
v = v[0];
if v >= v goto label;
// Same hashCode/equals String-switch pattern as pass 1, this time selecting
// which ScalarWriter setter to call for the element.
v = -1;
v = virtualinvoke v.<java.lang.String: int hashCode()>();
lookupswitch(v)
{
case -1325958191: goto label;
case 104431: goto label;
case 3039496: goto label;
case 3327612: goto label;
case 64711720: goto label;
case 97526364: goto label;
case 109413500: goto label;
case 1195259493: goto label;
default: goto label;
};
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("byte");
if v == 0 goto label;
v = 0;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("short");
if v == 0 goto label;
v = 1;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("int");
if v == 0 goto label;
v = 2;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("double");
if v == 0 goto label;
v = 3;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("float");
if v == 0 goto label;
v = 4;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("long");
if v == 0 goto label;
v = 5;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("boolean");
if v == 0 goto label;
v = 6;
goto label;
label:
v = virtualinvoke v.<java.lang.String: boolean equals(java.lang.Object)>("java.lang.String");
if v == 0 goto label;
v = 7;
// (fall-through into the tableswitch, as in pass 1)
label:
tableswitch(v)
{
case 0: goto label;
case 1: goto label;
case 2: goto label;
case 3: goto label;
case 4: goto label;
case 5: goto label;
case 6: goto label;
case 7: goto label;
default: goto label;
};
// "byte" element -> setInt (widened byte value).
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
goto label;
// "short" element -> setInt (widened short value).
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
goto label;
// "int" element -> setInt.
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
goto label;
// "double" element -> setDouble.
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setDouble(double)>(v);
goto label;
// "float" element -> setFloat.
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setFloat(float)>(v);
goto label;
// "long" element -> setLong.
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setLong(long)>(v);
goto label;
// "boolean" element -> setBoolean.
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setBoolean(boolean)>(v);
goto label;
// "java.lang.String" element: null elements take the setNull() path below;
// non-null elements go through setString.
label:
v = v[v];
if v == null goto label;
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
v = v[v];
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
goto label;
// Null-string path: write an explicit null.
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ArrayWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar()>();
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setNull()>();
goto label;
// Default: unsupported element type — warn (mirrors pass 1).
label:
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Drill cannot process data type {} in compound fields.", v);
// Loop increment and back-edge.
label:
v = v + 1;
goto label;
label:
return;
}
// NOTE(review): decompiled Jimple (Soot IR); all locals collapsed to "v" by
// the extraction, so comments describe structure only.
//
// Releases all resources held by this reader:
//   1. silently closes the HdfFile via AutoCloseables.closeSilently (no
//      exception propagates);
//   2. attempts to delete the backing file of the (now-closed) HdfFile,
//      logging a warning — not failing — if delete() returns false;
//   3. nulls the hdfFile field;
//   4. silently closes the BufferedReader and nulls the reader field.
// The close-before-delete order matters: on platforms where an open file
// cannot be deleted, the handle must be released first.
public void close()
{
java.lang.AutoCloseable[] v;
java.lang.Object[] v;
org.slf4j.Logger v;
java.io.File v, v;
org.apache.drill.exec.store.hdf.HDF5BatchReader v;
io.jhdf.HdfFile v, v, v;
java.io.BufferedReader v;
java.lang.String v;
boolean v;
v := @this: org.apache.drill.exec.store.hdf.HDF5BatchReader;
// Close the HdfFile via the varargs helper (single-element array).
v = newarray (java.lang.AutoCloseable)[1];
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v[0] = v;
staticinvoke <org.apache.drill.common.AutoCloseables: void closeSilently(java.lang.AutoCloseable[])>(v);
// Best-effort delete of the HDF5 temp file; warn on failure.
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = virtualinvoke v.<io.jhdf.HdfFile: java.io.File getFile()>();
v = virtualinvoke v.<java.io.File: boolean delete()>();
if v != 0 goto label;
v = <org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger>;
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile>;
v = virtualinvoke v.<io.jhdf.HdfFile: java.io.File getFile()>();
v = virtualinvoke v.<java.io.File: java.lang.String getName()>();
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Failed to delete HDF5 temp file {}", v);
label:
// Drop the field references so the objects can be collected and the reader
// cannot be reused accidentally.
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: io.jhdf.HdfFile hdfFile> = null;
v = newarray (java.lang.AutoCloseable)[1];
v = v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.io.BufferedReader reader>;
v[0] = v;
staticinvoke <org.apache.drill.common.AutoCloseables: void closeSilently(java.lang.AutoCloseable[])>(v);
v.<org.apache.drill.exec.store.hdf.HDF5BatchReader: java.io.BufferedReader reader> = null;
return;
}
// Static initializer (compiled form). Three jobs:
//   1. compute $assertionsDisabled: 1 when Class.desiredAssertionStatus()
//      is false (the standard javac assertion-guard pattern);
//   2. create the SLF4J logger for this class;
//   3. copy ValueVector.MAX_BUFFER_SIZE into MAX_DATASET_SIZE.
// NOTE(review): the class constants name the package
// org.apache.drill.exec.store.hdf5 while this dump's declared class is
// ...store.hdf — likely an artifact of the decompilation/renaming; the
// constant strings are runtime data and are left untouched here. Confirm
// against the original source tree.
static void <clinit>()
{
org.slf4j.Logger v;
int v;
java.lang.Class v;
boolean v, v;
v = class "Lorg/apache/drill/exec/store/hdf5/HDF5BatchReader;";
v = virtualinvoke v.<java.lang.Class: boolean desiredAssertionStatus()>();
// Invert: assertions requested -> 0; not requested -> 1.
if v != 0 goto label;
v = 1;
goto label;
label:
v = 0;
label:
<org.apache.drill.exec.store.hdf.HDF5BatchReader: boolean $assertionsDisabled> = v;
v = staticinvoke <org.slf4j.LoggerFactory: org.slf4j.Logger getLogger(java.lang.Class)>(class "Lorg/apache/drill/exec/store/hdf5/HDF5BatchReader;");
<org.apache.drill.exec.store.hdf.HDF5BatchReader: org.slf4j.Logger logger> = v;
// Cap dataset size at the value vector buffer limit.
v = <org.apache.drill.exec.vector.ValueVector: int MAX_BUFFER_SIZE>;
<org.apache.drill.exec.store.hdf.HDF5BatchReader: int MAX_DATASET_SIZE> = v;
return;
}
}