public class org.apache.drill.exec.store.esri.ShpBatchReader extends java.lang.Object implements org.apache.drill.exec.physical.impl.scan.v.ManagedReader
{
private static final org.slf4j.Logger logger;
private static final java.lang.String GID_FIELD_NAME;
private static final java.lang.String SRID_FIELD_NAME;
private static final java.lang.String SHAPE_TYPE_FIELD_NAME;
private static final java.lang.String GEOM_FIELD_NAME;
private static final java.lang.String SRID_PATTERN_TEXT;
private final org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file;
private final org.apache.hadoop.fs.Path hadoopShp;
private final org.apache.hadoop.fs.Path hadoopDbf;
private final org.apache.hadoop.fs.Path hadoopPrj;
private java.io.InputStream fileReaderShp;
private java.io.InputStream fileReaderDbf;
private java.io.InputStream fileReaderPrj;
private com.esri.core.geometry.GeometryCursor geomCursor;
private org.jamel.dbf.DbfReader dbfReader;
private final org.apache.drill.exec.vector.accessor.ScalarWriter gidWriter;
private final org.apache.drill.exec.vector.accessor.ScalarWriter sridWriter;
private final org.apache.drill.exec.vector.accessor.ScalarWriter shapeTypeWriter;
private final org.apache.drill.exec.vector.accessor.ScalarWriter geomWriter;
private final org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter;
private int srid;
private com.esri.core.geometry.SpatialReference spatialReference;
public void <init>(org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator)
{
org.apache.drill.exec.record.metadata.SchemaBuilder v, v, v, v, v;
org.apache.drill.common.types.TypeProtos$MinorType v, v, v, v;
org.apache.hadoop.mapred.FileSplit v, v;
org.apache.hadoop.fs.Path v, v, v, v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip v, v, v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v, v, v, v, v;
org.apache.drill.exec.vector.accessor.ScalarWriter v, v, v, v;
java.lang.String v, v, v;
org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator v;
org.apache.drill.exec.physical.resultSet.ResultSetLoader v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
v := @parameter: org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator;
specialinvoke v.<java.lang.Object: void <init>()>();
v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderShp> = null;
v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderDbf> = null;
v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderPrj> = null;
v.<org.apache.drill.exec.store.esri.ShpBatchReader: com.esri.core.geometry.GeometryCursor geomCursor> = null;
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.jamel.dbf.DbfReader dbfReader> = null;
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator: org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file()>();
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file>;
v = virtualinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip: org.apache.hadoop.mapred.FileSplit split()>();
v = virtualinvoke v.<org.apache.hadoop.mapred.FileSplit: org.apache.hadoop.fs.Path getPath()>();
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.hadoop.fs.Path hadoopShp> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file>;
v = virtualinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip: org.apache.hadoop.mapred.FileSplit split()>();
v = virtualinvoke v.<org.apache.hadoop.mapred.FileSplit: org.apache.hadoop.fs.Path getPath()>();
v = virtualinvoke v.<org.apache.hadoop.fs.Path: java.lang.String toString()>();
v = new org.apache.hadoop.fs.Path;
v = virtualinvoke v.<java.lang.String: java.lang.String replace(java.lang.CharSequence,java.lang.CharSequence)>(".shp", ".dbf");
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.hadoop.fs.Path hadoopDbf> = v;
v = new org.apache.hadoop.fs.Path;
v = virtualinvoke v.<java.lang.String: java.lang.String replace(java.lang.CharSequence,java.lang.CharSequence)>(".shp", ".prj");
specialinvoke v.<org.apache.hadoop.fs.Path: void <init>(java.lang.String)>(v);
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.hadoop.fs.Path hadoopPrj> = v;
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void openFile(org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator)>(v);
v = new org.apache.drill.exec.record.metadata.SchemaBuilder;
specialinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: void <init>()>();
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType INT>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("gid", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType INT>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("srid", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("shapeType", v);
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARBINARY>;
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.SchemaBuilder addNullable(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType)>("geom", v);
v = virtualinvoke v.<org.apache.drill.exec.record.metadata.SchemaBuilder: org.apache.drill.exec.record.metadata.TupleMetadata buildSchema()>();
interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator: void tableSchema(org.apache.drill.exec.record.metadata.TupleMetadata,boolean)>(v, 0);
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator: org.apache.drill.exec.physical.resultSet.ResultSetLoader build()>();
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.ResultSetLoader: org.apache.drill.exec.physical.resultSet.RowSetLoader writer()>();
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("gid");
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter gidWriter> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("srid");
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter sridWriter> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("shapeType");
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter shapeTypeWriter> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(java.lang.String)>("geom");
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter geomWriter> = v;
return;
}
public boolean next()
{
java.lang.Object[] v;
com.esri.core.geometry.GeometryCursor v, v;
com.esri.core.geometry.Geometry v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v, v;
int v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
org.jamel.dbf.DbfReader v;
boolean v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
label:
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean isFull()>();
if v != 0 goto label;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.jamel.dbf.DbfReader dbfReader>;
v = virtualinvoke v.<org.jamel.dbf.DbfReader: java.lang.Object[] nextRecord()>();
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: com.esri.core.geometry.GeometryCursor geomCursor>;
v = virtualinvoke v.<com.esri.core.geometry.GeometryCursor: com.esri.core.geometry.Geometry next()>();
if v != null goto label;
return 0;
label:
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.resultSet.RowSetLoader rowWriter>;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: com.esri.core.geometry.GeometryCursor geomCursor>;
v = virtualinvoke v.<com.esri.core.geometry.GeometryCursor: int getGeometryID()>();
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void processShapefileSet(org.apache.drill.exec.physical.resultSet.RowSetLoader,int,com.esri.core.geometry.Geometry,java.lang.Object[])>(v, v, v, v);
goto label;
label:
return 1;
}
private void openFile(org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator)
{
byte[] v, v;
org.apache.drill.common.exceptions.UserException v;
com.esri.core.geometry.GeometryCursor v;
org.apache.drill.exec.store.dfs.DrillFileSystem v, v, v;
java.nio.ByteBuffer v;
java.util.regex.Matcher v;
org.apache.hadoop.mapred.FileSplit v, v;
org.apache.hadoop.fs.Path v, v, v, v, v;
boolean v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip v, v, v, v, v;
java.util.regex.Pattern v;
org.jamel.dbf.DbfReader v;
java.lang.Object[] v;
org.apache.drill.common.exceptions.UserException$Builder v, v, v;
java.nio.charset.Charset v;
int v, v, v, v, v, v;
com.esri.core.geometry.ShapefileReader v;
java.lang.String v, v, v;
org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator v;
org.slf4j.Logger v, v;
java.io.IOException v;
com.esri.core.geometry.SpatialReference v;
java.io.InputStream v, v, v, v, v, v, v, v, v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
v := @parameter: org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator;
label:
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file>;
v = virtualinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip: org.apache.drill.exec.store.dfs.DrillFileSystem fileSystem()>();
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file>;
v = virtualinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip: org.apache.hadoop.mapred.FileSplit split()>();
v = virtualinvoke v.<org.apache.hadoop.mapred.FileSplit: org.apache.hadoop.fs.Path getPath()>();
v = virtualinvoke v.<org.apache.drill.exec.store.dfs.DrillFileSystem: java.io.InputStream openPossiblyCompressedStream(org.apache.hadoop.fs.Path)>(v);
v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderShp> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderShp>;
v = virtualinvoke v.<java.io.InputStream: int available()>();
v = newarray (byte)[v];
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderShp>;
virtualinvoke v.<java.io.InputStream: int read(byte[])>(v);
v = staticinvoke <java.nio.ByteBuffer: java.nio.ByteBuffer wrap(byte[])>(v);
v = virtualinvoke v.<java.nio.ByteBuffer: int position()>();
v = v + 100;
virtualinvoke v.<java.nio.ByteBuffer: java.nio.ByteBuffer position(int)>(v);
v = new com.esri.core.geometry.ShapefileReader;
specialinvoke v.<com.esri.core.geometry.ShapefileReader: void <init>()>();
v = virtualinvoke v.<com.esri.core.geometry.ShapefileReader: com.esri.core.geometry.GeometryCursor getGeometryCursor(java.nio.ByteBuffer)>(v);
v.<org.apache.drill.exec.store.esri.ShpBatchReader: com.esri.core.geometry.GeometryCursor geomCursor> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file>;
v = virtualinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip: org.apache.drill.exec.store.dfs.DrillFileSystem fileSystem()>();
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.hadoop.fs.Path hadoopDbf>;
v = virtualinvoke v.<org.apache.drill.exec.store.dfs.DrillFileSystem: java.io.InputStream openPossiblyCompressedStream(org.apache.hadoop.fs.Path)>(v);
v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderDbf> = v;
v = new org.jamel.dbf.DbfReader;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderDbf>;
specialinvoke v.<org.jamel.dbf.DbfReader: void <init>(java.io.InputStream)>(v);
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.jamel.dbf.DbfReader dbfReader> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file>;
v = virtualinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip: org.apache.drill.exec.store.dfs.DrillFileSystem fileSystem()>();
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.hadoop.fs.Path hadoopPrj>;
v = virtualinvoke v.<org.apache.drill.exec.store.dfs.DrillFileSystem: java.io.InputStream openPossiblyCompressedStream(org.apache.hadoop.fs.Path)>(v);
v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderPrj> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderPrj>;
v = virtualinvoke v.<java.io.InputStream: int available()>();
v = newarray (byte)[v];
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderPrj>;
virtualinvoke v.<java.io.InputStream: int read(byte[])>(v);
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderPrj>;
virtualinvoke v.<java.io.InputStream: void close()>();
v = new java.lang.String;
v = <java.nio.charset.StandardCharsets: java.nio.charset.Charset UTF_8>;
specialinvoke v.<java.lang.String: void <init>(byte[],java.nio.charset.Charset)>(v, v);
v = staticinvoke <java.util.regex.Pattern: java.util.regex.Pattern compile(java.lang.String)>("AUTHORITY\\[\"\\w+\"\\s*,\\s*\"*(\\d+)\"*\\]\\]$");
v = virtualinvoke v.<java.util.regex.Pattern: java.util.regex.Matcher matcher(java.lang.CharSequence)>(v);
v = virtualinvoke v.<java.util.regex.Matcher: boolean find()>();
if v == 0 goto label;
v = virtualinvoke v.<java.util.regex.Matcher: java.lang.String group(int)>(1);
v = staticinvoke <java.lang.Integer: int parseInt(java.lang.String)>(v);
v.<org.apache.drill.exec.store.esri.ShpBatchReader: int srid> = v;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: int srid>;
v = staticinvoke <com.esri.core.geometry.SpatialReference: com.esri.core.geometry.SpatialReference create(int)>(v);
v.<org.apache.drill.exec.store.esri.ShpBatchReader: com.esri.core.geometry.SpatialReference spatialReference> = v;
label:
v = <org.apache.drill.exec.store.esri.ShpBatchReader: org.slf4j.Logger logger>;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.hadoop.fs.Path hadoopShp>;
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String,java.lang.Object)>("Processing Shape File: {}", v);
label:
goto label;
label:
v := @caughtexception;
v = staticinvoke <org.apache.drill.common.exceptions.UserException: org.apache.drill.common.exceptions.UserException$Builder dataReadError(java.lang.Throwable)>(v);
v = newarray (java.lang.Object)[1];
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip file>;
v = virtualinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileDescrip: org.apache.hadoop.mapred.FileSplit split()>();
v = virtualinvoke v.<org.apache.hadoop.mapred.FileSplit: org.apache.hadoop.fs.Path getPath()>();
v[0] = v;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException$Builder message(java.lang.String,java.lang.Object[])>("Failed to open open input file: %s", v);
v = interfaceinvoke v.<org.apache.drill.exec.physical.impl.scan.v.file.FileSchemaNegotiator: java.lang.String userName()>();
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException$Builder addContext(java.lang.String,java.lang.String)>("User name", v);
v = <org.apache.drill.exec.store.esri.ShpBatchReader: org.slf4j.Logger logger>;
v = virtualinvoke v.<org.apache.drill.common.exceptions.UserException$Builder: org.apache.drill.common.exceptions.UserException build(org.slf4j.Logger)>(v);
throw v;
label:
return;
catch java.io.IOException from label to label with label;
}
private void processShapefileSet(org.apache.drill.exec.physical.resultSet.RowSetLoader, int, com.esri.core.geometry.Geometry, java.lang.Object[])
{
byte[] v;
java.lang.Object[] v;
com.esri.core.geometry.ogc.OGCGeometry v;
java.nio.ByteBuffer v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
org.apache.drill.exec.vector.accessor.ScalarWriter v, v, v, v;
int v, v, v;
java.lang.String v;
com.esri.core.geometry.Geometry v;
com.esri.core.geometry.Geometry$Type v;
com.esri.core.geometry.SpatialReference v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
v := @parameter: int;
v := @parameter: com.esri.core.geometry.Geometry;
v := @parameter: java.lang.Object[];
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: boolean start()>();
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter gidWriter>;
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter sridWriter>;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: int srid>;
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter shapeTypeWriter>;
v = virtualinvoke v.<com.esri.core.geometry.Geometry: com.esri.core.geometry.Geometry$Type getType()>();
v = virtualinvoke v.<com.esri.core.geometry.Geometry$Type: java.lang.String toString()>();
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: com.esri.core.geometry.SpatialReference spatialReference>;
v = staticinvoke <com.esri.core.geometry.ogc.OGCGeometry: com.esri.core.geometry.ogc.OGCGeometry createFromEsriGeometry(com.esri.core.geometry.Geometry,com.esri.core.geometry.SpatialReference)>(v, v);
v = virtualinvoke v.<com.esri.core.geometry.ogc.OGCGeometry: java.nio.ByteBuffer asBinary()>();
v = virtualinvoke v.<java.nio.ByteBuffer: byte[] array()>();
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.apache.drill.exec.vector.accessor.ScalarWriter geomWriter>;
v = lengthof v;
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setBytes(byte[],int)>(v, v);
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void writeDbfRow(java.lang.Object[],org.apache.drill.exec.physical.resultSet.RowSetLoader)>(v, v);
interfaceinvoke v.<org.apache.drill.exec.physical.resultSet.RowSetLoader: void save()>();
return;
}
private void writeDbfRow(java.lang.Object[], org.apache.drill.exec.physical.resultSet.RowSetLoader)
{
byte v;
int[] v;
org.jamel.dbf.structure.DbfHeader v, v;
boolean v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
org.jamel.dbf.DbfReader v, v;
java.lang.Object[] v;
long v;
org.apache.drill.exec.physical.resultSet.RowSetLoader v;
java.nio.charset.Charset v;
int v, v, v, v, v;
java.lang.String v, v, v, v, v, v, v, v;
double v, v;
java.lang.Object v, v, v, v, v, v;
org.jamel.dbf.structure.DbfField v;
org.jamel.dbf.structure.DbfDataType v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
v := @parameter: java.lang.Object[];
v := @parameter: org.apache.drill.exec.physical.resultSet.RowSetLoader;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.jamel.dbf.DbfReader dbfReader>;
v = virtualinvoke v.<org.jamel.dbf.DbfReader: org.jamel.dbf.structure.DbfHeader getHeader()>();
v = virtualinvoke v.<org.jamel.dbf.structure.DbfHeader: int getFieldsCount()>();
v = 0;
label:
if v >= v goto label;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.jamel.dbf.DbfReader dbfReader>;
v = virtualinvoke v.<org.jamel.dbf.DbfReader: org.jamel.dbf.structure.DbfHeader getHeader()>();
v = virtualinvoke v.<org.jamel.dbf.structure.DbfHeader: org.jamel.dbf.structure.DbfField getField(int)>(v);
v = v[v];
if v == null goto label;
v = <org.apache.drill.exec.store.esri.ShpBatchReader$1: int[] $SwitchMap$org$jamel$dbf$structure$DbfDataType>;
v = virtualinvoke v.<org.jamel.dbf.structure.DbfField: org.jamel.dbf.structure.DbfDataType getDataType()>();
v = virtualinvoke v.<org.jamel.dbf.structure.DbfDataType: int ordinal()>();
v = v[v];
tableswitch(v)
{
case 1: goto label;
case 2: goto label;
case 3: goto label;
case 4: goto label;
case 5: goto label;
default: goto label;
};
label:
v = v[v];
v = new java.lang.String;
v = <java.nio.charset.StandardCharsets: java.nio.charset.Charset UTF_8>;
specialinvoke v.<java.lang.String: void <init>(byte[],java.nio.charset.Charset)>(v, v);
v = virtualinvoke v.<java.lang.String: java.lang.String trim()>();
v = virtualinvoke v.<org.jamel.dbf.structure.DbfField: java.lang.String getName()>();
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void writeStringColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,java.lang.String)>(v, v, v);
goto label;
label:
v = v[v];
v = virtualinvoke v.<java.lang.Float: double doubleValue()>();
v = virtualinvoke v.<org.jamel.dbf.structure.DbfField: java.lang.String getName()>();
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void writeDoubleColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,double)>(v, v, v);
goto label;
label:
v = v[v];
v = virtualinvoke v.<java.util.Date: long getTime()>();
v = virtualinvoke v.<org.jamel.dbf.structure.DbfField: java.lang.String getName()>();
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void writeTimeColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,long)>(v, v, v);
goto label;
label:
v = v[v];
v = virtualinvoke v.<java.lang.Boolean: boolean booleanValue()>();
if v == 0 goto label;
v = 1;
goto label;
label:
v = 0;
label:
v = virtualinvoke v.<org.jamel.dbf.structure.DbfField: java.lang.String getName()>();
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void writeBooleanColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,int)>(v, v, v);
goto label;
label:
v = v[v];
v = virtualinvoke v.<java.lang.Number: double doubleValue()>();
v = virtualinvoke v.<org.jamel.dbf.structure.DbfField: int getDecimalCount()>();
if v != 0 goto label;
v = virtualinvoke v.<org.jamel.dbf.structure.DbfField: java.lang.String getName()>();
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void writeIntColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,int)>(v, v, v);
goto label;
label:
v = virtualinvoke v.<org.jamel.dbf.structure.DbfField: java.lang.String getName()>();
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void writeDoubleColumn(org.apache.drill.exec.vector.accessor.TupleWriter,java.lang.String,double)>(v, v, v);
label:
v = v + 1;
goto label;
label:
return;
}
private void writeStringColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, java.lang.String)
{
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.common.types.TypeProtos$DataMode v;
int v, v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v, v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType VARCHAR>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode OPTIONAL>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(int)>(v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setString(java.lang.String)>(v);
return;
}
private void writeDoubleColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, double)
{
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.common.types.TypeProtos$DataMode v;
int v, v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
double v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: double;
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType FLOAT8>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode OPTIONAL>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(int)>(v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setDouble(double)>(v);
return;
}
private void writeBooleanColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, int)
{
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.common.types.TypeProtos$DataMode v;
int v, v, v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
boolean v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: int;
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType INT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode OPTIONAL>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = 1;
if v != 0 goto label;
v = 0;
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(int)>(v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setBoolean(boolean)>(v);
return;
}
private void writeIntColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, int)
{
org.apache.drill.exec.vector.accessor.TupleWriter v;
org.apache.drill.exec.record.metadata.TupleMetadata v;
org.apache.drill.common.types.TypeProtos$MinorType v;
org.apache.drill.exec.vector.accessor.ScalarWriter v;
org.apache.drill.common.types.TypeProtos$DataMode v;
int v, v, v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata v;
java.lang.String v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
v := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
v := @parameter: java.lang.String;
v := @parameter: int;
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
v = interfaceinvoke v.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(v);
v = (int) -1;
if v != v goto label;
v = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType INT>;
v = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode OPTIONAL>;
v = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(v, v, v);
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(v);
label:
v = interfaceinvoke v.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(int)>(v);
interfaceinvoke v.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setInt(int)>(v);
return;
}
private void writeTimeColumn(org.apache.drill.exec.vector.accessor.TupleWriter, java.lang.String, long)
{
// Writes an epoch-millis value into the named column of the current row,
// lazily adding an OPTIONAL TIMESTAMP column on first use.
//
// FIX: the lazily-added column was registered with MinorType INT, which is
// inconsistent with the setTimestamp(Instant) write below (a timestamp
// written through an INT column writer fails); the column is now registered
// as TIMESTAMP, matching the value actually written.
org.apache.drill.exec.store.esri.ShpBatchReader self;
org.apache.drill.exec.vector.accessor.TupleWriter rowWriter;
java.lang.String fieldName;
long epochMillis;
java.time.Instant instant;
org.apache.drill.exec.record.metadata.TupleMetadata schema;
org.apache.drill.common.types.TypeProtos$MinorType timestampType;
org.apache.drill.common.types.TypeProtos$DataMode optionalMode;
org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata colSchema;
org.apache.drill.exec.vector.accessor.ScalarWriter colWriter;
int columnIndex, notFound;
self := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
rowWriter := @parameter: org.apache.drill.exec.vector.accessor.TupleWriter;
fieldName := @parameter: java.lang.String;
epochMillis := @parameter: long;
// Look the field up in the current row schema.
schema = interfaceinvoke rowWriter.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.record.metadata.TupleMetadata tupleSchema()>();
columnIndex = interfaceinvoke schema.<org.apache.drill.exec.record.metadata.TupleMetadata: int index(java.lang.String)>(fieldName);
instant = staticinvoke <java.time.Instant: java.time.Instant ofEpochMilli(long)>(epochMillis);
notFound = (int) -1;
if columnIndex != notFound goto label;
// Column not present yet: register it as OPTIONAL TIMESTAMP.
timestampType = <org.apache.drill.common.types.TypeProtos$MinorType: org.apache.drill.common.types.TypeProtos$MinorType TIMESTAMP>;
optionalMode = <org.apache.drill.common.types.TypeProtos$DataMode: org.apache.drill.common.types.TypeProtos$DataMode OPTIONAL>;
colSchema = staticinvoke <org.apache.drill.exec.record.metadata.MetadataUtils: org.apache.drill.exec.record.metadata.PrimitiveColumnMetadata newScalar(java.lang.String,org.apache.drill.common.types.TypeProtos$MinorType,org.apache.drill.common.types.TypeProtos$DataMode)>(fieldName, timestampType, optionalMode);
columnIndex = interfaceinvoke rowWriter.<org.apache.drill.exec.vector.accessor.TupleWriter: int addColumn(org.apache.drill.exec.record.metadata.ColumnMetadata)>(colSchema);
label:
colWriter = interfaceinvoke rowWriter.<org.apache.drill.exec.vector.accessor.TupleWriter: org.apache.drill.exec.vector.accessor.ScalarWriter scalar(int)>(columnIndex);
interfaceinvoke colWriter.<org.apache.drill.exec.vector.accessor.ScalarWriter: void setTimestamp(java.time.Instant)>(instant);
return;
}
public void close()
{
// Releases all resources held by this reader: the three input streams
// (.shp geometry, .dbf attributes, .prj projection) and the DBF reader.
// Never propagates an exception from the DBF reader's close.
org.slf4j.Logger v;
java.lang.Exception v;
org.apache.drill.exec.store.esri.ShpBatchReader v;
java.lang.String v;
org.jamel.dbf.DbfReader v, v;
java.io.InputStream v, v, v;
v := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
// Close the three raw input streams via the closeStream helper
// (null-tolerant, per its leading null check).
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderShp>;
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void closeStream(java.io.InputStream,java.lang.String)>(v, "ESRI Shapefile");
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderDbf>;
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void closeStream(java.io.InputStream,java.lang.String)>(v, "DBF Shapefile");
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: java.io.InputStream fileReaderPrj>;
specialinvoke v.<org.apache.drill.exec.store.esri.ShpBatchReader: void closeStream(java.io.InputStream,java.lang.String)>(v, "PRJ Shapefile");
// Protected region (see trailing catch clause):
// try { if (dbfReader != null) dbfReader.close(); }
label:
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.jamel.dbf.DbfReader dbfReader>;
if v == null goto label;
v = v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.jamel.dbf.DbfReader dbfReader>;
virtualinvoke v.<org.jamel.dbf.DbfReader: void close()>();
label:
goto label;
// Handler: log a warning and fall through -- close() must stay quiet.
// NOTE(review): only e.getMessage() is logged; the stack trace is dropped.
label:
v := @caughtexception;
v = <org.apache.drill.exec.store.esri.ShpBatchReader: org.slf4j.Logger logger>;
v = virtualinvoke v.<java.lang.Exception: java.lang.String getMessage()>();
interfaceinvoke v.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object)>("Error when closing DBF Reader: {}", v);
// Clear the DBF reader reference on both the success and failure paths.
label:
v.<org.apache.drill.exec.store.esri.ShpBatchReader: org.jamel.dbf.DbfReader dbfReader> = null;
return;
catch java.lang.Exception from label to label with label;
}
private void closeStream(java.io.InputStream, java.lang.String)
{
// Null-tolerant helper that closes one of the reader's input streams,
// logging (but never propagating) any close failure so that close() can
// continue releasing the remaining resources.
//
// FIX: as dumped, both branches returned without ever calling close(),
// leaking the stream and leaving the name parameter unused. The stream is
// now closed inside a try/catch(IOException), and the name parameter is
// used in the warning message, mirroring the logging style of close().
org.apache.drill.exec.store.esri.ShpBatchReader self;
java.io.InputStream stream;
java.lang.String streamName;
java.io.IOException e;
org.slf4j.Logger log;
java.lang.String msg;
self := @this: org.apache.drill.exec.store.esri.ShpBatchReader;
stream := @parameter: java.io.InputStream;
streamName := @parameter: java.lang.String;
// Nothing to do when the stream was never opened.
if stream != null goto label1;
return;
label1:
virtualinvoke stream.<java.io.InputStream: void close()>();
label2:
goto label4;
// Handler: warn with the stream's descriptive name and the failure message.
label3:
e := @caughtexception;
log = <org.apache.drill.exec.store.esri.ShpBatchReader: org.slf4j.Logger logger>;
msg = virtualinvoke e.<java.io.IOException: java.lang.String getMessage()>();
interfaceinvoke log.<org.slf4j.Logger: void warn(java.lang.String,java.lang.Object,java.lang.Object)>("Error when closing {}: {}", streamName, msg);
label4:
return;
catch java.io.IOException from label1 to label2 with label3;
}
static void <clinit>()
{
// Class initializer: binds the shared SLF4J logger for this reader class.
org.slf4j.Logger classLogger;
classLogger = staticinvoke <org.slf4j.LoggerFactory: org.slf4j.Logger getLogger(java.lang.Class)>(class "Lorg/apache/drill/exec/store/esri/ShpBatchReader;");
<org.apache.drill.exec.store.esri.ShpBatchReader: org.slf4j.Logger logger> = classLogger;
return;
}
}