final class org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter extends java.lang.Object implements org.apache.parquet.column.page.PageWriter, org.apache.parquet.column.values.bloomfilter.BloomFilterWriter, java.io.Closeable
{
private final org.apache.parquet.column.ColumnDescriptor path;
private final org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor;
private final org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream;
private final org.apache.parquet.bytes.CapacityByteArrayOutputStream buf;
private org.apache.parquet.column.page.DictionaryPage dictionaryPage;
private long uncompressedLength;
private long compressedLength;
private long totalValueCount;
private int pageCount;
private java.util.Set rlEncodings;
private java.util.Set dlEncodings;
private java.util.List dataEncodings;
private org.apache.parquet.column.values.bloomfilter.BloomFilter bloomFilter;
private org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder columnIndexBuilder;
private org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder offsetIndexBuilder;
private org.apache.parquet.column.statistics.Statistics totalStatistics;
private final org.apache.parquet.bytes.ByteBufferAllocator allocator;
private final java.util.zip.CRC32 crc;
boolean pageWriteChecksumEnabled;
private final org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor;
private final org.apache.parquet.format.BlockCipher$Encryptor pageBlockEncryptor;
private final int rowGroupOrdinal;
private final int columnOrdinal;
private int pageOrdinal;
private final byte[] dataPageAAD;
private final byte[] dataPageHeaderAAD;
private final byte[] fileAAD;
private void <init>(org.apache.parquet.column.ColumnDescriptor, org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor, int, int, org.apache.parquet.bytes.ByteBufferAllocator, int, boolean, org.apache.parquet.format.BlockCipher$Encryptor, org.apache.parquet.format.BlockCipher$Encryptor, byte[], int, int)
{
org.apache.parquet.schema.PrimitiveType v;
byte[] v, v, v;
org.apache.parquet.bytes.CapacityByteArrayOutputStream v, v;
org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor v;
org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder v;
java.util.ArrayList v;
java.util.HashSet v, v;
int v, v, v, v, v, v;
org.apache.parquet.format.BlockCipher$Encryptor v, v;
boolean v;
org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
org.apache.parquet.bytes.ByteBufferAllocator v;
org.apache.parquet.crypto.ModuleCipherFactory$ModuleType v, v;
org.apache.parquet.column.ColumnDescriptor v;
java.util.zip.CRC32 v, v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v := @parameter: org.apache.parquet.column.ColumnDescriptor;
v := @parameter: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor;
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.parquet.bytes.ByteBufferAllocator;
v := @parameter: int;
v := @parameter: boolean;
v := @parameter: org.apache.parquet.format.BlockCipher$Encryptor;
v := @parameter: org.apache.parquet.format.BlockCipher$Encryptor;
v := @parameter: byte[];
v := @parameter: int;
v := @parameter: int;
specialinvoke v.<java.lang.Object: void <init>()>();
v = new java.util.HashSet;
specialinvoke v.<java.util.HashSet: void <init>()>();
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set rlEncodings> = v;
v = new java.util.HashSet;
specialinvoke v.<java.util.HashSet: void <init>()>();
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set dlEncodings> = v;
v = new java.util.ArrayList;
specialinvoke v.<java.util.ArrayList: void <init>()>();
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.List dataEncodings> = v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.ColumnDescriptor path> = v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor> = v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.ByteBufferAllocator allocator> = v;
v = new org.apache.parquet.bytes.CapacityByteArrayOutputStream;
specialinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: void <init>(int,int,org.apache.parquet.bytes.ByteBufferAllocator)>(v, v, v);
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream> = v;
v = new org.apache.parquet.bytes.CapacityByteArrayOutputStream;
specialinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: void <init>(int,int,org.apache.parquet.bytes.ByteBufferAllocator)>(v, v, v);
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf> = v;
v = virtualinvoke v.<org.apache.parquet.column.ColumnDescriptor: org.apache.parquet.schema.PrimitiveType getPrimitiveType()>();
v = staticinvoke <org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder: org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder getBuilder(org.apache.parquet.schema.PrimitiveType,int)>(v, v);
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder columnIndexBuilder> = v;
v = staticinvoke <org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder: org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder getBuilder()>();
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder offsetIndexBuilder> = v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: boolean pageWriteChecksumEnabled> = v;
if v == 0 goto label;
v = new java.util.zip.CRC32;
v = v;
specialinvoke v.<java.util.zip.CRC32: void <init>()>();
goto label;
label:
v = null;
label:
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.zip.CRC32 crc> = v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor> = v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor pageBlockEncryptor> = v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] fileAAD> = v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int rowGroupOrdinal> = v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int columnOrdinal> = v;
v = (int) -1;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal> = v;
if null == v goto label;
v = <org.apache.parquet.crypto.ModuleCipherFactory$ModuleType: org.apache.parquet.crypto.ModuleCipherFactory$ModuleType DataPageHeader>;
v = staticinvoke <org.apache.parquet.crypto.AesCipher: byte[] createModuleAAD(byte[],org.apache.parquet.crypto.ModuleCipherFactory$ModuleType,int,int,int)>(v, v, v, v, 0);
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageHeaderAAD> = v;
goto label;
label:
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageHeaderAAD> = null;
label:
if null == v goto label;
v = <org.apache.parquet.crypto.ModuleCipherFactory$ModuleType: org.apache.parquet.crypto.ModuleCipherFactory$ModuleType DataPage>;
v = staticinvoke <org.apache.parquet.crypto.AesCipher: byte[] createModuleAAD(byte[],org.apache.parquet.crypto.ModuleCipherFactory$ModuleType,int,int,int)>(v, v, v, v, 0);
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageAAD> = v;
goto label;
label:
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageAAD> = null;
label:
return;
}
public void writePage(org.apache.parquet.bytes.BytesInput, int, org.apache.parquet.column.statistics.Statistics, org.apache.parquet.column.Encoding, org.apache.parquet.column.Encoding, org.apache.parquet.column.Encoding) throws java.io.IOException
{
org.apache.parquet.column.Encoding v, v, v;
org.apache.parquet.column.statistics.Statistics v;
org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder v;
org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
org.apache.parquet.bytes.BytesInput v;
int v, v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v := @parameter: org.apache.parquet.bytes.BytesInput;
v := @parameter: int;
v := @parameter: org.apache.parquet.column.statistics.Statistics;
v := @parameter: org.apache.parquet.column.Encoding;
v := @parameter: org.apache.parquet.column.Encoding;
v := @parameter: org.apache.parquet.column.Encoding;
v = staticinvoke <org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder: org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder getNoOpBuilder()>();
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder columnIndexBuilder> = v;
v = staticinvoke <org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder: org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder getNoOpBuilder()>();
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder offsetIndexBuilder> = v;
v = (int) -1;
virtualinvoke v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: void writePage(org.apache.parquet.bytes.BytesInput,int,int,org.apache.parquet.column.statistics.Statistics,org.apache.parquet.column.Encoding,org.apache.parquet.column.Encoding,org.apache.parquet.column.Encoding)>(v, v, v, v, v, v, v);
return;
}
public void writePage(org.apache.parquet.bytes.BytesInput, int, int, org.apache.parquet.column.statistics.Statistics, org.apache.parquet.column.Encoding, org.apache.parquet.column.Encoding, org.apache.parquet.column.Encoding) throws java.io.IOException
{
byte[] v, v, v, v, v, v, v, v;
org.apache.parquet.column.Encoding v, v, v;
org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor v;
byte v, v;
org.apache.parquet.format.BlockCipher$Encryptor v, v, v, v, v;
org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder v;
java.util.Set v, v;
long v, v, v, v, v, v, v, v, v, v, v;
org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder v;
java.lang.String v, v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
java.util.zip.CRC32 v, v, v;
org.apache.parquet.bytes.CapacityByteArrayOutputStream v, v, v, v, v, v;
org.apache.parquet.format.converter.ParquetMetadataConverter v, v;
org.apache.parquet.bytes.BytesInput v, v, v, v;
boolean v;
org.apache.parquet.bytes.BytesInput[] v;
java.util.List v;
org.apache.parquet.io.ParquetEncodingException v, v;
int v, v, v, v, v, v, v, v, v;
org.apache.parquet.column.statistics.Statistics v, v, v, v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v := @parameter: org.apache.parquet.bytes.BytesInput;
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.parquet.column.statistics.Statistics;
v := @parameter: org.apache.parquet.column.Encoding;
v := @parameter: org.apache.parquet.column.Encoding;
v := @parameter: org.apache.parquet.column.Encoding;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal>;
v = v + 1;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal> = v;
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = v cmp 2147483647L;
if v <= 0 goto label;
v = new org.apache.parquet.io.ParquetEncodingException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (long)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Cannot write page larger than Integer.MAX_VALUE bytes: \u0001");
specialinvoke v.<org.apache.parquet.io.ParquetEncodingException: void <init>(java.lang.String)>(v);
throw v;
label:
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor>;
v = interfaceinvoke v.<org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor: org.apache.parquet.bytes.BytesInput compress(org.apache.parquet.bytes.BytesInput)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor pageBlockEncryptor>;
if null == v goto label;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageAAD>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal>;
staticinvoke <org.apache.parquet.crypto.AesCipher: void quickUpdatePageAAD(byte[],int)>(v, v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor pageBlockEncryptor>;
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: byte[] toByteArray()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageAAD>;
v = interfaceinvoke v.<org.apache.parquet.format.BlockCipher$Encryptor: byte[] encrypt(byte[],byte[])>(v, v);
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput 'from'(byte[])>(v);
label:
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = v cmp 2147483647L;
if v <= 0 goto label;
v = new org.apache.parquet.io.ParquetEncodingException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (long)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Cannot write compressed page larger than Integer.MAX_VALUE bytes: \u0001");
specialinvoke v.<org.apache.parquet.io.ParquetEncodingException: void <init>(java.lang.String)>(v);
throw v;
label:
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: void reset()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor>;
if null == v goto label;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageHeaderAAD>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal>;
staticinvoke <org.apache.parquet.crypto.AesCipher: void quickUpdatePageAAD(byte[],int)>(v, v);
label:
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: boolean pageWriteChecksumEnabled>;
if v == 0 goto label;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.zip.CRC32 crc>;
virtualinvoke v.<java.util.zip.CRC32: void reset()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.zip.CRC32 crc>;
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: byte[] toByteArray()>();
virtualinvoke v.<java.util.zip.CRC32: void update(byte[])>(v);
v = <org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.apache.parquet.format.converter.ParquetMetadataConverter parquetMetadataConverter>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.zip.CRC32 crc>;
v = virtualinvoke v.<java.util.zip.CRC32: long getValue()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageHeaderAAD>;
virtualinvoke v.<org.apache.parquet.format.converter.ParquetMetadataConverter: void writeDataPageV1Header(int,int,int,org.apache.parquet.column.Encoding,org.apache.parquet.column.Encoding,org.apache.parquet.column.Encoding,int,java.io.OutputStream,org.apache.parquet.format.BlockCipher$Encryptor,byte[])>(v, v, v, v, v, v, v, v, v, v);
goto label;
label:
v = <org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.apache.parquet.format.converter.ParquetMetadataConverter parquetMetadataConverter>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageHeaderAAD>;
virtualinvoke v.<org.apache.parquet.format.converter.ParquetMetadataConverter: void writeDataPageV1Header(int,int,int,org.apache.parquet.column.Encoding,org.apache.parquet.column.Encoding,org.apache.parquet.column.Encoding,java.io.OutputStream,org.apache.parquet.format.BlockCipher$Encryptor,byte[])>(v, v, v, v, v, v, v, v, v);
label:
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long uncompressedLength>;
v = v + v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long uncompressedLength> = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long compressedLength>;
v = v + v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long compressedLength> = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long totalValueCount>;
v = v + v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long totalValueCount> = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageCount>;
v = v + 1;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageCount> = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.statistics.Statistics totalStatistics>;
if v != null goto label;
v = virtualinvoke v.<org.apache.parquet.column.statistics.Statistics: org.apache.parquet.column.statistics.Statistics copy()>();
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.statistics.Statistics totalStatistics> = v;
goto label;
label:
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.statistics.Statistics totalStatistics>;
virtualinvoke v.<org.apache.parquet.column.statistics.Statistics: void mergeStatistics(org.apache.parquet.column.statistics.Statistics)>(v);
label:
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder columnIndexBuilder>;
virtualinvoke v.<org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder: void add(org.apache.parquet.column.statistics.Statistics)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder offsetIndexBuilder>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
v = virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: long size()>();
v = v + v;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int toIntWithCheck(long)>(v);
virtualinvoke v.<org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder: void add(int,long)>(v, v);
v = newarray (org.apache.parquet.bytes.BytesInput)[2];
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput 'from'(org.apache.parquet.bytes.CapacityByteArrayOutputStream)>(v);
v[0] = v;
v[1] = v;
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput concat(org.apache.parquet.bytes.BytesInput[])>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf>;
virtualinvoke v.<org.apache.parquet.bytes.BytesInput: void writeAllTo(java.io.OutputStream)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set rlEncodings>;
interfaceinvoke v.<java.util.Set: boolean add(java.lang.Object)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set dlEncodings>;
interfaceinvoke v.<java.util.Set: boolean add(java.lang.Object)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.List dataEncodings>;
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
return;
}
public void writePageV2(int, int, int, org.apache.parquet.bytes.BytesInput, org.apache.parquet.bytes.BytesInput, org.apache.parquet.column.Encoding, org.apache.parquet.bytes.BytesInput, org.apache.parquet.column.statistics.Statistics) throws java.io.IOException
{
byte[] v, v, v, v, v, v;
org.apache.parquet.column.Encoding v;
org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor v;
org.apache.parquet.format.BlockCipher$Encryptor v, v, v, v;
org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder v;
long v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v;
org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
org.apache.parquet.bytes.CapacityByteArrayOutputStream v, v, v, v, v;
org.apache.parquet.format.converter.ParquetMetadataConverter v;
org.apache.parquet.bytes.BytesInput v, v, v, v, v, v;
org.apache.parquet.bytes.BytesInput[] v;
java.util.List v;
int v, v, v, v, v, v, v, v, v, v, v, v, v, v;
org.apache.parquet.column.statistics.Statistics v, v, v, v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v := @parameter: int;
v := @parameter: int;
v := @parameter: int;
v := @parameter: org.apache.parquet.bytes.BytesInput;
v := @parameter: org.apache.parquet.bytes.BytesInput;
v := @parameter: org.apache.parquet.column.Encoding;
v := @parameter: org.apache.parquet.bytes.BytesInput;
v := @parameter: org.apache.parquet.column.statistics.Statistics;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal>;
v = v + 1;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal> = v;
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int toIntWithCheck(long)>(v);
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int toIntWithCheck(long)>(v);
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = v + v;
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = v + v;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int toIntWithCheck(long)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor>;
v = interfaceinvoke v.<org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor: org.apache.parquet.bytes.BytesInput compress(org.apache.parquet.bytes.BytesInput)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor pageBlockEncryptor>;
if null == v goto label;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageAAD>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal>;
staticinvoke <org.apache.parquet.crypto.AesCipher: void quickUpdatePageAAD(byte[],int)>(v, v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor pageBlockEncryptor>;
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: byte[] toByteArray()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageAAD>;
v = interfaceinvoke v.<org.apache.parquet.format.BlockCipher$Encryptor: byte[] encrypt(byte[],byte[])>(v, v);
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput 'from'(byte[])>(v);
label:
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = v + v;
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = v + v;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int toIntWithCheck(long)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: void reset()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor>;
if null == v goto label;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageHeaderAAD>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal>;
staticinvoke <org.apache.parquet.crypto.AesCipher: void quickUpdatePageAAD(byte[],int)>(v, v);
label:
v = <org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.apache.parquet.format.converter.ParquetMetadataConverter parquetMetadataConverter>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] dataPageHeaderAAD>;
virtualinvoke v.<org.apache.parquet.format.converter.ParquetMetadataConverter: void writeDataPageV2Header(int,int,int,int,int,org.apache.parquet.column.Encoding,int,int,java.io.OutputStream,org.apache.parquet.format.BlockCipher$Encryptor,byte[])>(v, v, v, v, v, v, v, v, v, v, v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long uncompressedLength>;
v = v + v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long uncompressedLength> = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long compressedLength>;
v = v + v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long compressedLength> = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long totalValueCount>;
v = v + v;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long totalValueCount> = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageCount>;
v = v + 1;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageCount> = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.statistics.Statistics totalStatistics>;
if v != null goto label;
v = virtualinvoke v.<org.apache.parquet.column.statistics.Statistics: org.apache.parquet.column.statistics.Statistics copy()>();
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.statistics.Statistics totalStatistics> = v;
goto label;
label:
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.statistics.Statistics totalStatistics>;
virtualinvoke v.<org.apache.parquet.column.statistics.Statistics: void mergeStatistics(org.apache.parquet.column.statistics.Statistics)>(v);
label:
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder columnIndexBuilder>;
virtualinvoke v.<org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder: void add(org.apache.parquet.column.statistics.Statistics)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder offsetIndexBuilder>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
v = virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: long size()>();
v = v + v;
v = specialinvoke v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int toIntWithCheck(long)>(v);
virtualinvoke v.<org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder: void add(int,long)>(v, v);
v = newarray (org.apache.parquet.bytes.BytesInput)[4];
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput 'from'(org.apache.parquet.bytes.CapacityByteArrayOutputStream)>(v);
v[0] = v;
v[1] = v;
v[2] = v;
v[3] = v;
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput concat(org.apache.parquet.bytes.BytesInput[])>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf>;
virtualinvoke v.<org.apache.parquet.bytes.BytesInput: void writeAllTo(java.io.OutputStream)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.List dataEncodings>;
interfaceinvoke v.<java.util.List: boolean add(java.lang.Object)>(v);
return;
}
// Narrows a long byte count to int, failing fast when the value exceeds
// Integer.MAX_VALUE (2147483647): a Parquet page length must fit in a
// signed 32-bit int. Throws ParquetEncodingException on overflow.
// NOTE(review): locals are collapsed to a single name 'v' by the
// decompiler; the returned value is presumably the narrowed (int) cast of
// the long parameter — confirm against original source.
private int toIntWithCheck(long)
{
byte v;
org.apache.parquet.io.ParquetEncodingException v;
long v;
java.lang.String v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v := @parameter: long;
// compare the size against Integer.MAX_VALUE; <= 0 means it fits
v = v cmp 2147483647L;
if v <= 0 goto label;
// overflow: build the "Cannot write page larger than ..." message
// (indy string concat) and throw
v = new org.apache.parquet.io.ParquetEncodingException;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (long)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("Cannot write page larger than 2147483647 bytes: \u0001");
specialinvoke v.<org.apache.parquet.io.ParquetEncodingException: void <init>(java.lang.String)>(v);
throw v;
label:
return v;
}
// Returns the current in-memory size of this chunk's page buffer
// (buf.size()). Part of the PageWriter interface.
public long getMemSize()
{
long v;
org.apache.parquet.bytes.CapacityByteArrayOutputStream v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf>;
v = virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: long size()>();
return v;
}
// Flushes this column chunk's buffered pages and metadata to the file via
// the given ParquetFileWriter, then resets per-chunk state so the writer
// can be reused for the next row group. Two paths exist: when
// headerBlockEncryptor is null the plaintext 14-argument writeColumnChunk
// overload is called; otherwise the 18-argument overload additionally
// receives the header encryptor, row-group/column ordinals and the file
// AAD for module encryption. After writing, a debug summary is logged and
// the encoding sets/list, pageCount and pageOrdinal are reset.
// NOTE(review): decompiler collapsed all locals to 'v'; argument order in
// the two writeColumnChunk calls is inferred from the load order above
// each call — confirm against original source.
public void writeToFileWriter(org.apache.parquet.hadoop.ParquetFileWriter) throws java.io.IOException
{
byte[] v;
org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor v, v;
java.lang.Integer v, v, v, v;
java.lang.Long v, v, v, v;
org.apache.parquet.format.BlockCipher$Encryptor v, v;
org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder v, v;
java.util.Set v, v, v, v, v, v;
org.apache.parquet.column.ColumnDescriptor v, v, v;
org.apache.parquet.hadoop.metadata.CompressionCodecName v, v;
java.lang.Object[] v, v;
long v, v, v, v, v, v, v, v, v, v;
org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder v, v;
java.lang.String v, v, v;
org.apache.parquet.hadoop.ParquetFileWriter v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
org.apache.parquet.bytes.CapacityByteArrayOutputStream v, v, v;
org.apache.parquet.bytes.BytesInput v, v;
boolean v;
java.util.List v, v, v, v;
org.apache.parquet.column.page.DictionaryPage v, v, v, v, v, v;
java.util.HashSet v;
org.apache.parquet.column.values.bloomfilter.BloomFilter v, v;
int v, v, v, v, v, v, v;
org.apache.parquet.column.statistics.Statistics v, v;
org.slf4j.Logger v, v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v := @parameter: org.apache.parquet.hadoop.ParquetFileWriter;
// branch on encryption: a non-null headerBlockEncryptor jumps to the
// encrypted path below
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor>;
if null != v goto label;
// --- plaintext path: load path, value count, codec, dictionary page,
// buffered bytes, lengths, statistics, index builders, bloom filter and
// encodings, then write the chunk without encryption parameters ---
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.ColumnDescriptor path>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long totalValueCount>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor>;
v = interfaceinvoke v.<org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor: org.apache.parquet.hadoop.metadata.CompressionCodecName getCodecName()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.page.DictionaryPage dictionaryPage>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf>;
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput 'from'(org.apache.parquet.bytes.CapacityByteArrayOutputStream)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long uncompressedLength>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long compressedLength>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.statistics.Statistics totalStatistics>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder columnIndexBuilder>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder offsetIndexBuilder>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.values.bloomfilter.BloomFilter bloomFilter>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set rlEncodings>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set dlEncodings>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.List dataEncodings>;
virtualinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter: void writeColumnChunk(org.apache.parquet.column.ColumnDescriptor,long,org.apache.parquet.hadoop.metadata.CompressionCodecName,org.apache.parquet.column.page.DictionaryPage,org.apache.parquet.bytes.BytesInput,long,long,org.apache.parquet.column.statistics.Statistics,org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder,org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder,org.apache.parquet.column.values.bloomfilter.BloomFilter,java.util.Set,java.util.Set,java.util.List)>(v, v, v, v, v, v, v, v, v, v, v, v, v, v);
// skip past the encrypted path
goto label;
label:
// --- encrypted path: same chunk metadata plus the header encryptor,
// row-group ordinal, column ordinal and file AAD ---
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.ColumnDescriptor path>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long totalValueCount>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor>;
v = interfaceinvoke v.<org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor: org.apache.parquet.hadoop.metadata.CompressionCodecName getCodecName()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.page.DictionaryPage dictionaryPage>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf>;
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput 'from'(org.apache.parquet.bytes.CapacityByteArrayOutputStream)>(v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long uncompressedLength>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long compressedLength>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.statistics.Statistics totalStatistics>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder columnIndexBuilder>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder offsetIndexBuilder>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.values.bloomfilter.BloomFilter bloomFilter>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set rlEncodings>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set dlEncodings>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.List dataEncodings>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int rowGroupOrdinal>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int columnOrdinal>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] fileAAD>;
virtualinvoke v.<org.apache.parquet.hadoop.ParquetFileWriter: void writeColumnChunk(org.apache.parquet.column.ColumnDescriptor,long,org.apache.parquet.hadoop.metadata.CompressionCodecName,org.apache.parquet.column.page.DictionaryPage,org.apache.parquet.bytes.BytesInput,long,long,org.apache.parquet.column.statistics.Statistics,org.apache.parquet.internal.column.columnindex.ColumnIndexBuilder,org.apache.parquet.internal.column.columnindex.OffsetIndexBuilder,org.apache.parquet.column.values.bloomfilter.BloomFilter,java.util.Set,java.util.Set,java.util.List,org.apache.parquet.format.BlockCipher$Encryptor,int,int,byte[])>(v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v, v);
label:
// --- debug logging (guarded by LOG.isDebugEnabled) summarizing buffered
// bytes, column path, value count, raw/compressed sizes, page count and
// the distinct set of data encodings ---
v = <org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.slf4j.Logger LOG>;
v = interfaceinvoke v.<org.slf4j.Logger: boolean isDebugEnabled()>();
if v == 0 goto label;
v = <org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.slf4j.Logger LOG>;
v = newarray (java.lang.Object)[7];
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf>;
v = virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: long size()>();
v = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(v);
v[0] = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.ColumnDescriptor path>;
v[1] = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long totalValueCount>;
v = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(v);
v[2] = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long uncompressedLength>;
v = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(v);
v[3] = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: long compressedLength>;
v = staticinvoke <java.lang.Long: java.lang.Long valueOf(long)>(v);
v[4] = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageCount>;
v = staticinvoke <java.lang.Integer: java.lang.Integer valueOf(int)>(v);
v[5] = v;
// HashSet(dataEncodings) de-duplicates encodings for display
v = new java.util.HashSet;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.List dataEncodings>;
specialinvoke v.<java.util.HashSet: void <init>(java.util.Collection)>(v);
v[6] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>("written %,dB for %s: %,d values, %,dB raw, %,dB comp, %d pages, encodings: %s", v);
// append dictionary details when a dictionary page exists, else ""
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.page.DictionaryPage dictionaryPage>;
if v == null goto label;
v = newarray (java.lang.Object)[3];
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.page.DictionaryPage dictionaryPage>;
v = virtualinvoke v.<org.apache.parquet.column.page.DictionaryPage: int getDictionarySize()>();
v = staticinvoke <java.lang.Integer: java.lang.Integer valueOf(int)>(v);
v[0] = v;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.page.DictionaryPage dictionaryPage>;
v = virtualinvoke v.<org.apache.parquet.column.page.DictionaryPage: int getUncompressedSize()>();
v = staticinvoke <java.lang.Integer: java.lang.Integer valueOf(int)>(v);
v[1] = v;
// NOTE(review): the "%,dB comp" slot reuses getDictionarySize() instead
// of a compressed-size accessor — confirm against original source
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.page.DictionaryPage dictionaryPage>;
v = virtualinvoke v.<org.apache.parquet.column.page.DictionaryPage: int getDictionarySize()>();
v = staticinvoke <java.lang.Integer: java.lang.Integer valueOf(int)>(v);
v[2] = v;
v = staticinvoke <java.lang.String: java.lang.String format(java.lang.String,java.lang.Object[])>(", dic { %,d entries, %,dB raw, %,dB comp}", v);
goto label;
label:
v = "";
label:
// concatenate summary + dictionary suffix and emit at debug level
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String,java.lang.String)>(v, v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u0001\u0001");
interfaceinvoke v.<org.slf4j.Logger: void debug(java.lang.String)>(v);
label:
// --- reset per-chunk state for the next row group ---
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set rlEncodings>;
interfaceinvoke v.<java.util.Set: void clear()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.Set dlEncodings>;
interfaceinvoke v.<java.util.Set: void clear()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: java.util.List dataEncodings>;
interfaceinvoke v.<java.util.List: void clear()>();
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageCount> = 0;
// pageOrdinal restarts at -1 (incremented before each page write)
v = (int) -1;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int pageOrdinal> = v;
return;
}
// Returns the size of the page buffer (buf.size()). Part of the
// PageWriter interface.
// NOTE(review): uses size() rather than a capacity accessor, so
// "allocated" here reports bytes written, identical to getMemSize() —
// confirm against original source whether getCapacity() was intended.
public long allocatedSize()
{
long v;
org.apache.parquet.bytes.CapacityByteArrayOutputStream v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf>;
v = virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: long size()>();
return v;
}
// Compresses (and, when a page encryptor is configured, encrypts) the
// given dictionary page and stores a defensive copy in the
// dictionaryPage field for later flushing by writeToFileWriter. Only one
// dictionary page per column chunk is permitted; a second call throws
// ParquetEncodingException.
public void writeDictionaryPage(org.apache.parquet.column.page.DictionaryPage) throws java.io.IOException
{
byte[] v, v, v, v;
org.apache.parquet.column.Encoding v;
long v;
org.apache.parquet.column.page.DictionaryPage v, v, v;
org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor v;
org.apache.parquet.bytes.BytesInput v, v, v;
int v, v, v, v;
org.apache.parquet.format.BlockCipher$Encryptor v, v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
org.apache.parquet.crypto.ModuleCipherFactory$ModuleType v;
org.apache.parquet.io.ParquetEncodingException v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v := @parameter: org.apache.parquet.column.page.DictionaryPage;
// reject a second dictionary page for this chunk
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.page.DictionaryPage dictionaryPage>;
if v == null goto label;
v = new org.apache.parquet.io.ParquetEncodingException;
specialinvoke v.<org.apache.parquet.io.ParquetEncodingException: void <init>(java.lang.String)>("Only one dictionary page is allowed");
throw v;
label:
// capture the uncompressed size, then compress the raw bytes
v = virtualinvoke v.<org.apache.parquet.column.page.DictionaryPage: org.apache.parquet.bytes.BytesInput getBytes()>();
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: long size()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor>;
v = interfaceinvoke v.<org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor: org.apache.parquet.bytes.BytesInput compress(org.apache.parquet.bytes.BytesInput)>(v);
// optional encryption: build the DictionaryPage module AAD (page
// ordinal -1, since a dictionary page is not a data page) and encrypt
// the compressed bytes
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor pageBlockEncryptor>;
if null == v goto label;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: byte[] fileAAD>;
v = <org.apache.parquet.crypto.ModuleCipherFactory$ModuleType: org.apache.parquet.crypto.ModuleCipherFactory$ModuleType DictionaryPage>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int rowGroupOrdinal>;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: int columnOrdinal>;
v = (int) -1;
v = staticinvoke <org.apache.parquet.crypto.AesCipher: byte[] createModuleAAD(byte[],org.apache.parquet.crypto.ModuleCipherFactory$ModuleType,int,int,int)>(v, v, v, v, v);
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.format.BlockCipher$Encryptor pageBlockEncryptor>;
v = virtualinvoke v.<org.apache.parquet.bytes.BytesInput: byte[] toByteArray()>();
v = interfaceinvoke v.<org.apache.parquet.format.BlockCipher$Encryptor: byte[] encrypt(byte[],byte[])>(v, v);
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput 'from'(byte[])>(v);
label:
// store a copied DictionaryPage (BytesInput.copy ensures the bytes are
// not backed by a reusable buffer) preserving size and encoding
v = new org.apache.parquet.column.page.DictionaryPage;
v = staticinvoke <org.apache.parquet.bytes.BytesInput: org.apache.parquet.bytes.BytesInput copy(org.apache.parquet.bytes.BytesInput)>(v);
v = virtualinvoke v.<org.apache.parquet.column.page.DictionaryPage: int getDictionarySize()>();
v = virtualinvoke v.<org.apache.parquet.column.page.DictionaryPage: org.apache.parquet.column.Encoding getEncoding()>();
specialinvoke v.<org.apache.parquet.column.page.DictionaryPage: void <init>(org.apache.parquet.bytes.BytesInput,int,int,org.apache.parquet.column.Encoding)>(v, v, v, v);
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.page.DictionaryPage dictionaryPage> = v;
return;
}
// Delegates to buf.memUsageString with the caller's prefix extended by
// " ColumnChunkPageWriter" (indy string concat).
// NOTE(review): the concat recipe literal "\u ColumnChunkPageWriter"
// appears garbled by decompilation — presumably
// "\u0001 ColumnChunkPageWriter" in the original bytecode; confirm.
public java.lang.String memUsageString(java.lang.String)
{
org.apache.parquet.bytes.CapacityByteArrayOutputStream v;
java.lang.String v, v, v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v := @parameter: java.lang.String;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf>;
v = dynamicinvoke "makeConcatWithConstants" <java.lang.String (java.lang.String)>(v) <java.lang.invoke.StringConcatFactory: java.lang.invoke.CallSite makeConcatWithConstants(java.lang.invoke.MethodHandles$Lookup,java.lang.String,java.lang.invoke.MethodType,java.lang.String,java.lang.Object[])>("\u ColumnChunkPageWriter");
v = virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: java.lang.String memUsageString(java.lang.String)>(v);
return v;
}
// BloomFilterWriter implementation: records the bloom filter for this
// column chunk; it is written out later by writeToFileWriter.
public void writeBloomFilter(org.apache.parquet.column.values.bloomfilter.BloomFilter)
{
org.apache.parquet.column.values.bloomfilter.BloomFilter v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v := @parameter: org.apache.parquet.column.values.bloomfilter.BloomFilter;
v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.column.values.bloomfilter.BloomFilter bloomFilter> = v;
return;
}
// Closeable implementation: releases both byte buffers (the per-page
// tempOutputStream scratch buffer and the chunk-level buf), allowing
// their allocator-backed memory to be reclaimed.
public void close()
{
org.apache.parquet.bytes.CapacityByteArrayOutputStream v, v;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter v;
v := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream tempOutputStream>;
virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: void close()>();
v = v.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: org.apache.parquet.bytes.CapacityByteArrayOutputStream buf>;
virtualinvoke v.<org.apache.parquet.bytes.CapacityByteArrayOutputStream: void close()>();
return;
}
}