public class org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore extends java.lang.Object implements org.apache.parquet.column.page.PageWriteStore, org.apache.parquet.column.values.bloomfilter.BloomFilterWriteStore, java.lang.AutoCloseable
{
private static final org.slf4j.Logger LOG;
private static org.apache.parquet.format.converter.ParquetMetadataConverter parquetMetadataConverter;
private final java.util.Map writers;
private final org.apache.parquet.schema.MessageType schema;
public void <init>(org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor, org.apache.parquet.schema.MessageType, int, int, org.apache.parquet.bytes.ByteBufferAllocator, int)
{
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore this;
org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor;
org.apache.parquet.schema.MessageType schema;
int initialSlabSize, maxCapacityHint, columnIndexTruncateLength;   // parameter names inferred; not preserved in the bytecode
org.apache.parquet.bytes.ByteBufferAllocator allocator;
this := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore;
compressor := @parameter0: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor;
schema := @parameter1: org.apache.parquet.schema.MessageType;
initialSlabSize := @parameter2: int;
maxCapacityHint := @parameter3: int;
allocator := @parameter4: org.apache.parquet.bytes.ByteBufferAllocator;
columnIndexTruncateLength := @parameter5: int;
// Delegate to the full constructor with the trailing boolean set to true (inferred to be pageWriteChecksumEnabled).
specialinvoke this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: void <init>(org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor,org.apache.parquet.schema.MessageType,int,int,org.apache.parquet.bytes.ByteBufferAllocator,int,boolean)>(compressor, schema, initialSlabSize, maxCapacityHint, allocator, columnIndexTruncateLength, 1);
return;
}
public void <init>(org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor, org.apache.parquet.schema.MessageType, int, int, org.apache.parquet.bytes.ByteBufferAllocator, int, boolean)
{
java.util.HashMap writerMap;
java.util.Map writers;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore this;
org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor;
org.apache.parquet.schema.MessageType schema;
org.apache.parquet.bytes.ByteBufferAllocator allocator;
int initialSlabSize, maxCapacityHint, columnIndexTruncateLength;   // parameter names inferred; not preserved in the bytecode
boolean pageWriteChecksumEnabled, hasNext;
java.util.List columns;
java.util.Iterator columnIter;
java.lang.Object next;
org.apache.parquet.column.ColumnDescriptor path;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter pageWriter;

this := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore;
compressor := @parameter0: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor;
schema := @parameter1: org.apache.parquet.schema.MessageType;
initialSlabSize := @parameter2: int;
maxCapacityHint := @parameter3: int;
allocator := @parameter4: org.apache.parquet.bytes.ByteBufferAllocator;
columnIndexTruncateLength := @parameter5: int;
pageWriteChecksumEnabled := @parameter6: boolean;

specialinvoke this.<java.lang.Object: void <init>()>();
writerMap = new java.util.HashMap;
specialinvoke writerMap.<java.util.HashMap: void <init>()>();
this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: java.util.Map writers> = writerMap;
this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.apache.parquet.schema.MessageType schema> = schema;

// Register one unencrypted ColumnChunkPageWriter per column of the schema.
columns = virtualinvoke schema.<org.apache.parquet.schema.MessageType: java.util.List getColumns()>();
columnIter = interfaceinvoke columns.<java.util.List: java.util.Iterator iterator()>();

loop:
hasNext = interfaceinvoke columnIter.<java.util.Iterator: boolean hasNext()>();
if hasNext == 0 goto done;
next = interfaceinvoke columnIter.<java.util.Iterator: java.lang.Object next()>();
path = (org.apache.parquet.column.ColumnDescriptor) next;
writers = this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: java.util.Map writers>;
pageWriter = new org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
// No encryptors, no file AAD, and -1 for the row-group/column ordinals in the unencrypted case.
specialinvoke pageWriter.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: void <init>(org.apache.parquet.column.ColumnDescriptor,org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor,int,int,org.apache.parquet.bytes.ByteBufferAllocator,int,boolean,org.apache.parquet.format.BlockCipher$Encryptor,org.apache.parquet.format.BlockCipher$Encryptor,byte[],int,int)>(path, compressor, initialSlabSize, maxCapacityHint, allocator, columnIndexTruncateLength, pageWriteChecksumEnabled, null, null, null, -1, -1);
interfaceinvoke writers.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>(path, pageWriter);
goto loop;

done:
return;
}
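/*
 * Source-level sketch of the two constructors above. This is a reconstruction, not the
 * original source: the bytecode does not preserve parameter names, so initialSlabSize,
 * maxCapacityHint, columnIndexTruncateLength and pageWriteChecksumEnabled are inferred
 * from how the values are forwarded to ColumnChunkPageWriter.
 *
 *   public ParquetColumnChunkPageWriteStore(
 *       CompressionCodecFactory.BytesInputCompressor compressor, MessageType schema,
 *       int initialSlabSize, int maxCapacityHint, ByteBufferAllocator allocator,
 *       int columnIndexTruncateLength) {
 *     this(compressor, schema, initialSlabSize, maxCapacityHint, allocator,
 *         columnIndexTruncateLength, true);
 *   }
 *
 *   public ParquetColumnChunkPageWriteStore(
 *       CompressionCodecFactory.BytesInputCompressor compressor, MessageType schema,
 *       int initialSlabSize, int maxCapacityHint, ByteBufferAllocator allocator,
 *       int columnIndexTruncateLength, boolean pageWriteChecksumEnabled) {
 *     this.writers = new HashMap<>();
 *     this.schema = schema;
 *     for (ColumnDescriptor path : schema.getColumns()) {
 *       writers.put(path, new ColumnChunkPageWriter(path, compressor, initialSlabSize,
 *           maxCapacityHint, allocator, columnIndexTruncateLength,
 *           pageWriteChecksumEnabled, null, null, null, -1, -1));
 *     }
 *   }
 */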
public void <init>(org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor, org.apache.parquet.schema.MessageType, int, int, org.apache.parquet.bytes.ByteBufferAllocator, int, boolean, org.apache.parquet.crypto.InternalFileEncryptor, int)
{
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore this;
org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor compressor;
org.apache.parquet.schema.MessageType schema;
org.apache.parquet.bytes.ByteBufferAllocator allocator;
org.apache.parquet.crypto.InternalFileEncryptor fileEncryptor;
int initialSlabSize, maxCapacityHint, columnIndexTruncateLength, rowGroupOrdinal, columnOrdinal;   // parameter names inferred; not preserved in the bytecode
boolean pageWriteChecksumEnabled, hasNext, encrypted;
byte[] fileAAD;
java.util.HashMap writerMap;
java.util.Map writers;
java.util.List columns;
java.util.Iterator columnIter;
java.lang.Object next;
org.apache.parquet.column.ColumnDescriptor column;
java.lang.String[] pathArray;
org.apache.parquet.hadoop.metadata.ColumnPath columnPath;
org.apache.parquet.crypto.InternalColumnEncryptionSetup columnSetup;
org.apache.parquet.format.BlockCipher$Encryptor headerBlockEncryptor, pageBlockEncryptor;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter pageWriter;

this := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore;
compressor := @parameter0: org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor;
schema := @parameter1: org.apache.parquet.schema.MessageType;
initialSlabSize := @parameter2: int;
maxCapacityHint := @parameter3: int;
allocator := @parameter4: org.apache.parquet.bytes.ByteBufferAllocator;
columnIndexTruncateLength := @parameter5: int;
pageWriteChecksumEnabled := @parameter6: boolean;
fileEncryptor := @parameter7: org.apache.parquet.crypto.InternalFileEncryptor;
rowGroupOrdinal := @parameter8: int;

specialinvoke this.<java.lang.Object: void <init>()>();
writerMap = new java.util.HashMap;
specialinvoke writerMap.<java.util.HashMap: void <init>()>();
this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: java.util.Map writers> = writerMap;
this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.apache.parquet.schema.MessageType schema> = schema;

if null != fileEncryptor goto encryptedPath;

// No file encryptor: register plain page writers, exactly as in the unencrypted constructor.
columns = virtualinvoke schema.<org.apache.parquet.schema.MessageType: java.util.List getColumns()>();
columnIter = interfaceinvoke columns.<java.util.List: java.util.Iterator iterator()>();

plainLoop:
hasNext = interfaceinvoke columnIter.<java.util.Iterator: boolean hasNext()>();
if hasNext == 0 goto plainDone;
next = interfaceinvoke columnIter.<java.util.Iterator: java.lang.Object next()>();
column = (org.apache.parquet.column.ColumnDescriptor) next;
writers = this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: java.util.Map writers>;
pageWriter = new org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
specialinvoke pageWriter.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: void <init>(org.apache.parquet.column.ColumnDescriptor,org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor,int,int,org.apache.parquet.bytes.ByteBufferAllocator,int,boolean,org.apache.parquet.format.BlockCipher$Encryptor,org.apache.parquet.format.BlockCipher$Encryptor,byte[],int,int)>(column, compressor, initialSlabSize, maxCapacityHint, allocator, columnIndexTruncateLength, pageWriteChecksumEnabled, null, null, null, -1, -1);
interfaceinvoke writers.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>(column, pageWriter);
goto plainLoop;

plainDone:
return;

encryptedPath:
// Encrypting store: look up the per-column encryption setup and hand the header/page
// encryptors, the file AAD and the ordinals to each column's page writer.
columnOrdinal = (int) -1;
fileAAD = virtualinvoke fileEncryptor.<org.apache.parquet.crypto.InternalFileEncryptor: byte[] getFileAAD()>();
columns = virtualinvoke schema.<org.apache.parquet.schema.MessageType: java.util.List getColumns()>();
columnIter = interfaceinvoke columns.<java.util.List: java.util.Iterator iterator()>();

encryptedLoop:
hasNext = interfaceinvoke columnIter.<java.util.Iterator: boolean hasNext()>();
if hasNext == 0 goto encryptedDone;
next = interfaceinvoke columnIter.<java.util.Iterator: java.lang.Object next()>();
column = (org.apache.parquet.column.ColumnDescriptor) next;
columnOrdinal = columnOrdinal + 1;
headerBlockEncryptor = null;
pageBlockEncryptor = null;
pathArray = virtualinvoke column.<org.apache.parquet.column.ColumnDescriptor: java.lang.String[] getPath()>();
columnPath = staticinvoke <org.apache.parquet.hadoop.metadata.ColumnPath: org.apache.parquet.hadoop.metadata.ColumnPath get(java.lang.String[])>(pathArray);
columnSetup = virtualinvoke fileEncryptor.<org.apache.parquet.crypto.InternalFileEncryptor: org.apache.parquet.crypto.InternalColumnEncryptionSetup getColumnSetup(org.apache.parquet.hadoop.metadata.ColumnPath,boolean,int)>(columnPath, 1, columnOrdinal);
encrypted = virtualinvoke columnSetup.<org.apache.parquet.crypto.InternalColumnEncryptionSetup: boolean isEncrypted()>();
if encrypted == 0 goto buildWriter;
headerBlockEncryptor = virtualinvoke columnSetup.<org.apache.parquet.crypto.InternalColumnEncryptionSetup: org.apache.parquet.format.BlockCipher$Encryptor getMetaDataEncryptor()>();
pageBlockEncryptor = virtualinvoke columnSetup.<org.apache.parquet.crypto.InternalColumnEncryptionSetup: org.apache.parquet.format.BlockCipher$Encryptor getDataEncryptor()>();

buildWriter:
writers = this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: java.util.Map writers>;
pageWriter = new org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter;
specialinvoke pageWriter.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: void <init>(org.apache.parquet.column.ColumnDescriptor,org.apache.parquet.compression.CompressionCodecFactory$BytesInputCompressor,int,int,org.apache.parquet.bytes.ByteBufferAllocator,int,boolean,org.apache.parquet.format.BlockCipher$Encryptor,org.apache.parquet.format.BlockCipher$Encryptor,byte[],int,int)>(column, compressor, initialSlabSize, maxCapacityHint, allocator, columnIndexTruncateLength, pageWriteChecksumEnabled, headerBlockEncryptor, pageBlockEncryptor, fileAAD, rowGroupOrdinal, columnOrdinal);
interfaceinvoke writers.<java.util.Map: java.lang.Object put(java.lang.Object,java.lang.Object)>(column, pageWriter);
goto encryptedLoop;

encryptedDone:
return;
}
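/*
 * Source-level sketch of the encrypting constructor above (reconstruction; parameter
 * names are inferred). When an InternalFileEncryptor is supplied, each column writer
 * receives the column's metadata/data encryptors, the file AAD and its ordinals so
 * that page headers and page bodies can be encrypted (Parquet modular encryption).
 *
 *   if (fileEncryptor == null) {
 *     // same per-column loop as the unencrypted constructor
 *     return;
 *   }
 *   int columnOrdinal = -1;
 *   byte[] fileAAD = fileEncryptor.getFileAAD();
 *   for (ColumnDescriptor path : schema.getColumns()) {
 *     columnOrdinal++;
 *     BlockCipher.Encryptor headerBlockEncryptor = null;
 *     BlockCipher.Encryptor pageBlockEncryptor = null;
 *     InternalColumnEncryptionSetup columnSetup =
 *         fileEncryptor.getColumnSetup(ColumnPath.get(path.getPath()), true, columnOrdinal);
 *     if (columnSetup.isEncrypted()) {
 *       headerBlockEncryptor = columnSetup.getMetaDataEncryptor();
 *       pageBlockEncryptor = columnSetup.getDataEncryptor();
 *     }
 *     writers.put(path, new ColumnChunkPageWriter(path, compressor, initialSlabSize,
 *         maxCapacityHint, allocator, columnIndexTruncateLength, pageWriteChecksumEnabled,
 *         headerBlockEncryptor, pageBlockEncryptor, fileAAD, rowGroupOrdinal, columnOrdinal));
 *   }
 */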
public org.apache.parquet.column.page.PageWriter getPageWriter(org.apache.parquet.column.ColumnDescriptor)
{
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore this;
org.apache.parquet.column.ColumnDescriptor path;
java.util.Map writers;
java.lang.Object writer;
this := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore;
path := @parameter0: org.apache.parquet.column.ColumnDescriptor;
writers = this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: java.util.Map writers>;
writer = interfaceinvoke writers.<java.util.Map: java.lang.Object get(java.lang.Object)>(path);
return (org.apache.parquet.column.page.PageWriter) writer;
}
public org.apache.parquet.column.values.bloomfilter.BloomFilterWriter getBloomFilterWriter(org.apache.parquet.column.ColumnDescriptor)
{
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore this;
org.apache.parquet.column.ColumnDescriptor path;
java.util.Map writers;
java.lang.Object writer;
this := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore;
path := @parameter0: org.apache.parquet.column.ColumnDescriptor;
writers = this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: java.util.Map writers>;
writer = interfaceinvoke writers.<java.util.Map: java.lang.Object get(java.lang.Object)>(path);
return (org.apache.parquet.column.values.bloomfilter.BloomFilterWriter) writer;
}
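/*
 * Source-level sketch of the two lookups above (reconstruction): both the PageWriteStore
 * and the BloomFilterWriteStore interfaces are served by the same per-column
 * ColumnChunkPageWriter, keyed by ColumnDescriptor.
 *
 *   @Override
 *   public PageWriter getPageWriter(ColumnDescriptor path) {
 *     return writers.get(path);
 *   }
 *
 *   @Override
 *   public BloomFilterWriter getBloomFilterWriter(ColumnDescriptor path) {
 *     return writers.get(path);
 *   }
 */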
public void flushToFileWriter(org.apache.parquet.hadoop.ParquetFileWriter) throws java.io.IOException
{
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore this;
org.apache.parquet.hadoop.ParquetFileWriter fileWriter;
org.apache.parquet.schema.MessageType schema;
java.util.List columns;
java.util.Iterator columnIter;
java.util.Map writers;
java.lang.Object next, value;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter pageWriter;
boolean hasNext;

this := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore;
fileWriter := @parameter0: org.apache.parquet.hadoop.ParquetFileWriter;
schema = this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.apache.parquet.schema.MessageType schema>;
columns = virtualinvoke schema.<org.apache.parquet.schema.MessageType: java.util.List getColumns()>();
columnIter = interfaceinvoke columns.<java.util.List: java.util.Iterator iterator()>();

// Flush the buffered column chunks to the file writer in schema order.
loop:
hasNext = interfaceinvoke columnIter.<java.util.Iterator: boolean hasNext()>();
if hasNext == 0 goto done;
next = interfaceinvoke columnIter.<java.util.Iterator: java.lang.Object next()>();
writers = this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: java.util.Map writers>;
value = interfaceinvoke writers.<java.util.Map: java.lang.Object get(java.lang.Object)>(next);
pageWriter = (org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter) value;
virtualinvoke pageWriter.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: void writeToFileWriter(org.apache.parquet.hadoop.ParquetFileWriter)>(fileWriter);
goto loop;

done:
return;
}
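/*
 * Source-level sketch (reconstruction): column chunks are written out in schema order,
 * so the row-group layout in the file follows MessageType.getColumns().
 *
 *   public void flushToFileWriter(ParquetFileWriter writer) throws IOException {
 *     for (ColumnDescriptor path : schema.getColumns()) {
 *       ColumnChunkPageWriter pageWriter = writers.get(path);
 *       pageWriter.writeToFileWriter(writer);
 *     }
 *   }
 */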
public void close()
{
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore this;
java.util.Map writers;
java.util.Collection values;
java.util.Iterator writerIter;
java.lang.Object next;
org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter pageWriter;
boolean hasNext;

this := @this: org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore;
writers = this.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: java.util.Map writers>;
values = interfaceinvoke writers.<java.util.Map: java.util.Collection values()>();
writerIter = interfaceinvoke values.<java.util.Collection: java.util.Iterator iterator()>();

// Release the buffers held by every page writer.
loop:
hasNext = interfaceinvoke writerIter.<java.util.Iterator: boolean hasNext()>();
if hasNext == 0 goto done;
next = interfaceinvoke writerIter.<java.util.Iterator: java.lang.Object next()>();
pageWriter = (org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter) next;
virtualinvoke pageWriter.<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore$ColumnChunkPageWriter: void close()>();
goto loop;

done:
return;
}
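/*
 * Source-level sketch (reconstruction) of close() above:
 *
 *   @Override
 *   public void close() {
 *     for (ColumnChunkPageWriter writer : writers.values()) {
 *       writer.close();
 *     }
 *   }
 */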
static void <clinit>()
{
org.slf4j.Logger logger;
org.apache.parquet.format.converter.ParquetMetadataConverter converter;
logger = staticinvoke <org.slf4j.LoggerFactory: org.slf4j.Logger getLogger(java.lang.Class)>(class "Lorg/apache/parquet/hadoop/ParquetColumnChunkPageWriteStore;");
<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.slf4j.Logger LOG> = logger;
converter = new org.apache.parquet.format.converter.ParquetMetadataConverter;
specialinvoke converter.<org.apache.parquet.format.converter.ParquetMetadataConverter: void <init>()>();
<org.apache.parquet.hadoop.ParquetColumnChunkPageWriteStore: org.apache.parquet.format.converter.ParquetMetadataConverter parquetMetadataConverter> = converter;
return;
}
}
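/*
 * Usage sketch (an assumption, not taken from this file): a typical row-group flush
 * would buffer pages through this store and then hand the finished column chunks to a
 * ParquetFileWriter. startBlock/endBlock are public ParquetFileWriter methods;
 * recordCount and the store construction arguments are placeholders, and the calls
 * belong in a method that propagates IOException.
 *
 *   ParquetColumnChunkPageWriteStore store = new ParquetColumnChunkPageWriteStore(
 *       compressor, schema, initialSlabSize, maxCapacityHint, allocator,
 *       columnIndexTruncateLength);
 *   // ... column writers obtained via store.getPageWriter(column) buffer the pages ...
 *   fileWriter.startBlock(recordCount);
 *   store.flushToFileWriter(fileWriter);
 *   fileWriter.endBlock();
 *   store.close();
 */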