//! Contains column writer API.

use std::cmp;
use std::collections::VecDeque;
use std::mem;
use std::rc::Rc;
use basic::{Compression, Encoding, PageType, Type};
use column::page::{CompressedPage, Page, PageWriteSpec, PageWriter};
use compression::{Codec, create_codec};
use data_type::*;
use encodings::encoding::{DictEncoder, Encoder, get_encoder};
use encodings::levels::{LevelEncoder, max_buffer_size};
use errors::{ParquetError, Result};
use file::metadata::ColumnChunkMetaData;
use file::properties::{WriterPropertiesPtr, WriterVersion};
use schema::types::ColumnDescPtr;
use util::memory::{ByteBufferPtr, MemTracker};
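
/// Column writer for a Parquet type.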
pub enum ColumnWriter {
BoolColumnWriter(ColumnWriterImpl<BoolType>),
Int32ColumnWriter(ColumnWriterImpl<Int32Type>),
Int64ColumnWriter(ColumnWriterImpl<Int64Type>),
Int96ColumnWriter(ColumnWriterImpl<Int96Type>),
FloatColumnWriter(ColumnWriterImpl<FloatType>),
DoubleColumnWriter(ColumnWriterImpl<DoubleType>),
ByteArrayColumnWriter(ColumnWriterImpl<ByteArrayType>),
FixedLenByteArrayColumnWriter(ColumnWriterImpl<FixedLenByteArrayType>)
}
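
/// Gets a specific column writer corresponding to column descriptor `descr`.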
pub fn get_column_writer(
descr: ColumnDescPtr,
props: WriterPropertiesPtr,
page_writer: Box<PageWriter>
) -> ColumnWriter {
match descr.physical_type() {
Type::BOOLEAN => ColumnWriter::BoolColumnWriter(
ColumnWriterImpl::new(descr, props, page_writer)),
Type::INT32 => ColumnWriter::Int32ColumnWriter(
ColumnWriterImpl::new(descr, props, page_writer)),
Type::INT64 => ColumnWriter::Int64ColumnWriter(
ColumnWriterImpl::new(descr, props, page_writer)),
Type::INT96 => ColumnWriter::Int96ColumnWriter(
ColumnWriterImpl::new(descr, props, page_writer)),
Type::FLOAT => ColumnWriter::FloatColumnWriter(
ColumnWriterImpl::new(descr, props, page_writer)),
Type::DOUBLE => ColumnWriter::DoubleColumnWriter(
ColumnWriterImpl::new(descr, props, page_writer)),
Type::BYTE_ARRAY => ColumnWriter::ByteArrayColumnWriter(
ColumnWriterImpl::new(descr, props, page_writer)),
Type::FIXED_LEN_BYTE_ARRAY => ColumnWriter::FixedLenByteArrayColumnWriter(
ColumnWriterImpl::new(descr, props, page_writer))
}
}
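
/// Gets a typed column writer for the specific type `T`, by "up-casting" `col_writer` of
/// non-generic type to a generic column writer type `ColumnWriterImpl`.
///
/// NOTE: the caller MUST guarantee that the actual enum value of `col_writer` matches the
/// type `T`; the conversion below is a `transmute`, so a mismatch is undefined behaviour.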
pub fn get_typed_column_writer<T: DataType>(
col_writer: ColumnWriter
) -> ColumnWriterImpl<T> {
match col_writer {
ColumnWriter::BoolColumnWriter(r) => unsafe { mem::transmute(r) },
ColumnWriter::Int32ColumnWriter(r) => unsafe { mem::transmute(r) },
ColumnWriter::Int64ColumnWriter(r) => unsafe { mem::transmute(r) },
ColumnWriter::Int96ColumnWriter(r) => unsafe { mem::transmute(r) },
ColumnWriter::FloatColumnWriter(r) => unsafe { mem::transmute(r) },
ColumnWriter::DoubleColumnWriter(r) => unsafe { mem::transmute(r) },
ColumnWriter::ByteArrayColumnWriter(r) => unsafe { mem::transmute(r) },
ColumnWriter::FixedLenByteArrayColumnWriter(r) => unsafe { mem::transmute(r) }
}
}
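
/// Typed column writer for a primitive column.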
pub struct ColumnWriterImpl<T: DataType> {
descr: ColumnDescPtr,
props: WriterPropertiesPtr,
page_writer: Box<PageWriter>,
has_dictionary: bool,
dict_encoder: Option<DictEncoder<T>>,
encoder: Box<Encoder<T>>,
codec: Compression,
compressor: Option<Box<Codec>>,
num_buffered_values: u32,
num_buffered_encoded_values: u32,
num_buffered_rows: u32,
total_bytes_written: u64,
total_rows_written: u64,
total_uncompressed_size: u64,
total_compressed_size: u64,
total_num_values: u64,
dictionary_page_offset: Option<u64>,
data_page_offset: Option<u64>,
def_levels_sink: Vec<i16>,
rep_levels_sink: Vec<i16>,
data_pages: VecDeque<CompressedPage>
}
impl<T: DataType> ColumnWriterImpl<T> {
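  /// Returns a new instance of `ColumnWriterImpl`.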
pub fn new(
descr: ColumnDescPtr,
props: WriterPropertiesPtr,
page_writer: Box<PageWriter>
) -> Self {
let codec = props.compression(descr.path());
let compressor = create_codec(codec).unwrap();
let dict_encoder = if props.dictionary_enabled(descr.path()) {
Some(DictEncoder::new(descr.clone(), Rc::new(MemTracker::new())))
} else {
None
};
let has_dictionary = dict_encoder.is_some();
let fallback_encoder = get_encoder(
descr.clone(),
props.encoding(descr.path()),
Rc::new(MemTracker::new())
).unwrap();
    Self {
      descr,
      props,
      page_writer,
      has_dictionary,
      dict_encoder,
      encoder: fallback_encoder,
      codec,
      compressor,
      num_buffered_values: 0,
      num_buffered_encoded_values: 0,
      num_buffered_rows: 0,
      total_bytes_written: 0,
      total_rows_written: 0,
      total_uncompressed_size: 0,
      total_compressed_size: 0,
      total_num_values: 0,
      dictionary_page_offset: None,
      data_page_offset: None,
      def_levels_sink: vec![],
      rep_levels_sink: vec![],
      data_pages: VecDeque::new()
    }
}
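
  /// Writes batch of values, definition levels and repetition levels.
  /// Returns the number of values written.
  ///
  /// If definition and repetition levels are provided, the levels are written in full,
  /// and only as many values as the levels require (this number is returned), since the
  /// number of actual values may be smaller than the number of provided values.
  ///
  /// If only values are provided, then all values are written and the length of
  /// the values buffer is returned.
  ///
  /// Definition and/or repetition levels can be omitted, if values are
  /// non-nullable and/or non-repeated.
  ///
  /// A minimal usage sketch (not compiled as a doctest; assumes `descr: ColumnDescPtr`
  /// and `page_writer: Box<PageWriter>` are already constructed):
  ///
  /// ```ignore
  /// let props = Rc::new(WriterProperties::builder().build());
  /// let column_writer = get_column_writer(descr, props, page_writer);
  /// let mut writer = get_typed_column_writer::<Int32Type>(column_writer);
  /// writer.write_batch(&[1, 2, 3], None, None)?;
  /// let (bytes_written, rows_written, metadata) = writer.close()?;
  /// ```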
pub fn write_batch(
&mut self,
values: &[T::T],
def_levels: Option<&[i16]>,
rep_levels: Option<&[i16]>
) -> Result<usize> {
let mut min_len = values.len();
if let Some(levels) = def_levels {
min_len = cmp::min(min_len, levels.len());
}
if let Some(levels) = rep_levels {
min_len = cmp::min(min_len, levels.len());
}
let write_batch_size = self.props.write_batch_size();
let num_batches = min_len / write_batch_size;
let mut values_offset = 0;
let mut levels_offset = 0;
for _ in 0..num_batches {
values_offset += self.write_mini_batch(
&values[values_offset..values_offset + write_batch_size],
def_levels.map(|lv| &lv[levels_offset..levels_offset + write_batch_size]),
rep_levels.map(|lv| &lv[levels_offset..levels_offset + write_batch_size])
)?;
levels_offset += write_batch_size;
}
values_offset += self.write_mini_batch(
&values[values_offset..],
def_levels.map(|lv| &lv[levels_offset..]),
rep_levels.map(|lv| &lv[levels_offset..])
)?;
Ok(values_offset)
}
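
  /// Returns total number of bytes written by this column writer so far.
  /// This value is also returned when column writer is closed.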
pub fn get_total_bytes_written(&self) -> u64 {
self.total_bytes_written
}
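
  /// Returns total number of rows written by this column writer so far.
  /// This value is also returned when column writer is closed.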
pub fn get_total_rows_written(&self) -> u64 {
self.total_rows_written
}
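
  /// Finalises writes and closes the column writer.
  /// Returns total bytes written, total rows written and column chunk metadata.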
pub fn close(mut self) -> Result<(u64, u64, ColumnChunkMetaData)> {
if self.dict_encoder.is_some() {
self.write_dictionary_page()?;
}
self.flush_data_pages()?;
let metadata = self.write_column_metadata()?;
self.dict_encoder = None;
self.page_writer.close()?;
Ok((self.total_bytes_written, self.total_rows_written, metadata))
}
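
  /// Writes a mini batch of values, definition and repetition levels.
  /// This allows fine-grained processing of values while maintaining a reasonable
  /// page size.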
fn write_mini_batch(
&mut self,
values: &[T::T],
def_levels: Option<&[i16]>,
rep_levels: Option<&[i16]>
) -> Result<usize> {
let num_values;
let mut values_to_write = 0;
if def_levels.is_some() && rep_levels.is_some() {
let def = def_levels.unwrap();
let rep = rep_levels.unwrap();
if def.len() != rep.len() {
return Err(general_err!(
"Inconsistent length of definition and repetition levels: {} != {}",
def.len(),
rep.len()
));
}
}
if self.descr.max_def_level() > 0 {
if def_levels.is_none() {
return Err(general_err!(
"Definition levels are required, because max definition level = {}",
self.descr.max_def_level()
));
}
let levels = def_levels.unwrap();
num_values = levels.len();
for &level in levels {
values_to_write += (level == self.descr.max_def_level()) as usize;
}
self.write_definition_levels(levels);
} else {
values_to_write = values.len();
num_values = values_to_write;
}
if self.descr.max_rep_level() > 0 {
if rep_levels.is_none() {
return Err(general_err!(
"Repetition levels are required, because max repetition level = {}",
self.descr.max_rep_level()
));
}
let levels = rep_levels.unwrap();
for &level in levels {
        self.num_buffered_rows += (level == 0) as u32;
}
self.write_repetition_levels(levels);
} else {
self.num_buffered_rows += num_values as u32;
}
if values.len() < values_to_write {
return Err(general_err!(
"Expected to write {} values, but have only {}",
values_to_write,
values.len()
));
}
self.write_values(&values[0..values_to_write])?;
self.num_buffered_values += num_values as u32;
self.num_buffered_encoded_values += values_to_write as u32;
if self.should_add_data_page() {
self.add_data_page()?;
}
if self.should_dict_fallback() {
self.dict_fallback()?;
}
Ok(values_to_write)
}
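
  /// Buffers definition levels; they are encoded when a data page is added.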
#[inline]
fn write_definition_levels(&mut self, def_levels: &[i16]) {
self.def_levels_sink.extend_from_slice(def_levels);
}
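
  /// Buffers repetition levels; they are encoded when a data page is added.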
#[inline]
fn write_repetition_levels(&mut self, rep_levels: &[i16]) {
self.rep_levels_sink.extend_from_slice(rep_levels);
}
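
  /// Writes values into the dictionary encoder when dictionary encoding is enabled,
  /// otherwise into the fallback value encoder.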
#[inline]
fn write_values(&mut self, values: &[T::T]) -> Result<()> {
match self.dict_encoder {
Some(ref mut encoder) => encoder.put(values),
None => self.encoder.put(values)
}
}
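
  /// Returns true if we need to fall back to non-dictionary encoding, i.e. when the
  /// dictionary has grown past the configured dictionary page size limit.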
#[inline]
fn should_dict_fallback(&self) -> bool {
match self.dict_encoder {
Some(ref encoder) => {
encoder.dict_encoded_size() >= self.props.dictionary_pagesize_limit()
},
None => false
}
}
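
  /// Returns true if there is enough buffered data to add a data page, false otherwise.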
#[inline]
fn should_add_data_page(&self) -> bool {
self.encoder.estimated_data_encoded_size() >= self.props.data_pagesize_limit()
}
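
  /// Performs dictionary fallback: writes the dictionary page, flushes all buffered
  /// data pages and disables dictionary encoding for the rest of the column chunk.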
fn dict_fallback(&mut self) -> Result<()> {
self.write_dictionary_page()?;
self.flush_data_pages()?;
self.dict_encoder = None;
Ok(())
}
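
  /// Adds a data page built from the buffered values and levels.
  /// The page is buffered while dictionary encoding is in effect (until the dictionary
  /// page is written), otherwise it is written directly to the page writer.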
fn add_data_page(&mut self) -> Result<()> {
let value_bytes = match self.dict_encoder {
Some(ref mut encoder) => encoder.write_indices()?,
None => self.encoder.flush_buffer()?
};
let encoding = if self.dict_encoder.is_some() {
self.props.dictionary_data_page_encoding()
} else {
self.encoder.encoding()
};
let max_def_level = self.descr.max_def_level();
let max_rep_level = self.descr.max_rep_level();
let compressed_page = match self.props.writer_version() {
WriterVersion::PARQUET_1_0 => {
let mut buffer = vec![];
if max_rep_level > 0 {
buffer.extend_from_slice(
&self.encode_levels_v1(
Encoding::RLE, &self.rep_levels_sink[..], max_rep_level)?[..]
);
}
if max_def_level > 0 {
buffer.extend_from_slice(
&self.encode_levels_v1(
Encoding::RLE, &self.def_levels_sink[..], max_def_level)?[..]
);
}
buffer.extend_from_slice(value_bytes.data());
let uncompressed_size = buffer.len();
if let Some(ref mut cmpr) = self.compressor {
          // Reserve space for the full levels + values buffer being compressed.
          let mut compressed_buf = Vec::with_capacity(uncompressed_size);
cmpr.compress(&buffer[..], &mut compressed_buf)?;
buffer = compressed_buf;
}
let data_page = Page::DataPage {
buf: ByteBufferPtr::new(buffer),
num_values: self.num_buffered_values,
          encoding,
def_level_encoding: Encoding::RLE,
rep_level_encoding: Encoding::RLE,
statistics: None
};
CompressedPage::new(data_page, uncompressed_size)
},
WriterVersion::PARQUET_2_0 => {
let mut rep_levels_byte_len = 0;
let mut def_levels_byte_len = 0;
let mut buffer = vec![];
if max_rep_level > 0 {
let levels = self.encode_levels_v2(&self.rep_levels_sink[..], max_rep_level)?;
rep_levels_byte_len = levels.len();
buffer.extend_from_slice(&levels[..]);
}
if max_def_level > 0 {
let levels = self.encode_levels_v2(&self.def_levels_sink[..], max_def_level)?;
def_levels_byte_len = levels.len();
buffer.extend_from_slice(&levels[..]);
}
let uncompressed_size =
rep_levels_byte_len + def_levels_byte_len + value_bytes.len();
match self.compressor {
Some(ref mut cmpr) => {
let mut compressed_buf = Vec::with_capacity(value_bytes.data().len());
cmpr.compress(value_bytes.data(), &mut compressed_buf)?;
buffer.extend_from_slice(&compressed_buf[..]);
},
None => {
buffer.extend_from_slice(value_bytes.data());
}
}
let data_page = Page::DataPageV2 {
buf: ByteBufferPtr::new(buffer),
num_values: self.num_buffered_values,
          encoding,
num_nulls: self.num_buffered_values - self.num_buffered_encoded_values,
num_rows: self.num_buffered_rows,
def_levels_byte_len: def_levels_byte_len as u32,
rep_levels_byte_len: rep_levels_byte_len as u32,
is_compressed: self.compressor.is_some(),
statistics: None
};
CompressedPage::new(data_page, uncompressed_size)
}
};
if self.dict_encoder.is_some() {
self.data_pages.push_back(compressed_page);
} else {
self.write_data_page(compressed_page)?;
}
self.total_rows_written += self.num_buffered_rows as u64;
self.rep_levels_sink.clear();
self.def_levels_sink.clear();
self.num_buffered_values = 0;
self.num_buffered_encoded_values = 0;
self.num_buffered_rows = 0;
Ok(())
}
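
  /// Finalises any outstanding data and flushes all buffered data pages into the
  /// underlying page writer.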
#[inline]
fn flush_data_pages(&mut self) -> Result<()> {
if self.num_buffered_values > 0 {
self.add_data_page()?;
}
while let Some(page) = self.data_pages.pop_front() {
self.write_data_page(page)?;
}
Ok(())
}
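
  /// Assembles and writes column chunk metadata.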
fn write_column_metadata(&mut self) -> Result<ColumnChunkMetaData> {
let total_compressed_size = self.total_compressed_size as i64;
let total_uncompressed_size = self.total_uncompressed_size as i64;
let num_values = self.total_num_values as i64;
let dict_page_offset = self.dictionary_page_offset.map(|v| v as i64);
let data_page_offset = self.data_page_offset.unwrap_or(0) as i64;
let file_offset;
let mut encodings = Vec::new();
if self.has_dictionary {
assert!(dict_page_offset.is_some(), "Dictionary offset is not set");
file_offset = dict_page_offset.unwrap() + total_compressed_size;
encodings.push(self.props.dictionary_page_encoding());
encodings.push(self.props.dictionary_data_page_encoding());
if self.dict_encoder.is_none() {
encodings.push(self.encoder.encoding());
}
} else {
file_offset = data_page_offset + total_compressed_size;
encodings.push(self.encoder.encoding());
}
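    // Levels are always encoded with RLE, for both data page v1 and v2.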
encodings.push(Encoding::RLE);
let metadata = ColumnChunkMetaData::builder(self.descr.clone())
.set_compression(self.codec)
.set_encodings(encodings)
.set_file_offset(file_offset)
.set_total_compressed_size(total_compressed_size)
.set_total_uncompressed_size(total_uncompressed_size)
.set_num_values(num_values)
.set_data_page_offset(data_page_offset)
.set_dictionary_page_offset(dict_page_offset)
.build()?;
self.page_writer.write_metadata(&metadata)?;
Ok(metadata)
}
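
  /// Encodes definition or repetition levels for Data Page v1.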
#[inline]
fn encode_levels_v1(
&self,
encoding: Encoding,
levels: &[i16],
max_level: i16
) -> Result<Vec<u8>> {
let size = max_buffer_size(encoding, max_level, levels.len());
let mut encoder = LevelEncoder::v1(encoding, max_level, vec![0; size]);
encoder.put(&levels)?;
encoder.consume()
}
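
  /// Encodes definition or repetition levels for Data Page v2.
  /// Encoding is always RLE.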
#[inline]
fn encode_levels_v2(&self, levels: &[i16], max_level: i16) -> Result<Vec<u8>> {
let size = max_buffer_size(Encoding::RLE, max_level, levels.len());
let mut encoder = LevelEncoder::v2(max_level, vec![0; size]);
encoder.put(&levels)?;
encoder.consume()
}
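
  /// Writes a compressed data page into the underlying page writer and updates metrics.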
#[inline]
fn write_data_page(&mut self, page: CompressedPage) -> Result<()> {
let page_spec = self.page_writer.write_page(page)?;
self.update_metrics_for_page(page_spec);
Ok(())
}
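
  /// Writes the dictionary page into the underlying page writer.
  /// Returns an error if dictionary encoding is not enabled for this column.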
#[inline]
fn write_dictionary_page(&mut self) -> Result<()> {
if self.dict_encoder.is_none() {
return Err(general_err!("Dictionary encoder is not set"));
}
let compressed_page = {
let encoder = self.dict_encoder.as_ref().unwrap();
let is_sorted = encoder.is_sorted();
let num_values = encoder.num_entries();
let mut values_buf = encoder.write_dict()?;
let uncompressed_size = values_buf.len();
if let Some(ref mut cmpr) = self.compressor {
let mut output_buf = Vec::with_capacity(uncompressed_size);
cmpr.compress(values_buf.data(), &mut output_buf)?;
values_buf = ByteBufferPtr::new(output_buf);
}
let dict_page = Page::DictionaryPage {
buf: values_buf,
num_values: num_values as u32,
encoding: self.props.dictionary_page_encoding(),
        is_sorted
};
CompressedPage::new(dict_page, uncompressed_size)
};
let page_spec = self.page_writer.write_page(compressed_page)?;
self.update_metrics_for_page(page_spec);
Ok(())
}
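
  /// Updates column writer metrics with each written page's metadata, recording
  /// dictionary and data page offsets the first time they are observed.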
#[inline]
fn update_metrics_for_page(&mut self, page_spec: PageWriteSpec) {
self.total_uncompressed_size += page_spec.uncompressed_size as u64;
self.total_compressed_size += page_spec.compressed_size as u64;
self.total_num_values += page_spec.num_values as u64;
self.total_bytes_written += page_spec.bytes_written;
match page_spec.page_type {
PageType::DATA_PAGE | PageType::DATA_PAGE_V2 => {
if self.data_page_offset.is_none() {
self.data_page_offset = Some(page_spec.offset);
}
},
PageType::DICTIONARY_PAGE => {
assert!(
self.dictionary_page_offset.is_none(),
"Dictionary offset is already set"
);
self.dictionary_page_offset = Some(page_spec.offset);
},
_ => { }
}
}
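
  /// Returns reference to the underlying page writer.
  /// This method is intended for use in tests only.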
fn get_page_writer_ref(&self) -> &Box<PageWriter> {
&self.page_writer
}
}
#[cfg(test)]
mod tests {
use super::*;
use rand::distributions::range::SampleRange;
use std::error::Error;
use column::page::PageReader;
use column::reader::{ColumnReaderImpl, get_column_reader, get_typed_column_reader};
use file::properties::WriterProperties;
use file::reader::SerializedPageReader;
use file::writer::SerializedPageWriter;
use schema::types::{ColumnDescriptor, ColumnPath, Type as SchemaType};
use util::io::{FileSink, FileSource};
use util::test_common::{get_temp_file, random_numbers_range};
#[test]
fn test_column_writer_inconsistent_def_rep_length() {
let page_writer = get_test_page_writer();
let props = Rc::new(WriterProperties::builder().build());
let mut writer = get_test_column_writer::<Int32Type>(page_writer, 1, 1, props);
let res = writer.write_batch(&[1, 2, 3, 4], Some(&[1, 1, 1]), Some(&[0, 0]));
assert!(res.is_err());
if let Err(err) = res {
assert_eq!(
err.description(),
"Inconsistent length of definition and repetition levels: 3 != 2"
);
}
}
#[test]
fn test_column_writer_invalid_def_levels() {
let page_writer = get_test_page_writer();
let props = Rc::new(WriterProperties::builder().build());
let mut writer = get_test_column_writer::<Int32Type>(page_writer, 1, 0, props);
let res = writer.write_batch(&[1, 2, 3, 4], None, None);
assert!(res.is_err());
if let Err(err) = res {
assert_eq!(
err.description(),
"Definition levels are required, because max definition level = 1"
);
}
}
#[test]
fn test_column_writer_invalid_rep_levels() {
let page_writer = get_test_page_writer();
let props = Rc::new(WriterProperties::builder().build());
let mut writer = get_test_column_writer::<Int32Type>(page_writer, 0, 1, props);
let res = writer.write_batch(&[1, 2, 3, 4], None, None);
assert!(res.is_err());
if let Err(err) = res {
assert_eq!(
err.description(),
"Repetition levels are required, because max repetition level = 1"
);
}
}
#[test]
fn test_column_writer_not_enough_values_to_write() {
let page_writer = get_test_page_writer();
let props = Rc::new(WriterProperties::builder().build());
let mut writer = get_test_column_writer::<Int32Type>(page_writer, 1, 0, props);
let res = writer.write_batch(&[1, 2], Some(&[1, 1, 1, 1]), None);
assert!(res.is_err());
if let Err(err) = res {
assert_eq!(err.description(), "Expected to write 4 values, but have only 2");
}
}
#[test]
#[should_panic(expected = "Dictionary offset is already set")]
fn test_column_writer_write_only_one_dictionary_page() {
let page_writer = get_test_page_writer();
let props = Rc::new(WriterProperties::builder().build());
let mut writer = get_test_column_writer::<Int32Type>(page_writer, 0, 0, props);
writer.write_batch(&[1, 2, 3, 4], None, None).unwrap();
let res = writer.write_dictionary_page();
assert!(res.is_ok());
writer.write_dictionary_page().unwrap();
}
#[test]
fn test_column_writer_error_when_writing_disabled_dictionary() {
let page_writer = get_test_page_writer();
let props =
Rc::new(WriterProperties::builder().set_dictionary_enabled(false).build());
let mut writer = get_test_column_writer::<Int32Type>(page_writer, 0, 0, props);
writer.write_batch(&[1, 2, 3, 4], None, None).unwrap();
let res = writer.write_dictionary_page();
assert!(res.is_err());
if let Err(err) = res {
assert_eq!(err.description(), "Dictionary encoder is not set");
}
}
#[test]
fn test_column_writer_check_metadata() {
let page_writer = get_test_page_writer();
let props = Rc::new(WriterProperties::builder().build());
let mut writer = get_test_column_writer::<Int32Type>(page_writer, 0, 0, props);
writer.write_batch(&[1, 2, 3, 4], None, None).unwrap();
let (bytes_written, rows_written, metadata) = writer.close().unwrap();
assert_eq!(bytes_written, 20);
assert_eq!(rows_written, 4);
assert_eq!(
metadata.encodings(),
&vec![Encoding::PLAIN, Encoding::RLE_DICTIONARY, Encoding::RLE]
);
assert_eq!(metadata.num_values(), 8);
assert_eq!(metadata.compressed_size(), 20);
assert_eq!(metadata.uncompressed_size(), 20);
assert_eq!(metadata.data_page_offset(), 0);
assert_eq!(metadata.dictionary_page_offset(), Some(0));
}
#[test]
fn test_column_writer_empty_column_roundtrip() {
let props = WriterProperties::builder().build();
column_roundtrip::<Int32Type>("test_col_writer_rnd_1", props, &[], None, None);
}
#[test]
fn test_column_writer_non_nullable_values_roundtrip() {
let props = WriterProperties::builder().build();
column_roundtrip_random::<Int32Type>("test_col_writer_rnd_2", props, 1024,
::std::i32::MIN, ::std::i32::MAX, 0, 0);
}
#[test]
fn test_column_writer_nullable_non_repeated_values_roundtrip() {
let props = WriterProperties::builder().build();
column_roundtrip_random::<Int32Type>(
"test_column_writer_nullable_non_repeated_values_roundtrip", props, 1024,
::std::i32::MIN, ::std::i32::MAX, 10, 0);
}
#[test]
fn test_column_writer_nullable_repeated_values_roundtrip() {
let props = WriterProperties::builder().build();
column_roundtrip_random::<Int32Type>("test_col_writer_rnd_3", props, 1024,
::std::i32::MIN, ::std::i32::MAX, 10, 10);
}
#[test]
fn test_column_writer_dictionary_fallback_small_data_page() {
let props = WriterProperties::builder()
.set_dictionary_pagesize_limit(32)
.set_data_pagesize_limit(32)
.build();
column_roundtrip_random::<Int32Type>("test_col_writer_rnd_4", props, 1024,
::std::i32::MIN, ::std::i32::MAX, 10, 10);
}
#[test]
fn test_column_writer_small_write_batch_size() {
for i in vec![1, 2, 5, 10, 11, 1023] {
let props = WriterProperties::builder()
.set_write_batch_size(i)
.build();
column_roundtrip_random::<Int32Type>("test_col_writer_rnd_5", props, 1024,
::std::i32::MIN, ::std::i32::MAX, 10, 10);
}
}
#[test]
fn test_column_writer_dictionary_disabled_v1() {
let props = WriterProperties::builder()
.set_writer_version(WriterVersion::PARQUET_1_0)
.set_dictionary_enabled(false)
.build();
column_roundtrip_random::<Int32Type>("test_col_writer_rnd_6", props, 1024,
::std::i32::MIN, ::std::i32::MAX, 10, 10);
}
#[test]
fn test_column_writer_dictionary_disabled_v2() {
let props = WriterProperties::builder()
.set_writer_version(WriterVersion::PARQUET_2_0)
.set_dictionary_enabled(false)
.build();
column_roundtrip_random::<Int32Type>("test_col_writer_rnd_7", props, 1024,
::std::i32::MIN, ::std::i32::MAX, 10, 10);
}
#[test]
fn test_column_writer_compression_v1() {
let props = WriterProperties::builder()
.set_writer_version(WriterVersion::PARQUET_1_0)
.set_compression(Compression::SNAPPY)
.build();
column_roundtrip_random::<Int32Type>("test_col_writer_rnd_8", props, 2048,
::std::i32::MIN, ::std::i32::MAX, 10, 10);
}
#[test]
fn test_column_writer_compression_v2() {
let props = WriterProperties::builder()
.set_writer_version(WriterVersion::PARQUET_2_0)
.set_compression(Compression::SNAPPY)
.build();
column_roundtrip_random::<Int32Type>("test_col_writer_rnd_9", props, 2048,
::std::i32::MIN, ::std::i32::MAX, 10, 10);
}
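
  /// Performs write-read roundtrip with randomly generated values and levels.
  /// `max_size` is the maximum number of values or levels (if `max_def_level` > 0)
  /// to write for a column.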
fn column_roundtrip_random<'a, T: DataType>(
file_name: &'a str,
props: WriterProperties,
max_size: usize,
min_value: T::T,
max_value: T::T,
max_def_level: i16,
max_rep_level: i16
) where T::T: PartialOrd + SampleRange + Copy {
let mut num_values: usize = 0;
let mut buf: Vec<i16> = Vec::new();
let def_levels = if max_def_level > 0 {
random_numbers_range(max_size, 0, max_def_level + 1, &mut buf);
for &dl in &buf[..] {
if dl == max_def_level {
num_values += 1;
}
}
Some(&buf[..])
} else {
num_values = max_size;
None
};
let mut buf: Vec<i16> = Vec::new();
let rep_levels = if max_rep_level > 0 {
random_numbers_range(max_size, 0, max_rep_level + 1, &mut buf);
Some(&buf[..])
} else {
None
};
let mut values: Vec<T::T> = Vec::new();
random_numbers_range(num_values, min_value, max_value, &mut values);
column_roundtrip::<T>(file_name, props, &values[..], def_levels, rep_levels);
}
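
  /// Performs write-read roundtrip and asserts that the values and levels read back
  /// match the ones written.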
fn column_roundtrip<'a, T: DataType>(
file_name: &'a str,
props: WriterProperties,
values: &[T::T],
def_levels: Option<&[i16]>,
rep_levels: Option<&[i16]>
) {
let file = get_temp_file(file_name, &[]);
let sink = FileSink::new(&file);
let page_writer = Box::new(SerializedPageWriter::new(sink));
let max_def_level = match def_levels {
Some(buf) => *buf.iter().max().unwrap_or(&0i16),
None => 0i16
};
let max_rep_level = match rep_levels {
Some(buf) => *buf.iter().max().unwrap_or(&0i16),
None => 0i16
};
let mut max_batch_size = values.len();
if let Some(levels) = def_levels {
max_batch_size = cmp::max(max_batch_size, levels.len());
}
if let Some(levels) = rep_levels {
max_batch_size = cmp::max(max_batch_size, levels.len());
}
let mut writer = get_test_column_writer::<T>(
page_writer,
max_def_level,
max_rep_level,
Rc::new(props)
);
let values_written = writer.write_batch(values, def_levels, rep_levels).unwrap();
assert_eq!(values_written, values.len());
let (bytes_written, rows_written, column_metadata) = writer.close().unwrap();
let source = FileSource::new(&file, 0, bytes_written as usize);
let page_reader = Box::new(SerializedPageReader::new(
source,
column_metadata.num_values(),
column_metadata.compression(),
T::get_physical_type()
).unwrap());
let reader = get_test_column_reader::<T>(
page_reader,
max_def_level,
max_rep_level
);
let mut actual_values = vec![T::T::default(); max_batch_size];
    let mut actual_def_levels = def_levels.map(|_| vec![0i16; max_batch_size]);
    let mut actual_rep_levels = rep_levels.map(|_| vec![0i16; max_batch_size]);
let (values_read, levels_read) = read_fully(
reader,
max_batch_size,
actual_def_levels.as_mut(),
actual_rep_levels.as_mut(),
actual_values.as_mut_slice()
);
assert_eq!(&actual_values[..values_read], values);
match actual_def_levels {
Some(ref vec) => assert_eq!(Some(&vec[..levels_read]), def_levels),
None => assert_eq!(None, def_levels)
}
match actual_rep_levels {
Some(ref vec) => assert_eq!(Some(&vec[..levels_read]), rep_levels),
None => assert_eq!(None, rep_levels)
}
    if let Some(levels) = actual_rep_levels {
      let mut actual_rows_written = 0;
      // Only the levels actually read are meaningful; count rows among them.
      for l in &levels[..levels_read] {
        if *l == 0 {
          actual_rows_written += 1;
        }
      }
      assert_eq!(actual_rows_written, rows_written);
} else if actual_def_levels.is_some() {
assert_eq!(levels_read as u64, rows_written);
} else {
assert_eq!(values_read as u64, rows_written);
}
}
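
  /// Reads a single batch, assuming `batch_size` is large enough to capture all of
  /// the values and levels; returns the number of values and levels read.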
fn read_fully<T: DataType>(
mut reader: ColumnReaderImpl<T>,
batch_size: usize,
mut def_levels: Option<&mut Vec<i16>>,
mut rep_levels: Option<&mut Vec<i16>>,
values: &mut [T::T]
) -> (usize, usize) {
    let actual_def_levels = def_levels.as_mut().map(|vec| &mut vec[..]);
    let actual_rep_levels = rep_levels.as_mut().map(|vec| &mut vec[..]);
reader.read_batch(batch_size, actual_def_levels, actual_rep_levels, values).unwrap()
}
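
  /// Returns a typed column writer over a test column descriptor.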
fn get_test_column_writer<T: DataType>(
page_writer: Box<PageWriter>,
max_def_level: i16,
max_rep_level: i16,
props: WriterPropertiesPtr
) -> ColumnWriterImpl<T> {
let descr = Rc::new(get_test_column_descr::<T>(max_def_level, max_rep_level));
let column_writer = get_column_writer(descr, props, page_writer);
get_typed_column_writer::<T>(column_writer)
}
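
  /// Returns a typed column reader over a test column descriptor.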
fn get_test_column_reader<T: DataType>(
page_reader: Box<PageReader>,
max_def_level: i16,
max_rep_level: i16
) -> ColumnReaderImpl<T> {
let descr = Rc::new(get_test_column_descr::<T>(max_def_level, max_rep_level));
let column_reader = get_column_reader(descr, page_reader);
get_typed_column_reader::<T>(column_reader)
}
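
  /// Returns a descriptor for a primitive column with the given max definition and
  /// repetition levels.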
fn get_test_column_descr<T: DataType>(
max_def_level: i16,
max_rep_level: i16
) -> ColumnDescriptor {
let path = ColumnPath::from("col");
let tpe = SchemaType::primitive_type_builder("col", T::get_physical_type())
.build().unwrap();
ColumnDescriptor::new(Rc::new(tpe), None, max_def_level, max_rep_level, path)
}
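
  /// Returns a test page writer that only tracks page metrics and discards page data.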
fn get_test_page_writer() -> Box<PageWriter> {
Box::new(TestPageWriter {})
}
struct TestPageWriter {}
impl PageWriter for TestPageWriter {
fn write_page(&mut self, page: CompressedPage) -> Result<PageWriteSpec> {
let mut res = PageWriteSpec::new();
res.page_type = page.page_type();
res.uncompressed_size = page.uncompressed_size();
res.compressed_size = page.compressed_size();
res.num_values = page.num_values();
res.offset = 0;
res.bytes_written = page.data().len() as u64;
Ok(res)
}
fn write_metadata(&mut self, _metadata: &ColumnChunkMetaData) -> Result<()> {
Ok(())
}
fn close(&mut self) -> Result<()> {
Ok(())
}
}
}