chore: bump datafusion #894

Merged · 7 commits · May 17, 2023
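This PR bumps arrow, arrow_ipc, and parquet from 36.0.0 to 38.0.0, advances the pinned datafusion/datafusion-proto git revision, raises sqlparser to 0.33, and promotes the CeresDB influxql crates to workspace dependencies. Most of the Rust changes track arrow's API churn between those versions: borrowed `Array::data()` access gives way to owned `to_data()`/`into_data()`, and schema fields become reference-counted (`Arc<Field>`, collected into `Fields`).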
234 changes: 123 additions & 111 deletions Cargo.lock

Large diffs are not rendered by default.

16 changes: 10 additions & 6 deletions Cargo.toml
```diff
@@ -56,8 +56,8 @@ name = "ceresdb-server"
 path = "src/bin/ceresdb-server.rs"
 
 [workspace.dependencies]
-arrow = { version = "36.0.0", features = ["prettyprint"] }
-arrow_ipc = { version = "36.0.0" }
+arrow = { version = "38.0.0", features = ["prettyprint"] }
+arrow_ipc = { version = "38.0.0" }
 arrow_ext = { path = "components/arrow_ext" }
 analytic_engine = { path = "analytic_engine" }
 arena = { path = "components/arena" }
@@ -76,8 +76,8 @@ cluster = { path = "cluster" }
 criterion = "0.3"
 common_types = { path = "common_types" }
 common_util = { path = "common_util" }
-datafusion = { git = "https://github.com/apache/arrow-datafusion.git", rev = "b87871fdd1f4ce64201eb1f7c79a0547627f37e9" }
-datafusion-proto = { git = "https://github.com/apache/arrow-datafusion.git", rev = "b87871fdd1f4ce64201eb1f7c79a0547627f37e9" }
+datafusion = { git = "https://github.com/apache/arrow-datafusion.git", rev = "06e9f53637f20dd91bef43b74942ec36c38c22d5" }
+datafusion-proto = { git = "https://github.com/apache/arrow-datafusion.git", rev = "06e9f53637f20dd91bef43b74942ec36c38c22d5" }
 df_operator = { path = "df_operator" }
 etcd-client = "0.10.3"
 env_logger = "0.6"
@@ -89,13 +89,17 @@ lazy_static = "1.4.0"
 log = "0.4"
 logger = { path = "components/logger" }
 lru = "0.7.6"
+influxql-logical-planner = { git = "https://github.com/CeresDB/influxql", package = "iox_query_influxql" }
+influxql-parser = { git = "https://github.com/CeresDB/influxql", package = "influxdb_influxql_parser" }
+influxql-query = { git = "https://github.com/CeresDB/influxql", package = "iox_query" }
+influxql-schema = { git = "https://github.com/CeresDB/influxql", package = "schema" }
 interpreters = { path = "interpreters" }
 itertools = "0.10.5"
 meta_client = { path = "meta_client" }
 object_store = { path = "components/object_store" }
 partition_table_engine = { path = "partition_table_engine" }
 parquet_ext = { path = "components/parquet_ext" }
-parquet = { version = "36.0.0" }
+parquet = { version = "38.0.0" }
 paste = "1.0"
 pin-project-lite = "0.2.8"
 profile = { path = "components/profile" }
@@ -117,7 +121,7 @@ smallvec = "1.6"
 slog = "2.7"
 spin = "0.9.6"
 query_frontend = { path = "query_frontend" }
-sqlparser = { version = "0.32", features = ["serde"] }
+sqlparser = { version = "0.33", features = ["serde"] }
 system_catalog = { path = "system_catalog" }
 table_engine = { path = "table_engine" }
 table_kv = { path = "components/table_kv" }
```

19 changes: 11 additions & 8 deletions analytic_engine/src/sst/parquet/encoding.rs
```diff
@@ -526,7 +526,7 @@ impl HybridRecordDecoder {
             .iter()
             .map(|f| {
                 if let DataType::List(nested_field) = f.data_type() {
-                    Field::new(f.name(), nested_field.data_type().clone(), true)
+                    Arc::new(Field::new(f.name(), nested_field.data_type().clone(), true))
                 } else {
                     f.clone()
                 }
@@ -554,9 +554,10 @@ impl HybridRecordDecoder {
         assert_eq!(array_ref.len() + 1, value_offsets.len());
 
         let values_num = *value_offsets.last().unwrap() as usize;
-        let offset_slices = array_ref.data().buffers()[0].as_slice();
-        let value_slices = array_ref.data().buffers()[1].as_slice();
-        let nulls = array_ref.data().nulls();
+        let array_data = array_ref.to_data();
+        let offset_slices = array_data.buffers()[0].as_slice();
+        let value_slices = array_data.buffers()[1].as_slice();
+        let nulls = array_data.nulls();
         trace!(
             "raw buffer slice, offsets:{:#02x?}, values:{:#02x?}",
             offset_slices,
@@ -630,8 +631,9 @@ impl HybridRecordDecoder {
         assert!(!value_offsets.is_empty());
 
         let values_num = *value_offsets.last().unwrap() as usize;
-        let old_values_buffer = array_ref.data().buffers()[0].as_slice();
-        let old_nulls = array_ref.data().nulls();
+        let array_data = array_ref.to_data();
+        let old_values_buffer = array_data.buffers()[0].as_slice();
+        let old_nulls = array_data.nulls();
 
         let mut new_values_buffer = MutableBuffer::new(value_size * values_num);
         let mut new_null_buffer = hybrid::new_ones_buffer(values_num);
@@ -683,7 +685,8 @@ impl RecordDecoder for HybridRecordDecoder {
         let mut value_offsets = None;
         // Find value offsets from the first col in collapsible_cols_idx.
         if let Some(idx) = self.collapsible_cols_idx.first() {
-            let offset_slices = arrays[*idx as usize].data().buffers()[0].as_slice();
+            let array_data = arrays[*idx as usize].to_data();
+            let offset_slices = array_data.buffers()[0].as_slice();
             value_offsets = Some(Self::get_array_offsets(offset_slices));
         } else {
             CollapsibleColsIdxEmpty.fail()?;
@@ -703,7 +706,7 @@ impl RecordDecoder for HybridRecordDecoder {
                     // are collapsed by hybrid storage format, to differentiate
                     // List column in original records
                     DataType::List(_nested_field) => {
-                        Ok(make_array(array_ref.data().child_data()[0].clone()))
+                        Ok(make_array(array_ref.to_data().child_data()[0].clone()))
                     }
                     _ => {
                         let datum_kind = DatumKind::from_data_type(data_type).unwrap();
```

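The recurring change in this file replaces the removed `Array::data()` accessor with `Array::to_data()`, which returns an owned `ArrayData`; binding it to a local (`array_data`) keeps the borrowed buffer slices alive past the expression. A minimal sketch of the pattern against arrow 38 (illustrative only, not code from this PR):

```rust
use std::sync::Arc;

use arrow::array::{Array, ArrayRef, StringArray};

// Read the i32 value offsets of a Utf8 array, similar in spirit to
// `HybridRecordDecoder::get_array_offsets` above.
fn utf8_offsets(array: &ArrayRef) -> Vec<i32> {
    // `to_data()` returns an owned `ArrayData`; without this binding,
    // `array.to_data().buffers()[0].as_slice()` would borrow from a
    // temporary and fail to compile.
    let array_data = array.to_data();
    let offset_bytes = array_data.buffers()[0].as_slice();
    offset_bytes
        .chunks_exact(std::mem::size_of::<i32>())
        .map(|b| i32::from_ne_bytes(b.try_into().unwrap()))
        .collect()
}

fn main() {
    let array: ArrayRef = Arc::new(StringArray::from(vec![Some("bb"), None, Some("ccc")]));
    // A Utf8 array stores len + 1 offsets: [0, 2, 2, 5].
    assert_eq!(utf8_offsets(&array), vec![0, 2, 2, 5]);
}
```
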
16 changes: 8 additions & 8 deletions analytic_engine/src/sst/parquet/hybrid.rs
```diff
@@ -74,12 +74,12 @@ impl ArrayHandle {
     }
 
     // Note: this require primitive array
-    fn data_slice(&self) -> &[u8] {
-        self.array.data().buffers()[0].as_slice()
+    fn data_slice(&self) -> Vec<u8> {
+        self.array.to_data().buffers()[0].as_slice().to_vec()
     }
 
     fn nulls(&self) -> Option<&NullBuffer> {
-        self.array.data().nulls()
+        self.array.nulls()
     }
 }
@@ -122,12 +122,12 @@ pub fn build_hybrid_arrow_schema(schema: &Schema) -> ArrowSchemaRef {
         .enumerate()
         .map(|(idx, field)| {
             if schema.is_collapsible_column(idx) {
-                let field_type = DataType::List(Box::new(Field::new(
+                let field_type = DataType::List(Arc::new(Field::new(
                     LIST_ITEM_NAME,
                     field.data_type().clone(),
                     true,
                 )));
-                Field::new(field.name(), field_type, true)
+                Arc::new(Field::new(field.name(), field_type, true))
             } else {
                 field.clone()
             }
@@ -418,7 +418,7 @@ impl ListArrayBuilder {
         let array_len = self.multi_row_arrays.len();
         let mut offsets = MutableBuffer::new(array_len * std::mem::size_of::<i32>());
         let child_data = self.build_child_data(&mut offsets)?;
-        let field = Box::new(Field::new(
+        let field = Arc::new(Field::new(
             LIST_ITEM_NAME,
             self.datum_kind.to_arrow_data_type(),
             true,
@@ -731,14 +731,14 @@ mod tests {
         let string_data =
             string_array(vec![Some("bb"), None, Some("ccc"), Some("eeee"), Some("a")]);
         let offsets: [i32; 3] = [0, 4, 5];
-        let array_data = ArrayData::builder(DataType::List(Box::new(Field::new(
+        let array_data = ArrayData::builder(DataType::List(Arc::new(Field::new(
             LIST_ITEM_NAME,
             DataType::Utf8,
             true,
         ))))
         .len(2)
         .add_buffer(Buffer::from_slice_ref(offsets))
-        .add_child_data(string_data.data().to_owned())
+        .add_child_data(string_data.to_data())
         .build()
         .unwrap();
         let expected = ListArray::from(array_data);
```

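The `Box<Field>` to `Arc<Field>` edits follow arrow 38's switch to reference-counted fields: nested types such as `DataType::List` now hold a `FieldRef` (an alias for `Arc<Field>`), and schemas collect into a `Fields` container. A small sketch under those assumptions, not taken from the repo:

```rust
use std::sync::Arc;

use arrow::datatypes::{DataType, Field, Schema};

fn main() {
    // The list's child field is Arc'd, so one allocation can be shared
    // across every column that uses the same item type.
    let item = Arc::new(Field::new("item", DataType::Utf8, true));
    let tags = Field::new("tags", DataType::List(item), true);

    // `Schema::new` accepts anything convertible into `Fields`,
    // including a plain `Vec<Field>`.
    let schema = Schema::new(vec![tags, Field::new("ts", DataType::Int64, false)]);
    assert_eq!(schema.fields().len(), 2);
}
```
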
6 changes: 3 additions & 3 deletions common_types/src/column.rs
```diff
@@ -346,7 +346,7 @@ macro_rules! impl_from_array_and_slice {
             // the underlying vector of [arrow::buffer::Buffer] and Bitmap (also
             // holds a Buffer), thus require some allocation. However, the Buffer is
             // managed by Arc, so cloning the buffer is not too expensive.
-            let array_data = array_ref.data().clone();
+            let array_data = array_ref.into_data();
             let array = $ArrayType::from(array_data);
 
             Self(array)
@@ -356,7 +356,7 @@
         impl $Column {
             fn to_arrow_array(&self) -> $ArrayType {
                 // Clone the array data.
-                let array_data = self.0.data().clone();
+                let array_data = self.0.clone().into_data();
                 $ArrayType::from(array_data)
             }
 
@@ -367,7 +367,7 @@
             fn slice(&self, offset: usize, length: usize) -> Self {
                 let array_slice = self.0.slice(offset, length);
                 // Clone the slice data.
-                let array_data = array_slice.data().clone();
+                let array_data = array_slice.into_data();
                 let array = $ArrayType::from(array_data);
 
                 Self(array)
```

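With borrowed `data()` gone, the macro obtains `ArrayData` by value: `into_data()` consumes the array, so a cheap `clone()` (the underlying buffers are `Arc`-managed) precedes it whenever the source must be kept. A sketch of the round trip, illustrative rather than the repo's code:

```rust
use arrow::array::{Array, Int64Array};

fn main() {
    let a = Int64Array::from(vec![1, 2, 3]);

    // Cloning an array only bumps Arc refcounts on its buffers;
    // `into_data()` then consumes the clone instead of the original.
    let data = a.clone().into_data();
    let b = Int64Array::from(data);
    assert_eq!(a.to_data(), b.to_data());

    // Slicing is zero-copy; converting the slice's data back into a
    // typed array mirrors the `slice` arm of the macro above.
    let s = Int64Array::from(a.slice(1, 2).into_data());
    assert_eq!(s.value(0), 2);
    assert_eq!(s.value(1), 3);
}
```
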
6 changes: 3 additions & 3 deletions common_types/src/column_schema.rs
```diff
@@ -2,7 +2,7 @@
 
 //! Schema of column
 
-use std::{collections::HashMap, convert::TryFrom, str::FromStr};
+use std::{collections::HashMap, convert::TryFrom, str::FromStr, sync::Arc};
 
 use arrow::datatypes::{DataType, Field};
 use ceresdbproto::schema as schema_pb;
@@ -280,10 +280,10 @@ impl TryFrom<schema_pb::ColumnSchema> for ColumnSchema {
     }
 }
 
-impl TryFrom<&Field> for ColumnSchema {
+impl TryFrom<&Arc<Field>> for ColumnSchema {
     type Error = Error;
 
-    fn try_from(field: &Field) -> Result<Self> {
+    fn try_from(field: &Arc<Field>) -> Result<Self> {
         let ArrowFieldMeta {
             id,
             is_tag,
```

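The impl target changes because `Schema::fields()` now yields `&FieldRef` (that is, `&Arc<Field>`) rather than `&Field`, so a conversion written against `&Field` no longer matches what iteration produces. A hypothetical, stripped-down version of the pattern (`ColumnInfo` is illustrative, not the repo's `ColumnSchema`):

```rust
use std::{convert::TryFrom, sync::Arc};

use arrow::datatypes::{DataType, Field, Schema};

struct ColumnInfo {
    name: String,
    nullable: bool,
}

impl TryFrom<&Arc<Field>> for ColumnInfo {
    type Error = String;

    fn try_from(field: &Arc<Field>) -> Result<Self, Self::Error> {
        Ok(Self {
            name: field.name().clone(),
            nullable: field.is_nullable(),
        })
    }
}

fn main() -> Result<(), String> {
    let schema = Schema::new(vec![Field::new("ts", DataType::Int64, false)]);
    // `fields()` iterates `&FieldRef`, which the impl accepts directly.
    let cols: Vec<ColumnInfo> = schema
        .fields()
        .iter()
        .map(ColumnInfo::try_from)
        .collect::<Result<_, _>>()?;
    assert_eq!(cols[0].name, "ts");
    assert!(!cols[0].nullable);
    Ok(())
}
```
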
2 changes: 1 addition & 1 deletion common_types/src/datum.rs
```diff
@@ -977,7 +977,7 @@ pub mod arrow_convert {
             | DataType::LargeBinary
             | DataType::FixedSizeBinary(_)
             | DataType::Struct(_)
-            | DataType::Union(_, _, _)
+            | DataType::Union(_, _)
             | DataType::List(_)
             | DataType::LargeList(_)
             | DataType::FixedSizeList(_, _)
```

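The arm loses a wildcard because arrow 38 folds the union's field list and type ids into a single `UnionFields` payload, leaving `DataType::Union(UnionFields, UnionMode)` with two parts instead of three. A tiny sketch of a match written against that shape (the `is_supported` helper is hypothetical):

```rust
use std::sync::Arc;

use arrow::datatypes::{DataType, Field};

// Union and nested list types fall through to the unsupported arm,
// as in the `datum.rs` match above.
fn is_supported(data_type: &DataType) -> bool {
    !matches!(
        data_type,
        DataType::Union(_, _) | DataType::List(_) | DataType::LargeList(_)
    )
}

fn main() {
    assert!(is_supported(&DataType::Int64));
    let list = DataType::List(Arc::new(Field::new("item", DataType::Utf8, true)));
    assert!(!is_supported(&list));
}
```
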
2 changes: 1 addition & 1 deletion common_types/src/record_batch.rs
```diff
@@ -325,7 +325,7 @@ fn cast_arrow_record_batch(source: ArrowRecordBatch) -> Result<ArrowRecordBatch>
         })
         .collect::<Vec<_>>();
     let mills_schema = Schema {
-        fields: mills_fileds,
+        fields: mills_fileds.into(),
         metadata: schema.metadata().clone(),
     };
     let result =
```

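The `.into()` is needed because `Schema`'s `fields` member is now the dedicated `Fields` collection rather than a `Vec<Field>`, and a `Vec` converts into it via `From`. A minimal sketch, assuming arrow 38:

```rust
use std::collections::HashMap;

use arrow::datatypes::{DataType, Field, Schema};

fn main() {
    let fields = vec![Field::new("v", DataType::Float64, true)];
    // `Vec<Field>` -> `Fields` via `.into()`, as in `cast_arrow_record_batch`.
    let schema = Schema {
        fields: fields.into(),
        metadata: HashMap::new(),
    };
    assert_eq!(schema.fields().len(), 1);
}
```
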
8 changes: 6 additions & 2 deletions common_types/src/schema.rs
```diff
@@ -434,7 +434,7 @@ impl RecordSchema {
             .columns
             .iter()
             .map(|col| col.to_arrow_field())
-            .collect();
+            .collect::<Vec<_>>();
         // Build arrow schema.
         let arrow_schema = Arc::new(ArrowSchema::new_with_metadata(
             fields,
@@ -1222,7 +1222,11 @@ impl Builder {
             );
         }
 
-        let fields = self.columns.iter().map(|c| c.to_arrow_field()).collect();
+        let fields = self
+            .columns
+            .iter()
+            .map(|c| c.to_arrow_field())
+            .collect::<Vec<_>>();
         let meta = self.build_arrow_schema_meta();
 
         Ok(Schema {
```

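The bare `.collect()` calls stop inferring a type because `ArrowSchema`'s constructors are now generic over `impl Into<Fields>`, so the turbofish pins the intermediate collection to `Vec<_>`. Sketch, assuming arrow 38:

```rust
use std::collections::HashMap;

use arrow::datatypes::{DataType, Field, Schema};

fn main() {
    // Without `::<Vec<_>>` the compiler cannot decide what `collect()`
    // should build, since `new_with_metadata` accepts `impl Into<Fields>`.
    let fields = ["a", "b"]
        .iter()
        .map(|name| Field::new(*name, DataType::Int32, true))
        .collect::<Vec<_>>();
    let schema = Schema::new_with_metadata(fields, HashMap::new());
    assert_eq!(schema.fields().len(), 2);
}
```
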
2 changes: 1 addition & 1 deletion components/parquet_ext/src/prune/min_max.rs
```diff
@@ -210,7 +210,7 @@ mod test {
         let fields = fields
             .into_iter()
             .map(|(name, data_type)| ArrowField::new(name, data_type, false))
-            .collect();
+            .collect::<Vec<_>>();
         Arc::new(ArrowSchema::new(fields))
     }
 
```

13 changes: 8 additions & 5 deletions integration_tests/build_meta.sh
```diff
@@ -2,11 +2,14 @@
 
 set -exo
 
-if [ -d ceresmeta ]; then
+SRC=/tmp/ceresmeta-src
+TARGET=$(pwd)/ceresmeta
+
+if [ -d $SRC ]; then
   echo "Remove old meta..."
-  rm -r ceresmeta
+  rm -rf $SRC
 fi
 
-git clone --depth 1 https://github.com/ceresdb/ceresmeta.git
-cd ceresmeta
-go build -o ceresmeta ./cmd/meta/...
+git clone --depth 1 https://github.com/ceresdb/ceresmeta.git ${SRC}
+cd ${SRC}
+go build -o ${TARGET}/ceresmeta ./cmd/meta/...
```

6 changes: 4 additions & 2 deletions integration_tests/cases/common/dummy/select_1.result
```diff
@@ -6,7 +6,7 @@ Int64(1),
 
 SELECT x;
 
-Failed to execute query, err: Server(ServerError { code: 500, msg: "Failed to create plan, query: SELECT x;. Caused by: Failed to create plan, err:Failed to generate datafusion plan, err:Schema error: No field named \"x\"." })
+Failed to execute query, err: Server(ServerError { code: 500, msg: "Failed to create plan, query: SELECT x;. Caused by: Failed to create plan, err:Failed to generate datafusion plan, err:Schema error: No field named x." })
 
 SELECT 'a';
 
@@ -22,7 +22,9 @@ Boolean(false),
 
 SELECT NOT(1);
 
-Failed to execute query, err: Server(ServerError { code: 500, msg: "Failed to execute interpreter, sql: SELECT NOT(1);. Caused by: Failed to execute select, err:Failed to execute logical plan, err:Failed to collect record batch stream, err:Stream error, msg:convert from arrow record batch, err:Internal error: NOT 'Literal { value: Int64(1) }' can't be evaluated because the expression's type is Int64, not boolean or NULL. This was likely caused by a bug in DataFusion's code and we would welcome that you file an bug report in our issue tracker" })
+NOT Int64(1),
+Int64(-2),
+
 
 SELECT TRUE;
 
```

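The last case records a behavior change rather than a new message: the bumped DataFusion evaluates `NOT` on integers as two's-complement bitwise complement instead of erroring, and `!x == -x - 1`, hence `Int64(-2)` for `NOT(1)`. The same identity in plain Rust:

```rust
fn main() {
    // Two's-complement bitwise NOT: !x == -x - 1.
    assert_eq!(!1i64, -2);
    assert_eq!(!0i64, -1);
}
```
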
6 changes: 3 additions & 3 deletions proxy/src/influxdb/types.rs
```diff
@@ -602,7 +602,7 @@ pub(crate) fn convert_influxql_output(output: Output) -> Result<InfluxqlResponse
 mod tests {
     use std::sync::Arc;
 
-    use arrow::datatypes::{Field as ArrowField, Schema as ArrowSchema};
+    use arrow::datatypes::{Field as ArrowField, Fields, Schema as ArrowSchema};
     use common_types::{
         column::{ColumnBlock, ColumnBlockBuilder},
         column_schema,
@@ -796,14 +796,14 @@ mod tests {
             false,
         );
         let project_fields = vec![
-            measurement_field,
+            Arc::new(measurement_field),
             fields[1].clone(),
             fields[0].clone(),
             fields[2].clone(),
             fields[3].clone(),
         ];
         let project_arrow_schema = Arc::new(ArrowSchema::new_with_metadata(
-            project_fields,
+            Fields::from(project_fields),
             arrow_schema.metadata().clone(),
         ));
 
```

2 changes: 1 addition & 1 deletion query_engine/Cargo.toml
```diff
@@ -20,7 +20,7 @@ common_util = { workspace = true }
 datafusion = { workspace = true }
 df_operator = { workspace = true }
 futures = { workspace = true }
-iox_query = { git = "https://github.com/CeresDB/influxql" }
+influxql-query = { workspace = true }
 log = { workspace = true }
 query_frontend = { workspace = true }
 serde = { workspace = true }
```
