Merge remote-tracking branch 'apache/main' into file_sink_config

alamb committed Feb 11, 2025
2 parents 52253a4 + f64097f commit 3721401

Showing 223 changed files with 2,766 additions and 981 deletions.
2 changes: 1 addition & 1 deletion .github/actions/setup-builder/action.yaml

@@ -30,7 +30,7 @@ runs:
       run: |
         RETRY=("ci/scripts/retry" timeout 120)
         "${RETRY[@]}" apt-get update
-        "${RETRY[@]}" apt-get install -y protobuf-compiler
+        "${RETRY[@]}" apt-get install -y protobuf-compiler cmake
     - name: Setup Rust toolchain
       shell: bash
       # rustfmt is needed for the substrait build script
13 changes: 11 additions & 2 deletions .github/workflows/extended.yml

@@ -48,7 +48,9 @@ jobs:
         with:
           rust-version: stable
       - name: Prepare cargo build
-        run: cargo check --profile ci --all-targets
+        run: |
+          cargo check --profile ci --all-targets
+          cargo clean
   # Run extended tests (with feature 'extended_tests')
   linux-test-extended:
@@ -70,6 +72,8 @@ jobs:
         run: cargo test --profile ci --exclude datafusion-examples --exclude datafusion-benchmarks --workspace --lib --tests --bins --features avro,json,backtrace,extended_tests
       - name: Verify Working Directory Clean
         run: git diff --exit-code
+      - name: Cleanup
+        run: cargo clean

   # Check answers are correct when hash values collide
   hash-collisions:
@@ -90,6 +94,7 @@ jobs:
         run: |
           cd datafusion
           cargo test --profile ci --exclude datafusion-examples --exclude datafusion-benchmarks --exclude datafusion-sqllogictest --workspace --lib --tests --features=force_hash_collisions,avro,extended_tests
+          cargo clean
   sqllogictest-sqlite:
     name: "Run sqllogictests with the sqlite test suite"
@@ -106,4 +111,8 @@ jobs:
         with:
           rust-version: stable
       - name: Run sqllogictest
-        run: cargo test --profile release-nonlto --test sqllogictests -- --include-sqlite
+        run: |
+          cargo test --profile release-nonlto --test sqllogictests -- --include-sqlite
+          cargo clean
29 changes: 12 additions & 17 deletions Cargo.lock

(Generated file; diff not rendered.)
1 change: 0 additions & 1 deletion datafusion/catalog-listing/Cargo.toml

@@ -33,7 +33,6 @@ default = ["compression"]

 [dependencies]
 arrow = { workspace = true }
-arrow-schema = { workspace = true }
 async-compression = { version = "0.4.0", features = [
     "bzip2",
     "gzip",
2 changes: 1 addition & 1 deletion datafusion/catalog-listing/src/file_sink_config.rs

@@ -17,7 +17,7 @@

 use crate::write::demux::{start_demuxer_task, DemuxedStreamReceiver};
 use crate::{ListingTableUrl, PartitionedFile};
-use arrow_schema::{DataType, SchemaRef};
+use arrow::datatypes::{DataType, SchemaRef};
 use async_trait::async_trait;
 use datafusion_common::Result;
 use datafusion_common_runtime::SpawnedTask;
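The change above is the pattern repeated throughout this commit: the standalone arrow-schema dependency is dropped from each Cargo.toml (as in datafusion/catalog-listing above) and its types are imported through the arrow facade crate, which re-exports them under arrow::datatypes. A minimal sketch of the equivalence — a hypothetical snippet, not taken from the diff:

// Hypothetical sketch (not from the diff): the `arrow` facade crate
// re-exports the schema types defined in `arrow-schema`, so this import
// is equivalent to `use arrow_schema::{DataType, SchemaRef};` without a
// direct `arrow-schema` entry in Cargo.toml.
use arrow::datatypes::{DataType, Field, Schema, SchemaRef};
use std::sync::Arc;

fn main() {
    let schema: SchemaRef = Arc::new(Schema::new(vec![Field::new(
        "c1",
        DataType::Utf8,
        true,
    )]));
    assert_eq!(schema.field(0).data_type(), &DataType::Utf8);
}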
3 changes: 1 addition & 2 deletions datafusion/catalog-listing/src/helpers.rs

@@ -30,10 +30,9 @@ use datafusion_expr::{BinaryExpr, Operator};
 use arrow::{
     array::{Array, ArrayRef, AsArray, StringBuilder},
     compute::{and, cast, prep_null_mask_filter},
-    datatypes::{DataType, Field, Schema},
+    datatypes::{DataType, Field, Fields, Schema},
     record_batch::RecordBatch,
 };
-use arrow_schema::Fields;
 use datafusion_expr::execution_props::ExecutionProps;
 use futures::stream::FuturesUnordered;
 use futures::{stream::BoxStream, StreamExt, TryStreamExt};
2 changes: 1 addition & 1 deletion datafusion/catalog-listing/src/write/demux.rs

@@ -31,7 +31,7 @@ use arrow::array::{
     builder::UInt64Builder, cast::AsArray, downcast_dictionary_array, RecordBatch,
     StringArray, StructArray,
 };
-use arrow_schema::{DataType, Schema};
+use arrow::datatypes::{DataType, Schema};
 use datafusion_common::cast::{
     as_boolean_array, as_date32_array, as_date64_array, as_int32_array, as_int64_array,
     as_string_array, as_string_view_array,
2 changes: 1 addition & 1 deletion datafusion/catalog-listing/src/write/mod.rs

@@ -26,7 +26,7 @@ use crate::file_sink_config::FileSinkConfig;
 use datafusion_common::error::Result;

 use arrow::array::RecordBatch;
-use arrow_schema::Schema;
+use arrow::datatypes::Schema;
 use bytes::Bytes;
 use object_store::buffered::BufWriter;
 use object_store::path::Path;
1 change: 0 additions & 1 deletion datafusion/common/Cargo.toml

@@ -52,7 +52,6 @@ apache-avro = { version = "0.17", default-features = false, features = [
 ], optional = true }
 arrow = { workspace = true }
 arrow-ipc = { workspace = true }
-arrow-schema = { workspace = true }
 base64 = "0.22.1"
 half = { workspace = true }
 hashbrown = { workspace = true }
5 changes: 2 additions & 3 deletions datafusion/common/src/column.rs

@@ -20,7 +20,7 @@

 use crate::error::{_schema_err, add_possible_columns_to_diag};
 use crate::utils::{parse_identifiers_normalized, quote_identifier};
 use crate::{DFSchema, Diagnostic, Result, SchemaError, Spans, TableReference};
-use arrow_schema::{Field, FieldRef};
+use arrow::datatypes::{Field, FieldRef};
 use std::collections::HashSet;
 use std::convert::Infallible;
 use std::fmt;
@@ -373,8 +373,7 @@ impl fmt::Display for Column {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use arrow::datatypes::DataType;
-    use arrow_schema::SchemaBuilder;
+    use arrow::datatypes::{DataType, SchemaBuilder};
     use std::sync::Arc;

     fn create_qualified_schema(qualifier: &str, names: Vec<&str>) -> Result<DFSchema> {
12 changes: 6 additions & 6 deletions datafusion/common/src/dfschema.rs

@@ -30,8 +30,9 @@ use crate::{
 };

 use arrow::compute::can_cast_types;
-use arrow::datatypes::{DataType, Field, FieldRef, Fields, Schema, SchemaRef};
-use arrow_schema::SchemaBuilder;
+use arrow::datatypes::{
+    DataType, Field, FieldRef, Fields, Schema, SchemaBuilder, SchemaRef,
+};

 /// A reference-counted reference to a [DFSchema].
 pub type DFSchemaRef = Arc<DFSchema>;
@@ -56,7 +57,7 @@ pub type DFSchemaRef = Arc<DFSchema>;
 ///
 /// ```rust
 /// use datafusion_common::{DFSchema, Column};
-/// use arrow_schema::{DataType, Field, Schema};
+/// use arrow::datatypes::{DataType, Field, Schema};
 ///
 /// let arrow_schema = Schema::new(vec![
 ///    Field::new("c1", DataType::Int32, false),
@@ -77,7 +78,7 @@ pub type DFSchemaRef = Arc<DFSchema>;
 ///
 /// ```rust
 /// use datafusion_common::{DFSchema, Column};
-/// use arrow_schema::{DataType, Field, Schema};
+/// use arrow::datatypes::{DataType, Field, Schema};
 ///
 /// let arrow_schema = Schema::new(vec![
 ///    Field::new("c1", DataType::Int32, false),
@@ -94,8 +95,7 @@ pub type DFSchemaRef = Arc<DFSchema>;
 ///
 /// ```rust
 /// use datafusion_common::DFSchema;
-/// use arrow_schema::Schema;
-/// use arrow::datatypes::Field;
+/// use arrow::datatypes::{Schema, Field};
 /// use std::collections::HashMap;
 ///
 /// let df_schema = DFSchema::from_unqualified_fields(vec![
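For context, the doc examples touched above exercise the round-trip between an Arrow Schema and a DFSchema using the new import paths. A runnable sketch along the same lines, assuming arrow and datafusion-common as dependencies (it mirrors the doc examples rather than quoting the diff):

// Sketch based on the DFSchema doc examples: build an Arrow schema,
// qualify it with a table name, and look up a column by qualified name.
use arrow::datatypes::{DataType, Field, Schema};
use datafusion_common::{Column, DFSchema, Result};

fn main() -> Result<()> {
    let arrow_schema = Schema::new(vec![Field::new("c1", DataType::Int32, false)]);
    // Qualify every field with the table name "t1", as in the DFSchema docs.
    let df_schema = DFSchema::try_from_qualified_schema("t1", &arrow_schema)?;
    let column = Column::from_qualified_name("t1.c1");
    assert!(df_schema.has_column(&column));
    Ok(())
}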
2 changes: 1 addition & 1 deletion datafusion/common/src/file_options/parquet_writer.rs

@@ -25,7 +25,7 @@ use crate::{
     DataFusionError, Result, _internal_datafusion_err,
 };

-use arrow_schema::Schema;
+use arrow::datatypes::Schema;
 // TODO: handle once deprecated
 #[allow(deprecated)]
 use parquet::{
2 changes: 1 addition & 1 deletion datafusion/common/src/param_value.rs

@@ -17,7 +17,7 @@

 use crate::error::{_plan_datafusion_err, _plan_err};
 use crate::{Result, ScalarValue};
-use arrow_schema::DataType;
+use arrow::datatypes::DataType;
 use std::collections::HashMap;

 /// The parameter value corresponding to the placeholder
(Diff truncated; the remaining changed files are not shown here.)