
Commit c4935fc

Merge branch 'main' into warehouse_rbac
2 parents 02cd206 + 6850110

File tree: 13 files changed, +303 -42 lines

.github/actions/pack_binaries/action.yml (+8)

@@ -23,6 +23,14 @@ runs:
       category: ${{ inputs.category }}
       path: distro/bin
       artifacts: metactl,meta,query,query.debug
+  - name: Download BendSQL
+    shell: bash
+    env:
+      GH_TOKEN: ${{ github.token }}
+    run: |
+      version=$(gh release list --repo databendlabs/bendsql | head -n 1 | awk '{print $1}')
+      curl -sSLfo /tmp/bendsql.tar.gz https://github.com/databendlabs/bendsql/releases/download/${version}/bendsql-${version}-${{ inputs.target }}.tar.gz
+      tar -xzvf /tmp/bendsql.tar.gz -C distro/bin
   - name: Pack Binaries
     id: pack_binaries
     shell: bash
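
The new step derives the BendSQL release tag from the first whitespace-separated field of the first line of gh release list output, then downloads and unpacks the matching tarball into distro/bin. A minimal Rust sketch of that tag-extraction rule (the sample output below is hypothetical):

    // Toy model of `head -n 1 | awk '{print $1}'`: take the first
    // whitespace-separated field of the first output line as the release tag.
    fn latest_tag(gh_output: &str) -> Option<&str> {
        gh_output.lines().next()?.split_whitespace().next()
    }

    fn main() {
        let sample = "v0.23.2\tLatest\tv0.23.2\t2024-11-01\nv0.23.1\t\tv0.23.1\t2024-10-20";
        assert_eq!(latest_tag(sample), Some("v0.23.2"));
    }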

Cargo.lock (+3 -5)

Generated file; diff not rendered by default.

Cargo.toml (+3 -3)

@@ -369,10 +369,10 @@ num-derive = "0.3.3"
 num-traits = "0.2.19"
 num_cpus = "1.13.1"
 object = "0.36.5"
-object_store_opendal = { version = "0.49.0" }
+object_store_opendal = { version = "0.49.0", package = "object_store_opendal", git = "https://github.com/apache/opendal", rev = "78b6a9f" }
 once_cell = "1.15.0"
 openai_api_rust = "0.1"
-opendal = { version = "0.51.1", features = [
+opendal = { version = "0.51.1", package = "opendal", git = "https://github.com/apache/opendal", rev = "78b6a9f", features = [
     "layers-fastrace",
     "layers-prometheus-client",
     "layers-async-backtrace",
@@ -639,7 +639,7 @@ deltalake = { git = "https://github.com/delta-io/delta-rs", rev = "3038c145" }
 ethnum = { git = "https://github.com/datafuse-extras/ethnum-rs", rev = "4cb05f1" }
 openai_api_rust = { git = "https://github.com/datafuse-extras/openai-api", rev = "819a0ed" }
 openraft = { git = "https://github.com/databendlabs/openraft", tag = "v0.10.0-alpha.7" }
-orc-rust = { git = "https://github.com/datafusion-contrib/orc-rust", rev = "dfb1ede" }
+orc-rust = { git = "https://github.com/youngsofun/orc-rust", rev = "6c5ac57" }
 recursive = { git = "https://github.com/datafuse-extras/recursive.git", rev = "6af35a1" }
 sled = { git = "https://github.com/datafuse-extras/sled", tag = "v0.34.7-datafuse.1" }
 tantivy = { git = "https://github.com/datafuse-extras/tantivy", rev = "7502370" }
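
Note that opendal and object_store_opendal are pinned to the same apache/opendal revision (78b6a9f), which keeps the two crates in lockstep; orc-rust moves from the datafusion-contrib repository to a fork pinned at rev 6c5ac57.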

src/query/service/src/pipelines/pipeline_builder.rs (+7 -5)

@@ -94,11 +94,13 @@ impl PipelineBuilder {
         }

         // unload spill metas
-        self.main_pipeline
-            .set_on_finished(always_callback(move |_info: &ExecutionInfo| {
-                self.ctx.unload_spill_meta();
-                Ok(())
-            }));
+        if !self.ctx.mark_unload_callbacked() {
+            self.main_pipeline
+                .set_on_finished(always_callback(move |_info: &ExecutionInfo| {
+                    self.ctx.unload_spill_meta();
+                    Ok(())
+                }));
+        }

         Ok(PipelineBuildResult {
             main_pipeline: self.main_pipeline,
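
The guard added above uses an atomic test-and-set: mark_unload_callbacked returns the flag's previous value, so only the first pipeline build for a query registers the spill-unload callback. A minimal standalone sketch of the pattern, with a simplified context type in place of Databend's QueryContext:

    use std::sync::atomic::{AtomicBool, Ordering};

    struct Ctx {
        unload_callbacked: AtomicBool,
    }

    impl Ctx {
        // fetch_or returns the previous value: false on the first call,
        // true on every later call, so registration happens exactly once.
        fn mark_unload_callbacked(&self) -> bool {
            self.unload_callbacked.fetch_or(true, Ordering::SeqCst)
        }
    }

    fn main() {
        let ctx = Ctx { unload_callbacked: AtomicBool::new(false) };
        assert!(!ctx.mark_unload_callbacked()); // first build registers the callback
        assert!(ctx.mark_unload_callbacked()); // later builds skip registration
    }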

src/query/service/src/sessions/query_ctx.rs (+9)

@@ -322,6 +322,9 @@ impl QueryContext {

     pub fn update_init_query_id(&self, id: String) {
         self.shared.spilled_files.write().clear();
+        self.shared
+            .unload_callbacked
+            .store(false, Ordering::Release);
         self.shared.cluster_spill_progress.write().clear();
         *self.shared.init_query_id.write() = id;
     }
@@ -471,6 +474,12 @@ impl QueryContext {
         Ok(table)
     }

+    pub fn mark_unload_callbacked(&self) -> bool {
+        self.shared
+            .unload_callbacked
+            .fetch_or(true, Ordering::SeqCst)
+    }
+
     pub fn unload_spill_meta(&self) {
         const SPILL_META_SUFFIX: &str = ".list";
         let r = self.shared.spilled_files.read();
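
Together, the two hunks above give the flag a per-query lifecycle: update_init_query_id re-arms it with store(false, Release) when the context is reused for a new query, and mark_unload_callbacked trips it with fetch_or(true, SeqCst). A sketch of that lifecycle, assuming these are the flag's only writers:

    use std::sync::atomic::{AtomicBool, Ordering};

    struct Shared {
        unload_callbacked: AtomicBool,
    }

    impl Shared {
        // mirrors update_init_query_id: a reused context may register again
        fn reset_for_new_query(&self) {
            self.unload_callbacked.store(false, Ordering::Release);
        }

        // mirrors mark_unload_callbacked
        fn mark(&self) -> bool {
            self.unload_callbacked.fetch_or(true, Ordering::SeqCst)
        }
    }

    fn main() {
        let s = Shared { unload_callbacked: AtomicBool::new(false) };
        assert!(!s.mark()); // query 1: callback registered
        assert!(s.mark()); // query 1: duplicate registration skipped
        s.reset_for_new_query(); // context reused for query 2
        assert!(!s.mark()); // query 2: callback registered again
    }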

src/query/service/src/sessions/query_ctx_shared.rs (+2)

@@ -150,6 +150,7 @@ pub struct QueryContextShared {
     pub(in crate::sessions) cluster_spill_progress: Arc<RwLock<HashMap<String, SpillProgress>>>,
     pub(in crate::sessions) spilled_files:
         Arc<RwLock<HashMap<crate::spillers::Location, crate::spillers::Layout>>>,
+    pub(in crate::sessions) unload_callbacked: AtomicBool,
 }

 impl QueryContextShared {
@@ -209,6 +210,7 @@ impl QueryContextShared {

             cluster_spill_progress: Default::default(),
             spilled_files: Default::default(),
+            unload_callbacked: AtomicBool::new(false),
             warehouse_cache: Arc::new(RwLock::new(None)),
         }))
     }

src/query/sql/src/planner/binder/copy_into_table.rs (+5 -8)

@@ -182,15 +182,12 @@ impl Binder {
             files: stmt.files.clone(),
             pattern,
         };
-        let required_values_schema: DataSchemaRef = Arc::new(
-            match &stmt.dst_columns {
-                Some(cols) => self.schema_project(&table.schema(), cols)?,
-                None => self.schema_project(&table.schema(), &[])?,
-            }
-            .into(),
-        );
+        let stage_schema = match &stmt.dst_columns {
+            Some(cols) => self.schema_project(&table.schema(), cols)?,
+            None => self.schema_project(&table.schema(), &[])?,
+        };

-        let stage_schema = infer_table_schema(&required_values_schema)?;
+        let required_values_schema: DataSchemaRef = Arc::new(stage_schema.clone().into());

         let default_values = if stage_info.file_format_params.need_field_default() {
             Some(
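
The rewrite reverses the derivation order: stage_schema is now the projected table schema itself, and required_values_schema is obtained from it by a plain conversion, rather than building the data schema first and recovering a table schema with infer_table_schema. A toy sketch of the new order, using hypothetical stand-ins for Databend's schema types:

    #[derive(Clone, Debug)]
    struct TableSchema(Vec<String>); // stand-in, not Databend's TableSchema

    #[derive(Debug)]
    struct DataSchema(Vec<String>); // stand-in, not Databend's DataSchema

    impl From<TableSchema> for DataSchema {
        fn from(t: TableSchema) -> Self {
            DataSchema(t.0)
        }
    }

    fn main() {
        // one projection of the destination columns...
        let stage_schema = TableSchema(vec!["id".into(), "name".into()]);
        // ...yields the required-values schema by direct conversion
        let required_values_schema: DataSchema = stage_schema.clone().into();
        println!("{required_values_schema:?}");
    }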

src/query/sql/src/planner/expression_parser.rs (+24 -1)

@@ -395,7 +395,30 @@ pub fn parse_cluster_keys(
     table_meta: Arc<dyn Table>,
     ast_exprs: Vec<AExpr>,
 ) -> Result<Vec<Expr>> {
-    let exprs = parse_ast_exprs(ctx, table_meta, ast_exprs)?;
+    let schema = table_meta.schema();
+    let (mut bind_context, metadata) = bind_table(table_meta)?;
+    let settings = ctx.get_settings();
+    let name_resolution_ctx = NameResolutionContext::try_from(settings.as_ref())?;
+    let mut type_checker = TypeChecker::try_create(
+        &mut bind_context,
+        ctx,
+        &name_resolution_ctx,
+        metadata,
+        &[],
+        false,
+    )?;
+
+    let exprs: Vec<Expr> = ast_exprs
+        .iter()
+        .map(|ast| {
+            let (scalar, _) = *type_checker.resolve(ast)?;
+            let expr = scalar
+                .as_expr()?
+                .project_column_ref(|col| schema.index_of(&col.column_name).unwrap());
+            Ok(expr)
+        })
+        .collect::<Result<_>>()?;
+
     let mut res = Vec::with_capacity(exprs.len());
     for expr in exprs {
         let inner_type = expr.data_type().remove_nullable();
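
The inlined path resolves each AST expression through the TypeChecker, then rewrites column references from names to positions via project_column_ref and schema.index_of. A toy illustration of that name-to-index step, with a hypothetical minimal schema type:

    struct Schema {
        columns: Vec<String>,
    }

    impl Schema {
        // analogous to index_of in the diff: a column's position by name
        fn index_of(&self, name: &str) -> Option<usize> {
            self.columns.iter().position(|c| c == name)
        }
    }

    fn main() {
        let schema = Schema {
            columns: vec!["id".into(), "ts".into(), "region".into()],
        };
        // a cluster-key expression over `region` is rewritten to column index 2
        assert_eq!(schema.index_of("region"), Some(2));
    }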
