use std::sync::Arc;

-use common_ast::ast::Expr as AExpr;
-use common_ast::parser::parse_expr;
-use common_ast::parser::parser_values_with_placeholder;
-use common_ast::parser::tokenize_sql;
-use common_ast::Dialect;
-use common_catalog::table_context::StageAttachment;
use common_catalog::table_context::TableContext;
-use common_exception::ErrorCode;
-use common_exception::Result;
-use common_expression::types::DataType;
-use common_expression::types::NumberDataType;
-use common_expression::types::NumberScalar;
-use common_expression::BlockEntry;
-use common_expression::DataBlock;
-use common_expression::DataField;
-use common_expression::DataSchema;
-use common_expression::DataSchemaRef;
-use common_expression::Expr;
-use common_expression::Scalar;
-use common_expression::Value;
use common_meta_app::principal::StageInfo;
-use common_pipeline_transforms::processors::transforms::Transform;
-use common_sql::binder::wrap_cast;
-use common_sql::evaluator::BlockOperator;
-use common_sql::evaluator::CompoundBlockOperator;
-use common_sql::plans::FunctionCall;
-use common_sql::BindContext;
-use common_sql::Metadata;
-use common_sql::MetadataRef;
-use common_sql::NameResolutionContext;
-use common_sql::ScalarBinder;
-use common_sql::ScalarExpr;
use common_storage::StageFileInfo;
use common_storages_fuse::io::Files;
use common_storages_stage::StageTable;
-use parking_lot::RwLock;
use tracing::error;

use crate::sessions::QueryContext;

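-// Split the parsed VALUES clause into constant expressions and placeholder columns,
-// evaluate the constants into scalars, and return the remaining (placeholder) schema
-// together with those constant values.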
-#[async_backtrace::framed]
-pub async fn prepared_values(
-    ctx: &Arc<QueryContext>,
-    source_schema: &DataSchemaRef,
-    attachment: &Arc<StageAttachment>,
-) -> Result<(DataSchemaRef, Vec<Scalar>)> {
-    let settings = ctx.get_settings();
-    let sql_dialect = settings.get_sql_dialect()?;
-    let tokens = tokenize_sql(attachment.values_str.as_str())?;
-    let expr_or_placeholders = parser_values_with_placeholder(&tokens, sql_dialect)?;
-
-    if source_schema.num_fields() != expr_or_placeholders.len() {
-        return Err(ErrorCode::SemanticError(format!(
-            "need {} fields in values, got only {}",
-            source_schema.num_fields(),
-            expr_or_placeholders.len()
-        )));
-    }
-
-    let mut attachment_fields = vec![];
-    let mut const_fields = vec![];
-    let mut exprs = vec![];
-    for (i, eo) in expr_or_placeholders.into_iter().enumerate() {
-        match eo {
-            Some(e) => {
-                exprs.push(e);
-                const_fields.push(source_schema.fields()[i].clone());
-            }
-            None => attachment_fields.push(source_schema.fields()[i].clone()),
-        }
-    }
-    let name_resolution_ctx = NameResolutionContext::try_from(settings.as_ref())?;
-    let mut bind_context = BindContext::new();
-    let metadata = Arc::new(RwLock::new(Metadata::default()));
-    let const_schema = Arc::new(DataSchema::new(const_fields));
-    let const_values = exprs_to_scalar(
-        exprs,
-        &const_schema,
-        ctx.clone(),
-        &name_resolution_ctx,
-        &mut bind_context,
-        metadata,
-    )
-    .await?;
-    Ok((Arc::new(DataSchema::new(attachment_fields)), const_values))
-}
-
#[async_backtrace::framed]
pub async fn try_purge_files(
    ctx: Arc<QueryContext>,
@@ -125,142 +47,3 @@ pub async fn try_purge_files(
        }
    }
}
-
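-// Bind each expression against an empty context, cast it to the corresponding field
-// type, then evaluate all of them over a dummy one-row block to obtain the constant
-// scalar values.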
-pub async fn exprs_to_scalar(
-    exprs: Vec<AExpr>,
-    schema: &DataSchemaRef,
-    ctx: Arc<dyn TableContext>,
-    name_resolution_ctx: &NameResolutionContext,
-    bind_context: &mut BindContext,
-    metadata: MetadataRef,
-) -> Result<Vec<Scalar>> {
-    let schema_fields_len = schema.fields().len();
-    if exprs.len() != schema_fields_len {
-        return Err(ErrorCode::TableSchemaMismatch(format!(
-            "Table columns count is not match, expect {schema_fields_len}, input: {}, expr: {:?}",
-            exprs.len(),
-            exprs
-        )));
-    }
-    let mut scalar_binder = ScalarBinder::new(
-        bind_context,
-        ctx.clone(),
-        name_resolution_ctx,
-        metadata.clone(),
-        &[],
-    );
-
-    let mut map_exprs = Vec::with_capacity(exprs.len());
-    for (i, expr) in exprs.iter().enumerate() {
-        // `DEFAULT` in insert values will be parsed as `Expr::ColumnRef`.
-        if let AExpr::ColumnRef { column, .. } = expr {
-            if column.name.eq_ignore_ascii_case("default") {
-                let field = schema.field(i);
-                fill_default_value(&mut scalar_binder, &mut map_exprs, field, schema).await?;
-                continue;
-            }
-        }
-
-        let (mut scalar, data_type) = scalar_binder.bind(expr).await?;
-        let field_data_type = schema.field(i).data_type();
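-        // Variant targets need type-aware handling: most source types are cast
-        // directly, strings are parsed as JSON, and anything else is an error.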
-        scalar = if field_data_type.remove_nullable() == DataType::Variant {
-            match data_type.remove_nullable() {
-                DataType::Boolean
-                | DataType::Number(_)
-                | DataType::Decimal(_)
-                | DataType::Timestamp
-                | DataType::Date
-                | DataType::Bitmap
-                | DataType::Variant => wrap_cast(&scalar, field_data_type),
-                DataType::String => {
-                    // parse string to JSON value
-                    ScalarExpr::FunctionCall(FunctionCall {
-                        span: None,
-                        func_name: "parse_json".to_string(),
-                        params: vec![],
-                        arguments: vec![scalar],
-                    })
-                }
-                _ => {
-                    if data_type == DataType::Null && field_data_type.is_nullable() {
-                        scalar
-                    } else {
-                        return Err(ErrorCode::BadBytes(format!(
-                            "unable to cast type `{}` to type `{}`",
-                            data_type, field_data_type
-                        )));
-                    }
-                }
-            }
-        } else {
-            wrap_cast(&scalar, field_data_type)
-        };
-        let expr = scalar
-            .as_expr()?
-            .project_column_ref(|col| schema.index_of(&col.index.to_string()).unwrap());
-        map_exprs.push(expr);
-    }
-
-    let mut operators = Vec::with_capacity(schema_fields_len);
-    operators.push(BlockOperator::Map { exprs: map_exprs });
-
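-    // A single-row dummy block gives the Map operator something to evaluate the
-    // constant expressions against.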
-    let one_row_chunk = DataBlock::new(
-        vec![BlockEntry {
-            data_type: DataType::Number(NumberDataType::UInt8),
-            value: Value::Scalar(Scalar::Number(NumberScalar::UInt8(1))),
-        }],
-        1,
-    );
-    let func_ctx = ctx.get_function_context()?;
-    let mut expression_transform = CompoundBlockOperator {
-        operators,
-        ctx: func_ctx,
-    };
-    let res = expression_transform.transform(one_row_chunk)?;
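-    // The first column is the dummy UInt8; skip it and take row 0 of every mapped column.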
-    let scalars: Vec<Scalar> = res
-        .columns()
-        .iter()
-        .skip(1)
-        .map(|col| unsafe { col.value.as_ref().index_unchecked(0).to_owned() })
-        .collect();
-    Ok(scalars)
-}
-
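-// Push the column's DEFAULT expression (bound and cast to the field type); if no
-// DEFAULT is defined, fall back to NULL for nullable fields or the type's default value.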
-pub async fn fill_default_value(
-    binder: &mut ScalarBinder<'_>,
-    map_exprs: &mut Vec<Expr>,
-    field: &DataField,
-    schema: &DataSchema,
-) -> Result<()> {
-    if let Some(default_expr) = field.default_expr() {
-        let tokens = tokenize_sql(default_expr)?;
-        let ast = parse_expr(&tokens, Dialect::PostgreSQL)?;
-        let (mut scalar, _) = binder.bind(&ast).await?;
-        scalar = wrap_cast(&scalar, field.data_type());
-
-        let expr = scalar
-            .as_expr()?
-            .project_column_ref(|col| schema.index_of(&col.index.to_string()).unwrap());
-        map_exprs.push(expr);
-    } else {
-        // If field data type is nullable, then we'll fill it with null.
-        if field.data_type().is_nullable() {
-            let expr = Expr::Constant {
-                span: None,
-                scalar: Scalar::Null,
-                data_type: field.data_type().clone(),
-            };
-            map_exprs.push(expr);
-        } else {
-            let data_type = field.data_type().clone();
-            let default_value = Scalar::default_value(&data_type);
-            let expr = Expr::Constant {
-                span: None,
-                scalar: default_value,
-                data_type,
-            };
-            map_exprs.push(expr);
-        }
-    }
-    Ok(())
-}