Skip to content

Commit

Permalink
Minor: Small comment changes in sql folder (#12838)
Browse files Browse the repository at this point in the history
* Small changes in datafusion/sql folder

* more small changes
  • Loading branch information
jonathanc-n authored Oct 10, 2024
1 parent 7093bbd commit f7591fb
Show file tree
Hide file tree
Showing 18 changed files with 100 additions and 100 deletions.
4 changes: 2 additions & 2 deletions datafusion/sql/src/cte.rs
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,8 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
};

// Each recursive CTE consists from two parts in the logical plan:
// 1. A static term (the left hand side on the SQL, where the
// Each recursive CTE consists of two parts in the logical plan:
// 1. A static term (the left-hand side on the SQL, where the
// referencing the same CTE is not allowed)
//
// 2. A recursive term (the right-hand side, and the recursive
Expand Down
10 changes: 5 additions & 5 deletions datafusion/sql/src/expr/function.rs
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
}

// user-defined function (UDF) should have precedence
// User-defined function (UDF) should have precedence
if let Some(fm) = self.context_provider.get_function_meta(&name) {
let args = self.function_args_to_expr(args, schema, planner_context)?;
return Ok(Expr::ScalarFunction(ScalarFunction::new_udf(fm, args)));
Expand All @@ -260,12 +260,12 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
);
}

// then, window function
// Then, window function
if let Some(WindowType::WindowSpec(window)) = over {
let partition_by = window
.partition_by
.into_iter()
// ignore window spec PARTITION BY for scalar values
// Ignore window spec PARTITION BY for scalar values
// as they do not change and thus do not generate new partitions
.filter(|e| !matches!(e, sqlparser::ast::Expr::Value { .. },))
.map(|e| self.sql_expr_to_logical_expr(e, schema, planner_context))
Expand Down Expand Up @@ -383,7 +383,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
&self,
name: &str,
) -> Result<WindowFunctionDefinition> {
// check udaf first
// Check udaf first
let udaf = self.context_provider.get_aggregate_meta(name);
// Use the builtin window function instead of the user-defined aggregate function
if udaf.as_ref().is_some_and(|udaf| {
Expand Down Expand Up @@ -434,7 +434,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}),
FunctionArg::Unnamed(FunctionArgExpr::QualifiedWildcard(object_name)) => {
let qualifier = self.object_name_to_table_reference(object_name)?;
// sanity check on qualifier with schema
// Sanity check on qualifier with schema
let qualified_indices = schema.fields_indices_with_qualified(&qualifier);
if qualified_indices.is_empty() {
return plan_err!("Invalid qualifier {qualifier}");
Expand Down
28 changes: 14 additions & 14 deletions datafusion/sql/src/expr/identifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,9 +115,9 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {

let search_result = search_dfschema(&ids, schema);
match search_result {
// found matching field with spare identifier(s) for nested field(s) in structure
// Found matching field with spare identifier(s) for nested field(s) in structure
Some((field, qualifier, nested_names)) if !nested_names.is_empty() => {
// found matching field with spare identifier(s) for nested field(s) in structure
// Found matching field with spare identifier(s) for nested field(s) in structure
for planner in self.context_provider.get_expr_planners() {
if let Ok(planner_result) = planner.plan_compound_identifier(
field,
Expand All @@ -134,21 +134,21 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
plan_err!("could not parse compound identifier from {ids:?}")
}
// found matching field with no spare identifier(s)
// Found matching field with no spare identifier(s)
Some((field, qualifier, _nested_names)) => {
Ok(Expr::Column(Column::from((qualifier, field))))
}
None => {
// return default where use all identifiers to not have a nested field
// Return the default where all identifiers are used, so there is no nested field
// this length check exists because 5 identifiers would require a nested field
if ids.len() == 5 {
not_impl_err!("compound identifier: {ids:?}")
} else {
// check the outer_query_schema and try to find a match
// Check the outer_query_schema and try to find a match
if let Some(outer) = planner_context.outer_query_schema() {
let search_result = search_dfschema(&ids, outer);
match search_result {
// found matching field with spare identifier(s) for nested field(s) in structure
// Found matching field with spare identifier(s) for nested field(s) in structure
Some((field, qualifier, nested_names))
if !nested_names.is_empty() =>
{
Expand All @@ -158,15 +158,15 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
Column::from((qualifier, field)).quoted_flat_name()
)
}
// found matching field with no spare identifier(s)
// Found matching field with no spare identifier(s)
Some((field, qualifier, _nested_names)) => {
// found an exact match on a qualified name in the outer plan schema, so this is an outer reference column
// Found an exact match on a qualified name in the outer plan schema, so this is an outer reference column
Ok(Expr::OuterReferenceColumn(
field.data_type().clone(),
Column::from((qualifier, field)),
))
}
// found no matching field, will return a default
// Found no matching field, will return a default
None => {
let s = &ids[0..ids.len()];
// safe unwrap as s can never be empty or exceed the bounds
Expand All @@ -177,7 +177,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
} else {
let s = &ids[0..ids.len()];
// safe unwrap as s can never be empty or exceed the bounds
// Safe unwrap as s can never be empty or exceed the bounds
let (relation, column_name) = form_identifier(s).unwrap();
Ok(Expr::Column(Column::new(relation, column_name)))
}
Expand Down Expand Up @@ -311,15 +311,15 @@ fn search_dfschema<'ids, 'schema>(
fn generate_schema_search_terms(
ids: &[String],
) -> impl Iterator<Item = (Option<TableReference>, &String, &[String])> {
// take at most 4 identifiers to form a Column to search with
// Take at most 4 identifiers to form a Column to search with
// - 1 for the column name
// - 0 to 3 for the TableReference
let bound = ids.len().min(4);
// search terms from most specific to least specific
// Search terms from most specific to least specific
(0..bound).rev().map(|i| {
let nested_names_index = i + 1;
let qualifier_and_column = &ids[0..nested_names_index];
// safe unwrap as qualifier_and_column can never be empty or exceed the bounds
// Safe unwrap as qualifier_and_column can never be empty or exceed the bounds
let (relation, column_name) = form_identifier(qualifier_and_column).unwrap();
(relation, column_name, &ids[nested_names_index..])
})
Expand All @@ -331,7 +331,7 @@ mod test {

#[test]
// testing according to documentation of generate_schema_search_terms function
// where ensure generated search terms are in correct order with correct values
// where it ensures generated search terms are in the correct order with the correct values
fn test_generate_schema_search_terms() -> Result<()> {
type ExpectedItem = (
Option<TableReference>,
Expand Down
2 changes: 1 addition & 1 deletion datafusion/sql/src/expr/order_by.rs
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
expr_vec.push(Sort::new(
expr,
asc,
// when asc is true, by default nulls last to be consistent with postgres
// When asc is true, nulls are last by default, to be consistent with postgres
// postgres rule: https://www.postgresql.org/docs/current/queries-order.html
nulls_first.unwrap_or(!asc),
))
Expand Down
4 changes: 2 additions & 2 deletions datafusion/sql/src/expr/unary_op.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,15 +37,15 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
}
UnaryOperator::Minus => {
match expr {
// optimization: if it's a number literal, we apply the negative operator
// Optimization: if it's a number literal, we apply the negative operator
// here directly to calculate the new literal.
SQLExpr::Value(Value::Number(n, _)) => {
self.parse_sql_number(&n, true)
}
SQLExpr::Interval(interval) => {
self.sql_interval_to_expr(true, interval)
}
// not a literal, apply negative operator on expression
// Not a literal, apply negative operator on expression
_ => Ok(Expr::Negative(Box::new(self.sql_expr_to_logical_expr(
expr,
schema,
Expand Down
10 changes: 5 additions & 5 deletions datafusion/sql/src/expr/value.rs
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
let value = interval_literal(*interval.value, negative)?;

// leading_field really means the unit if specified
// for example, "month" in `INTERVAL '5' month`
// For example, "month" in `INTERVAL '5' month`
let value = match interval.leading_field.as_ref() {
Some(leading_field) => format!("{value} {leading_field}"),
None => value,
Expand Down Expand Up @@ -323,9 +323,9 @@ const fn try_decode_hex_char(c: u8) -> Option<u8> {
fn parse_decimal_128(unsigned_number: &str, negative: bool) -> Result<Expr> {
// remove leading zeroes
let trimmed = unsigned_number.trim_start_matches('0');
// parse precision and scale, remove decimal point if exists
// Parse precision and scale, remove decimal point if exists
let (precision, scale, replaced_str) = if trimmed == "." {
// special cases for numbers such as “0.”, “000.”, and so on.
// Special cases for numbers such as “0.”, “000.”, and so on.
(1, 0, Cow::Borrowed("0"))
} else if let Some(i) = trimmed.find('.') {
(
Expand All @@ -334,7 +334,7 @@ fn parse_decimal_128(unsigned_number: &str, negative: bool) -> Result<Expr> {
Cow::Owned(trimmed.replace('.', "")),
)
} else {
// no decimal point, keep as is
// No decimal point, keep as is
(trimmed.len(), 0, Cow::Borrowed(trimmed))
};

Expand All @@ -344,7 +344,7 @@ fn parse_decimal_128(unsigned_number: &str, negative: bool) -> Result<Expr> {
)))
})?;

// check precision overflow
// Check precision overflow
if precision as u8 > DECIMAL128_MAX_PRECISION {
return Err(DataFusionError::from(ParserError(format!(
"Cannot parse {replaced_str} as i128 when building decimal: precision overflow"
Expand Down
6 changes: 3 additions & 3 deletions datafusion/sql/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -761,10 +761,10 @@ impl<'a> DFParser<'a> {
// Note that mixing both names and definitions is not allowed
let peeked = self.parser.peek_nth_token(2);
if peeked == Token::Comma || peeked == Token::RParen {
// list of column names
// List of column names
builder.table_partition_cols = Some(self.parse_partitions()?)
} else {
// list of column defs
// List of column defs
let (cols, cons) = self.parse_columns()?;
builder.table_partition_cols = Some(
cols.iter().map(|col| col.name.to_string()).collect(),
Expand Down Expand Up @@ -850,7 +850,7 @@ impl<'a> DFParser<'a> {
options.push((key, value));
let comma = self.parser.consume_token(&Token::Comma);
if self.parser.consume_token(&Token::RParen) {
// allow a trailing comma, even though it's not in standard
// Allow a trailing comma, even though it's not in standard
break;
} else if !comma {
return self.expected(
Expand Down
22 changes: 11 additions & 11 deletions datafusion/sql/src/planner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ impl ValueNormalizer {
/// CTEs, Views, subqueries and PREPARE statements. The states include
/// Common Table Expression (CTE) provided with WITH clause and
/// Parameter Data Types provided with PREPARE statement and the query schema of the
/// outer query plan
/// outer query plan.
///
/// # Cloning
///
Expand Down Expand Up @@ -166,12 +166,12 @@ impl PlannerContext {
self
}

// return a reference to the outer queries schema
// Return a reference to the outer query's schema
pub fn outer_query_schema(&self) -> Option<&DFSchema> {
self.outer_query_schema.as_ref().map(|s| s.as_ref())
}

/// sets the outer query schema, returning the existing one, if
/// Sets the outer query schema, returning the existing one, if
/// any
pub fn set_outer_query_schema(
&mut self,
Expand All @@ -181,12 +181,12 @@ impl PlannerContext {
schema
}

// return a clone of the outer FROM schema
// Return a clone of the outer FROM schema
pub fn outer_from_schema(&self) -> Option<Arc<DFSchema>> {
self.outer_from_schema.clone()
}

/// sets the outer FROM schema, returning the existing one, if any
/// Sets the outer FROM schema, returning the existing one, if any
pub fn set_outer_from_schema(
&mut self,
mut schema: Option<DFSchemaRef>,
Expand All @@ -195,7 +195,7 @@ impl PlannerContext {
schema
}

/// extends the FROM schema, returning the existing one, if any
/// Extends the FROM schema, returning the existing one, if any
pub fn extend_outer_from_schema(&mut self, schema: &DFSchemaRef) -> Result<()> {
match self.outer_from_schema.as_mut() {
Some(from_schema) => Arc::make_mut(from_schema).merge(schema),
Expand All @@ -209,7 +209,7 @@ impl PlannerContext {
&self.prepare_param_data_types
}

/// returns true if there is a Common Table Expression (CTE) /
/// Returns true if there is a Common Table Expression (CTE) /
/// Subquery for the specified name
pub fn contains_cte(&self, cte_name: &str) -> bool {
self.ctes.contains_key(cte_name)
Expand Down Expand Up @@ -520,9 +520,9 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
| SQLDataType::CharVarying(_)
| SQLDataType::CharacterLargeObject(_)
| SQLDataType::CharLargeObject(_)
// precision is not supported
// Precision is not supported
| SQLDataType::Timestamp(Some(_), _)
// precision is not supported
// Precision is not supported
| SQLDataType::Time(Some(_), _)
| SQLDataType::Dec(_)
| SQLDataType::BigNumeric(_)
Expand Down Expand Up @@ -586,7 +586,7 @@ pub fn object_name_to_table_reference(
object_name: ObjectName,
enable_normalization: bool,
) -> Result<TableReference> {
// use destructure to make it clear no fields on ObjectName are ignored
// Use destructure to make it clear no fields on ObjectName are ignored
let ObjectName(idents) = object_name;
idents_to_table_reference(idents, enable_normalization)
}
Expand All @@ -597,7 +597,7 @@ pub(crate) fn idents_to_table_reference(
enable_normalization: bool,
) -> Result<TableReference> {
struct IdentTaker(Vec<Ident>);
/// take the next identifier from the back of idents, panic'ing if
/// Take the next identifier from the back of idents, panicking if
/// there are none left
impl IdentTaker {
fn take(&mut self, enable_normalization: bool) -> String {
Expand Down
2 changes: 1 addition & 1 deletion datafusion/sql/src/query.rs
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ fn convert_usize_with_check(n: i64, arg_name: &str) -> Result<usize> {
/// Returns the order by expressions from the query.
fn to_order_by_exprs(order_by: Option<OrderBy>) -> Result<Vec<OrderByExpr>> {
let Some(OrderBy { exprs, interpolate }) = order_by else {
// if no order by, return an empty array
// If no order by, return an empty array.
return Ok(vec![]);
};
if let Some(_interpolate) = interpolate {
Expand Down
4 changes: 2 additions & 2 deletions datafusion/sql/src/relation/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
.build()?;
(plan, alias)
} else {
// normalize name and alias
// Normalize name and alias
let table_ref = self.object_name_to_table_reference(name)?;
let table_name = table_ref.to_string();
let cte = planner_context.get_cte(&table_name);
Expand Down Expand Up @@ -163,7 +163,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
subquery: TableFactor,
planner_context: &mut PlannerContext,
) -> Result<LogicalPlan> {
// At this point for a syntacitally valid query the outer_from_schema is
// At this point for a syntactically valid query the outer_from_schema is
// guaranteed to be set, so the `.unwrap()` call will never panic. This
// is the case because we only call this method for lateral table
// factors, and those can never be the first factor in a FROM list. This
Expand Down
Loading

0 comments on commit f7591fb

Please sign in to comment.