Skip to content

Commit

Permalink
Make all Common_Spec tests pass for SQLServer (#10810)
Browse files Browse the repository at this point in the history
* More SQL tests

* Remove duplicate

* Fix another test

* Temp fix

* More temporary fixes

* More green

* Another green

* Another one

* Fix counts

* Temporary change

* Fix tests

* Auto-commit work in progress before clean build on 2024-09-06 10:53:46

* Another fix

* Green

* Specialise ensure_query_has_no_holes

* cleanup

* Cleanup

* Cleanup

* Clean

* Code review changes

(cherry picked from commit b8516f7)
  • Loading branch information
AdRiley authored and jdunkerley committed Sep 13, 2024
1 parent cb7e2d6 commit 55690c7
Show file tree
Hide file tree
Showing 14 changed files with 148 additions and 61 deletions.
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Table import Aggregate_Column, Value_Type

Expand Down Expand Up @@ -28,6 +29,7 @@ import Standard.Database.SQL_Type.SQL_Type
from Standard.Database.Dialect import Temp_Table_Style
from Standard.Database.Dialect_Flags import all
from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation
from Standard.Database.Internal.JDBC_Connection import JDBC_Connection

import project.Database.Redshift.Internal.Redshift_Error_Mapper.Redshift_Error_Mapper

Expand Down Expand Up @@ -217,3 +219,8 @@ type Redshift_Dialect
needs_literal_table_cast self value_type =
_ = value_type
False

## PRIVATE
   Checks that a raw SQL query contains no unfilled holes ('?' placeholders),
   reporting Illegal_Argument if it does.
   Delegates to the generic check on the JDBC connection.
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
jdbc.ensure_query_has_no_holes raw_sql
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,7 @@ type Connection
table_naming_helper.verify_table_name name <|
make_table_for_name self name alias
SQL_Query.Raw_SQL raw_sql -> handle_sql_errors <| alias.if_not_error <|
self.jdbc_connection.ensure_query_has_no_holes raw_sql . if_not_error <|
self.dialect.ensure_query_has_no_holes self.jdbc_connection raw_sql . if_not_error <|
columns = self.fetch_columns raw_sql Statement_Setter.null
name = if alias == "" then (UUID.randomUUID.to_text) else alias
ctx = Context.for_query raw_sql name
Expand Down
8 changes: 8 additions & 0 deletions distribution/lib/Standard/Database/0.0.0-dev/src/Dialect.enso
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Unimplemented.Unimplemented

import Standard.Table.Internal.Problem_Builder.Problem_Builder
Expand Down Expand Up @@ -27,6 +28,7 @@ import project.SQL_Statement.SQL_Statement
import project.SQL_Type.SQL_Type
from project.Dialect_Flags import all
from project.Errors import SQL_Error, Unsupported_Database_Operation
from project.Internal.JDBC_Connection import JDBC_Connection
from project.Internal.Result_Set import result_set_to_table

## PRIVATE
Expand Down Expand Up @@ -275,6 +277,12 @@ type Dialect
_ = [base_table, key_columns, resolved_aggregates, problem_builder]
Unimplemented.throw "This is an interface only."

## PRIVATE
   Interface method: checks that a raw SQL query contains no unfilled holes.
   Concrete dialects must override this; the base implementation only
   signals that it is unimplemented.
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
ensure_query_has_no_holes jdbc:JDBC_Connection raw_sql:Text =
_ = [jdbc, raw_sql]
Unimplemented.throw "This is an interface only."

## PRIVATE

The dialect of SQLite databases.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ from project.Dialect import Temp_Table_Style
from Standard.Database.Dialect_Flags import all
from project.Errors import SQL_Error, Unsupported_Database_Operation
from project.Internal.IR.Operation_Metadata import Date_Period_Metadata
from project.Internal.JDBC_Connection import JDBC_Connection

polyglot java import java.sql.Types

Expand Down Expand Up @@ -322,6 +323,11 @@ type Postgres_Dialect
_ = value_type
False

## PRIVATE
   Checks that a raw SQL query contains no unfilled holes ('?' placeholders),
   reporting Illegal_Argument if it does.
   Delegates to the generic check on the JDBC connection.
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
jdbc.ensure_query_has_no_holes raw_sql

## PRIVATE
make_dialect_operations =
cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ import project.SQL_Type.SQL_Type
from project.Dialect import Temp_Table_Style
from Standard.Database.Dialect_Flags import all
from project.Errors import SQL_Error, Unsupported_Database_Operation
from project.Internal.JDBC_Connection import JDBC_Connection

## PRIVATE

Expand Down Expand Up @@ -325,6 +326,11 @@ type SQLite_Dialect
_ = value_type
False

## PRIVATE
   Checks that a raw SQL query contains no unfilled holes ('?' placeholders),
   reporting Illegal_Argument if it does.
   Delegates to the generic check on the JDBC connection.
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
jdbc.ensure_query_has_no_holes raw_sql

## PRIVATE
We need custom handling for First and Last, as SQLite does not support
such aggregation functions out of the box, so instead we create a row
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import Standard.Base.Runtime.Context

import project.SQL_Query.SQL_Query
from project.Errors import SQL_Error, Table_Already_Exists
from project.Internal.Upload.Operations.Internal_Core import internal_create_table_structure
from project.Internal.Upload.Operations.Internal_Core import internal_create_table_structure, resolve_temp_table_name

## PRIVATE
Creates a new database table with the provided structure and returns the name
Expand All @@ -23,15 +23,16 @@ from project.Internal.Upload.Operations.Internal_Core import internal_create_tab
intercepting the 'already exists' error.
create_table_implementation connection table_name structure primary_key temporary allow_existing on_problems:Problem_Behavior =
connection.base_connection.maybe_run_maintenance
resolved_table_name = resolve_temp_table_name connection temporary table_name
table_naming_helper = connection.base_connection.table_naming_helper
on_exists =
if allow_existing then connection.query (SQL_Query.Table_Name table_name) else Error.throw (Table_Already_Exists.Error table_name)
table_naming_helper.verify_table_name table_name <|
case connection.base_connection.table_exists table_name of
if allow_existing then connection.query (SQL_Query.Table_Name resolved_table_name) else Error.throw (Table_Already_Exists.Error resolved_table_name)
table_naming_helper.verify_table_name resolved_table_name <|
case connection.base_connection.table_exists resolved_table_name of
True -> on_exists
False ->
dry_run = Context.Output.is_enabled.not
effective_table_name = if dry_run.not then table_name else table_naming_helper.generate_dry_run_table_name table_name
effective_table_name = if dry_run.not then table_name else table_naming_helper.generate_dry_run_table_name resolved_table_name
effective_temporary = temporary || dry_run
created_table_name = Context.Output.with_enabled <|
if dry_run then
Expand All @@ -53,5 +54,5 @@ create_table_implementation connection table_name structure primary_key temporar
# If the table was just created by someone else
case dry_run of
# If this was a dry-run, we had a race condition - to ensure correct structure, we re-try the whole operation
True -> create_table_implementation connection table_name structure primary_key temporary allow_existing on_problems
True -> create_table_implementation connection resolved_table_name structure primary_key temporary allow_existing on_problems
False -> on_exists
Original file line number Diff line number Diff line change
Expand Up @@ -9,22 +9,23 @@ import project.Internal.DDL_Transaction
from project.Errors import SQL_Error, Table_Already_Exists
from project.Internal.Upload.Helpers.Constants import dry_run_row_limit
from project.Internal.Upload.Helpers.Error_Helpers import handle_upload_errors
from project.Internal.Upload.Operations.Internal_Core import internal_upload_table
from project.Internal.Upload.Operations.Internal_Core import internal_upload_table, resolve_temp_table_name

## PRIVATE
select_into_table_implementation source_table connection table_name primary_key temporary on_problems:Problem_Behavior =
connection.base_connection.maybe_run_maintenance
resolved_table_name = resolve_temp_table_name connection temporary table_name
table_naming_helper = connection.base_connection.table_naming_helper
table_naming_helper.verify_table_name table_name <|
table_naming_helper.verify_table_name resolved_table_name <|
Panic.recover SQL_Error <| handle_upload_errors <|
real_target_already_exists = connection.base_connection.table_exists table_name
if real_target_already_exists then Error.throw (Table_Already_Exists.Error table_name) else
real_target_already_exists = connection.base_connection.table_exists resolved_table_name
if real_target_already_exists then Error.throw (Table_Already_Exists.Error resolved_table_name) else
dry_run = Context.Output.is_enabled.not
case dry_run of
False ->
upload_table_in_transaction source_table connection table_name primary_key temporary=temporary on_problems=on_problems row_limit=Nothing
upload_table_in_transaction source_table connection resolved_table_name primary_key temporary=temporary on_problems=on_problems row_limit=Nothing
True ->
tmp_table_name = table_naming_helper.generate_dry_run_table_name table_name
tmp_table_name = table_naming_helper.generate_dry_run_table_name resolved_table_name
create_dry_run_table _ =
table = Context.Output.with_enabled <|
## This temporary table can be safely dropped if it
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,9 @@ import Standard.Database.SQL_Type.SQL_Type
from Standard.Database.Dialect import Temp_Table_Style
from Standard.Database.Dialect_Flags import all
from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation
from Standard.Database.Internal.Base_Generator import lift_binary_op
from Standard.Database.Internal.IR.Operation_Metadata import Date_Period_Metadata
from Standard.Database.Internal.JDBC_Connection import JDBC_Connection
from Standard.Database.Internal.Statement_Setter import fill_hole_default

import project.Internal.SQLServer_Error_Mapper.SQLServer_Error_Mapper
Expand Down Expand Up @@ -283,34 +285,55 @@ type SQLSever_Dialect
Warning.attach (Inexact_Type_Coercion.Warning base_type new_type unavailable=False) new_type
_ -> base_type

## PRIVATE
   Whether a literal table value of the given type needs an explicit cast.
   This dialect never requires one, so the argument is ignored.
needs_literal_table_cast : Value_Type -> Boolean
needs_literal_table_cast self value_type =
_ = value_type
False

## PRIVATE
   Checks that a raw SQL query contains no unfilled holes ('?' placeholders).
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
## The JDBC driver cannot report parameter holes for queries touching
   SQL Server temp tables (names containing '#'), so the check is skipped
   for such queries — a failure at execution time still yields a decent
   error message.
if raw_sql.contains "#" . not then
jdbc.ensure_query_has_no_holes raw_sql

## PRIVATE
make_dialect_operations =
cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]]
text = [starts_with, contains, ends_with, agg_shortest, agg_longest, make_case_sensitive, ["REPLACE", replace], left, right]+concat_ops+cases+trim_ops
counts = [agg_count_is_null, agg_count_empty, agg_count_not_empty, ["COUNT_DISTINCT", agg_count_distinct], ["COUNT_DISTINCT_INCLUDE_NULL", agg_count_distinct_include_null]]
arith_extensions = [is_nan, is_inf, is_finite, floating_point_div, mod_op, decimal_div, decimal_mod, ["ROW_MIN", Base_Generator.make_function "LEAST"], ["ROW_MAX", Base_Generator.make_function "GREATEST"]]
bool = [bool_or]
eq = lift_binary_op "==" make_equals
compare = [eq]

stddev_pop = ["STDDEV_POP", Base_Generator.make_function "stddev_pop"]
stddev_samp = ["STDDEV_SAMP", Base_Generator.make_function "stddev_samp"]
stats = [agg_median, agg_mode, agg_percentile, stddev_pop, stddev_samp]
date_ops = [make_extract_as_int "year", make_extract_as_int "quarter", make_extract_as_int "month", make_extract_as_int "week", make_extract_as_int "day", make_extract_as_int "hour", make_extract_as_int "minute", make_extract_fractional_as_int "second", make_extract_fractional_as_int "millisecond" modulus=1000, make_extract_fractional_as_int "microsecond" modulus=1000, ["date_add", make_date_add], ["date_diff", make_date_diff], ["date_trunc_to_day", make_date_trunc_to_day]]
special_overrides = []
special_overrides = [is_null]
other = [["RUNTIME_ERROR", make_runtime_error_op]]
my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + date_ops + special_overrides + other
my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + compare + date_ops + special_overrides + other
Base_Generator.base_dialect_operations . extend_with my_mappings

## PRIVATE
   Generates the SQL `IS NULL` test applied to the parenthesised argument.
is_null = Base_Generator.lift_unary_op "IS_NULL" arg->
arg.paren ++ " IS NULL"

## PRIVATE
agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NULL)"
SQL_Builder.code "SUM(CASE WHEN " ++ arg.paren ++ " IS NULL THEN 1 ELSE 0 END)"

## PRIVATE
agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg->
SQL_Builder.code "COUNT_IF("++ arg.paren ++ " IS NULL OR " ++ arg.paren ++ " == '')"
SQL_Builder.code "SUM(CASE WHEN (" ++ arg.paren ++ " IS NULL) OR (" ++ arg.paren ++ " = '') THEN 1 ELSE 0 END)"

## PRIVATE
agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg->
SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NOT NULL AND " ++ arg.paren ++ " != '')"
SQL_Builder.code "SUM(CASE WHEN (" ++ arg.paren ++ " IS NOT NULL) AND (" ++ arg.paren ++ " != '') THEN 1 ELSE 0 END)"


## PRIVATE
agg_median = Base_Generator.lift_unary_op "MEDIAN" arg->
Expand Down Expand Up @@ -339,6 +362,14 @@ first_last_aggregators =
last_not_null = make_first_aggregator reverse=True ignore_null=True
[["FIRST", first], ["FIRST_NOT_NULL", first_not_null], ["LAST", last], ["LAST_NOT_NULL", last_not_null]]

## PRIVATE
   Builds the SQL for an equality comparison.
   If either operand prepares to exactly the single interpolated value
   [True] (i.e. it is a literal boolean True), the comparison collapses to
   the other operand alone — presumably because a bare boolean literal is
   not usable in a SQL Server `=` comparison (NOTE(review): confirm).
   Otherwise emits `a = b`.
make_equals a b =
case a.build.prepare.second==[True] of
True -> b.paren
False -> case b.build.prepare.second==[True] of
True -> a.paren
False -> a.paren ++ " = " ++ b.paren

## PRIVATE
make_first_aggregator reverse ignore_null args =
if args.length < 2 then Error.throw (Illegal_State.Error "Insufficient number of arguments for the operation.") else
Expand Down Expand Up @@ -430,9 +461,7 @@ right = Base_Generator.lift_binary_op "RIGHT" str-> n->

## PRIVATE
make_order_descriptor internal_column sort_direction text_ordering =
nulls = case sort_direction of
Sort_Direction.Ascending -> Nulls_Order.First
Sort_Direction.Descending -> Nulls_Order.Last
nulls = Nothing
case text_ordering of
Nothing ->
Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation=Nothing
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,9 @@ type SQLServer_Type_Mapping
Types.VARCHAR ->
effective_size = if sql_type.precision==max_length || (sql_type.precision==9 && sql_type.scale==9) then Nothing else sql_type.precision
Value_Type.Char size=effective_size variable_length=True
Types.NVARCHAR ->
effective_size = if sql_type.precision==max_length || (sql_type.precision==9 && sql_type.scale==9) then Nothing else sql_type.precision
Value_Type.Char size=effective_size variable_length=True
Types.DATE -> Value_Type.Date
Types.TIME -> Value_Type.Time
Types.TIMESTAMP -> case sql_type.name of
Expand All @@ -108,8 +111,12 @@ type SQLServer_Type_Mapping
the database backend.
infer_return_type : (SQL_Expression -> SQL_Type_Reference) -> Text -> Vector -> SQL_Expression -> SQL_Type_Reference
infer_return_type infer_from_database_callback op_name arguments expression =
_ = [op_name, arguments]
infer_from_database_callback expression
case operations_dict.contains_key op_name of
False -> infer_from_database_callback expression
True ->
handler = operations_dict.at op_name
sql_type = handler arguments
SQL_Type_Reference.from_constant sql_type

## PRIVATE
We want to respect any overriding references, but references that rely on
Expand Down Expand Up @@ -142,6 +149,11 @@ type SQLServer_Type_Mapping
on_unknown_type sql_type =
Value_Type.Unsupported_Data_Type sql_type.name sql_type

## PRIVATE
   Maps operation names to functions that infer their result type.
   Both "IS_NULL" and "==" always yield the SQL Server BIT type,
   regardless of their arguments.
operations_dict : Dictionary Text (Vector -> SQL_Type)
operations_dict = Dictionary.from_vector [["IS_NULL", const (SQL_Type.Value Types.BIT "BIT")],["==", const (SQL_Type.Value Types.BIT "BIT")]]

## PRIVATE
This is the maximum size that JDBC driver reports for 'unbounded' types in
SQLServer.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ from Standard.Table import Table
import Standard.Database.Column_Description.Column_Description
import Standard.Database.Connection.Connection.Connection
import Standard.Database.DB_Table.DB_Table
import Standard.Database.Internal.Common.Encoding_Limited_Naming_Properties.Encoding_Limited_Naming_Properties
import Standard.Database.Internal.Connection.Entity_Naming_Properties.Entity_Naming_Properties
import Standard.Database.Internal.JDBC_Connection
import Standard.Database.SQL_Query.SQL_Query
Expand All @@ -31,8 +32,11 @@ type SQLServer_Connection
create : Text -> Vector -> (Text -> Text -> SQLServer_Connection) -> SQLServer_Connection
create url properties make_new =
jdbc_connection = JDBC_Connection.create url properties
entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection is_case_sensitive=False
SQLServer_Connection.Value (Connection.new jdbc_connection SQLServer_Dialect.sqlserver entity_naming_properties) make_new
jdbc_entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection is_case_sensitive=True
## jdbc reports table name length limit as 128, but it actually seems to be 116 for temp tables so we override it
limited = Encoding_Limited_Naming_Properties.Instance Encoding.utf_8 limit=116 is_case_sensitive=True
modified_entity_naming_properties = Entity_Naming_Properties.Value for_table_names=limited for_column_names=jdbc_entity_naming_properties.for_column_names for_generated_column_names=jdbc_entity_naming_properties.for_generated_column_names
SQLServer_Connection.Value (Connection.new jdbc_connection SQLServer_Dialect.sqlserver modified_entity_naming_properties) make_new

## PRIVATE

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ from Standard.Database.Dialect import Temp_Table_Style
from Standard.Database.Dialect_Flags import all
from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation
from Standard.Database.Internal.IR.Operation_Metadata import Date_Period_Metadata
from Standard.Database.Internal.JDBC_Connection import JDBC_Connection
from Standard.Database.Internal.Statement_Setter import fill_hole_default

import project.Internal.Snowflake_Error_Mapper.Snowflake_Error_Mapper
Expand Down Expand Up @@ -305,6 +306,11 @@ type Snowflake_Dialect
Value_Type.Date_Time _ -> True
_ -> False

## PRIVATE
   Checks that a raw SQL query contains no unfilled holes ('?' placeholders),
   reporting Illegal_Argument if it does.
   Delegates to the generic check on the JDBC connection.
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
jdbc.ensure_query_has_no_holes raw_sql

## PRIVATE
In Snowflake we need to create tables outside of transactions.
However, currently we decide to opt-out of the integrity check for
Expand Down
Loading

0 comments on commit 55690c7

Please sign in to comment.