diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso index 6fb01efcb3d1..bf5c88ae95a7 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso @@ -547,8 +547,9 @@ private _make_table_for_name connection name alias internal_temporary_keep_alive warning. We do not want to fail, as we do want to allow the user to access any table already present in the database. DB_Table_Module.make_table connection name columns ctx on_problems=Problem_Behavior.Report_Warning - result.catch SQL_Error _-> + if result.is_error then Error.throw (Table_Not_Found.Error name) + result ## PRIVATE private _check_statement_is_allowed connection stmt = diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso index 6b24578cae4f..2bce8fb31751 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso @@ -96,7 +96,7 @@ type DB_Column Converts this column into a single-column table. to_table : DB_Table to_table self = - DB_Table.Value self.name self.connection [self.as_internal] self.context + DB_Table.new self.name self.connection [self.as_internal] self.context ## ALIAS column type, field info, metadata GROUP Standard.Base.Metadata diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso index 67ed8c188ab7..3e190cf8899f 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso @@ -68,6 +68,7 @@ import project.Internal.IR.Query.Query import project.Internal.IR.SQL_Expression.SQL_Expression import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind import project.Internal.SQL_Type_Reference.SQL_Type_Reference +import project.Internal.Type_Refinements.DB_Table_Refinements import project.SQL_Query.SQL_Query import project.SQL_Statement.SQL_Statement import project.SQL_Type.SQL_Type @@ -80,9 +81,8 @@ polyglot java import java.util.UUID ## Represents a column-oriented table data structure backed by a database. type DB_Table - ## PRIVATE - - Represents a column-oriented table data structure backed by a database. + ## Internal constructor that should not be used directly. + Please use `DB_Table.new` instead. Arguments: - internal_name: The name of the table. @@ -91,6 +91,14 @@ type DB_Table - context: The context associated with this table. private Value internal_name:Text connection:(Connection | Any) (internal_columns:(Vector Internal_Column)) context:Context + ## The internal constructor for creating a DB_Table instance. + + It can perform some additional operations, like refining the type, + so it should always be preferred over calling `DB_Table.Value` directly. + private new (internal_name:Text) connection:(Connection | Any) (internal_columns:(Vector Internal_Column)) context:Context -> DB_Table = + DB_Table_Refinements.refine_table <| + DB_Table.Value internal_name connection internal_columns context + ## GROUP Standard.Base.Metadata ICON metadata The name of the table.
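The hunks above route every `DB_Table` construction through the new `DB_Table.new`, which applies `DB_Table_Refinements.refine_table`: a table with exactly one column is refined to the intersection type `DB_Table & DB_Column`, so it can be used directly where a column is expected. A minimal sketch of the observable effect, adapted from the tests added at the end of this diff (`table_builder` is the test-setup helper; the snippet is illustrative, not part of the patch):

    t = table_builder [["A", [1, 2, 3]]]
    t.should_be_a DB_Table           # still a table with its usual API
    (t : DB_Column) . name           # the cast now succeeds and yields "A"
    (t : DB_Column) . to_vector      # [1, 2, 3]
    # A table with two or more columns is not refined, so the same cast
    # panics with a Type_Error.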
@@ -1648,7 +1656,7 @@ type DB_Table problem_builder.attach_problems_before on_problems <| new_from = From_Spec.Join sql_join_kind left_setup.subquery right_setup.subquery on_expressions new_ctx = Context.for_subquery new_from . set_where_filters where_expressions - DB_Table.Value new_table_name self.connection result_columns new_ctx + DB_Table.new new_table_name self.connection result_columns new_ctx ## ALIAS append, cartesian join GROUP Standard.Base.Calculations @@ -2067,7 +2075,7 @@ type DB_Table input_column = Internal_Column.Value name (infer_return_type expression) expression dialect.adapt_unified_column input_column result_type infer_return_type - DB_Table.Value union_alias self.connection new_columns new_ctx + DB_Table.new union_alias self.connection new_columns new_ctx ## ALIAS average, count, count distinct, first, group by, last, longest, maximum, mean, median, minimum, mode, percentile, shortest, standard deviation, sum, summarize, variance GROUP Standard.Base.Calculations @@ -2805,7 +2813,7 @@ type DB_Table Arguments: - columns: The columns with which to update this table. updated_columns : Vector Internal_Column -> DB_Table - updated_columns self internal_columns = DB_Table.Value self.name self.connection internal_columns self.context + updated_columns self internal_columns = DB_Table.new self.name self.connection internal_columns self.context ## PRIVATE @@ -2814,7 +2822,7 @@ type DB_Table Arguments: - ctx: The new context for this table. updated_context : Context -> DB_Table - updated_context self ctx = DB_Table.Value self.name self.connection self.internal_columns ctx + updated_context self ctx = DB_Table.new self.name self.connection self.internal_columns ctx ## PRIVATE @@ -2838,9 +2846,9 @@ type DB_Table setup = ctx.as_subquery self.name [internal_columns] new_ctx = Context.for_subquery setup.subquery new_columns = setup.new_columns.first - DB_Table.Value self.name self.connection new_columns new_ctx + DB_Table.new self.name self.connection new_columns new_ctx False -> - DB_Table.Value self.name self.connection internal_columns ctx + DB_Table.new self.name self.connection internal_columns ctx ## PRIVATE Nests a table as a subquery, using `updated_context_and_columns`, which @@ -2926,10 +2934,11 @@ type DB_Table - `Auto_Detect`: The file format is determined by the provided file. - `Bytes` and `Plain_Text`: The Table does not support these types in - the `write` function. If passed as format, an - `Illegal_Argument` is raised. To write out the table as plain - text, the user needs to call the `Text.from Table` method and then - use the `Text.write` function. + the `write` function. If passed as format, an + `Illegal_Argument` is raised. To write out the table as plain + text, the user needs to convert the Table to Text + (e.g. using the `to_delimited` method) and then use the `Text.write` + function. > Example Write a database table to a CSV file. @@ -3216,7 +3225,7 @@ make_table connection table_name columns ctx on_problems = problem_builder.report_unique_name_strategy column_names_validator # We do not want to stop the table from being fetched, so we report the issues as warnings.
problem_builder.attach_problems_before on_problems <| - DB_Table.Value table_name connection cols ctx + DB_Table.new table_name connection cols ctx ## PRIVATE By default, join on the first column, unless it's a cross join, in which @@ -3276,7 +3285,7 @@ make_literal_table connection column_vectors column_names alias = if needs_cast.not then base_column else connection.dialect.make_cast base_column sql_type infer_type_from_database - DB_Table.Value alias connection internal_columns context + DB_Table.new alias connection internal_columns context ## PRIVATE Many_Files_List.from (that : DB_Table) = diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso index aa7cd603d1cb..0356d29a61eb 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso @@ -57,7 +57,7 @@ build_lookup_query base_table lookup_table key_columns add_new_columns allow_unm new_ctx_with_invariant_check = new_ctx.add_where_filters [make_invariant_check subquery_setup.lookup_counter allow_unmatched_rows] precheck_for_duplicate_matches lookup_columns subquery_setup base_table.connection new_ctx <| - DB_Table.Value subquery_setup.new_table_name base_table.connection new_columns new_ctx_with_invariant_check + DB_Table.new subquery_setup.new_table_name base_table.connection new_columns new_ctx_with_invariant_check ## PRIVATE Checks if they key contains NULL values or if there would be unmatched rows @@ -196,7 +196,7 @@ precheck_for_duplicate_matches lookup_columns subquery_setup connection new_ctx key_columns_for_duplicate_check = (_.flatten) <| lookup_columns.map_with_index ix-> c-> case c of Lookup_Column.Key_Column _ _ -> [subquery_setup.get_self_column ix] _ -> [] - table_for_duplicate_check = DB_Table.Value subquery_setup.new_table_name connection [subquery_setup.lookup_counter]+key_columns_for_duplicate_check new_ctx + table_for_duplicate_check = DB_Table.new subquery_setup.new_table_name connection [subquery_setup.lookup_counter]+key_columns_for_duplicate_check new_ctx duplicate_lookup_matches = table_for_duplicate_check.filter 0 (Filter_Condition.Greater than=1) . read (..First 1) case duplicate_lookup_matches.row_count > 0 of True -> diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/DB_Table_Refinements.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/DB_Table_Refinements.enso new file mode 100644 index 000000000000..0db0e217a13a --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/DB_Table_Refinements.enso @@ -0,0 +1,15 @@ +private + +from Standard.Base import all + +import project.DB_Column.DB_Column +import project.DB_Table.DB_Table +from project.Internal.Type_Refinements.Single_Column_DB_Table_Conversions import all + +refine_table (table : DB_Table) = + if is_single_column table . 
not then table else + r = table : DB_Table & DB_Column + r + +is_single_column table:DB_Table -> Boolean = + table.column_count == 1 diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/Single_Column_DB_Table_Conversions.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/Single_Column_DB_Table_Conversions.enso new file mode 100644 index 000000000000..23fe09484ca9 --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Type_Refinements/Single_Column_DB_Table_Conversions.enso @@ -0,0 +1,12 @@ +private + +from Standard.Base import all + +import project.DB_Column.DB_Column +import project.DB_Table.DB_Table +from project.Internal.Type_Refinements.DB_Table_Refinements import is_single_column + +## This conversion is internal and should never be exported. +DB_Column.from (that : DB_Table) -> DB_Column = + Runtime.assert (is_single_column that) + that.at 0 diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso index d5088f50b5e0..186b17dc5385 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso @@ -27,6 +27,7 @@ import project.Internal.Offset_Helper import project.Internal.Parse_Values_Helper import project.Internal.Read_Many_Helpers import project.Internal.Storage +import project.Internal.Type_Refinements.Column_Refinements import project.Internal.Value_Type_Helpers import project.Internal.Widget_Helpers import project.Fill_With.Fill_With @@ -38,7 +39,7 @@ from project.Errors import Conversion_Failure, Inexact_Type_Coercion, Invalid_Co from project.Internal.Column_Format import all from project.Internal.Java_Exports import make_string_builder from project.Internal.Storage import enso_to_java, java_to_enso -from project.Internal.Type_Refinements.Single_Value_Column import refine_with_single_value +from project.Table import from_java_table polyglot java import org.enso.base.Time_Utils polyglot java import org.enso.table.data.column.operation.cast.CastProblemAggregator @@ -142,7 +143,7 @@ type Column Creates a new column given a Java Column object. from_java_column java_column:Java_Column -> Column = column = Column.Value java_column - column |> refine_with_single_value + Column_Refinements.refine_column column ## PRIVATE ADVANCED @@ -160,12 +161,16 @@ type Column Column.from_java_column java_column ## PRIVATE - - A representation of a column in a Table. + Internal constructor that should not be used directly. + Please use `from_java_column` instead. Arguments: - - java_column: The internal representation of the column. - private Value java_column + - internal_java_column: The internal representation of the column. 
+ private Value internal_java_column + + ## PRIVATE + A getter that is a workaround for bug https://github.com/enso-org/enso/issues/12180 + private java_column self = self.internal_java_column ## PRIVATE ADVANCED @@ -2396,7 +2401,7 @@ type Column example_to_table = Examples.integer_column.to_table to_table : Table - to_table self = Table.Value self.java_column.toTable + to_table self = from_java_table self.java_column.toTable ## ALIAS column type, field info, metadata GROUP Standard.Base.Metadata diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Workbook.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Workbook.enso index 69a6f1cbf4b4..6392c0be6e02 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Workbook.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Workbook.enso @@ -34,6 +34,7 @@ import project.Table.Table from project.Errors import Empty_Sheet, No_Rows from project.Internal.Excel_Reader import handle_invalid_location from project.Internal.Table_Helpers import duplicate_rows +from project.Table import from_java_table polyglot java import java.io.File as Java_File polyglot java import org.apache.poi.ss.usermodel.Workbook @@ -244,7 +245,7 @@ type Excel_Workbook names = self.sheet_names if (query < 1 || query >= names.length) then Error.throw (Illegal_Argument.Error "Worksheet index out of range (1 - "+names.length.to_text+").") else ExcelReader.readRangeByName java_workbook (names.at (query - 1)) java_headers skip_rows java_limit java_problem_aggregator - limit.attach_warning (Table.Value java_table) + limit.attach_warning (from_java_table java_table) ## PRIVATE GROUP Standard.Base.Input @@ -269,7 +270,7 @@ type Excel_Workbook java_table = Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator-> self.with_java_workbook java_workbook-> ExcelReader.readRangeByName java_workbook sheet_name java_headers skip_rows java_limit java_problem_aggregator - row_limit.attach_warning (Table.Value java_table) + row_limit.attach_warning (from_java_table java_table) Excel_Section.Cell_Range address headers skip_rows row_limit -> java_headers = Excel_Reader.make_java_headers headers java_limit = row_limit.rows_to_read @@ -277,7 +278,7 @@ type Excel_Workbook self.with_java_workbook java_workbook-> case address of _ : Excel_Range -> ExcelReader.readRange java_workbook address.java_range java_headers skip_rows java_limit java_problem_aggregator _ : Text -> ExcelReader.readRangeByName java_workbook address java_headers skip_rows java_limit java_problem_aggregator - row_limit.attach_warning (Table.Value java_table) + row_limit.attach_warning (from_java_table java_table) ## PRIVATE GROUP Standard.Base.Input diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso index ad2dff7b8533..077455a2029a 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso @@ -12,6 +12,7 @@ import project.Internal.Java_Problems import project.Rows_To_Read.Rows_To_Read import project.Table.Table from project.Errors import Empty_File_Error, Mismatched_Quote, Parser_Error +from project.Table import from_java_table polyglot java import com.univocity.parsers.common.TextParsingException polyglot java import java.io.InputStream @@ -98,7 +99,7 @@ read_from_reader format java_reader 
on_problems:Problem_Behavior max_columns=409 Java_Problems.with_problem_aggregator on_problems java_problem_aggregator-> reader = prepare_reader format max_columns on_problems java_problem_aggregator java_table = reader.read java_reader - format.row_limit.attach_warning (Table.Value java_table) + format.row_limit.attach_warning (from_java_table java_table) ## PRIVATE prepare_reader format:Delimited_Format max_columns on_problems:Problem_Behavior java_problem_aggregator newline_override=Nothing = diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Writer.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Writer.enso index f14995abd49c..414caf62321d 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Writer.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Writer.enso @@ -14,6 +14,7 @@ import project.Internal.Java_Problems import project.Match_Columns.Match_Columns import project.Table.Table from project.Errors import Column_Count_Mismatch, Column_Name_Mismatch +from project.Table import from_java_table polyglot java import java.io.IOException polyglot java import java.io.PrintWriter @@ -94,7 +95,7 @@ append_to_local_file table format (file : File) match_columns on_problems:Proble Error.throw (Illegal_Argument.Error "Cannot append by name when headers are not present in the existing data.") Match_Columns.By_Position -> ColumnMapper.mapColumnsByPosition table.java_table column_count - reordered_table = Table.Value reordered_java_table + reordered_table = from_java_table reordered_java_table writing_new_file = preexisting_headers == Nothing amended_format_1 = case writing_new_file && (should_write_headers format.headers) of True -> format.with_headers diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Excel_Reader.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Excel_Reader.enso index 7e529266f886..8e1817eda4c9 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Excel_Reader.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Excel_Reader.enso @@ -11,6 +11,7 @@ import project.Internal.Excel_Section.Excel_Section import project.Internal.Java_Problems import project.Table.Table from project.Errors import Duplicate_Output_Column_Names, Empty_Sheet, Invalid_Column_Names, Invalid_Location +from project.Table import from_java_table polyglot java import java.io.File as Java_File polyglot java import org.apache.poi.poifs.filesystem.NotOLE2FileException @@ -60,14 +61,14 @@ read_file file section on_problems:Problem_Behavior xls_format=False = java_table = case sheet of _ : Integer -> ExcelReader.readSheetByIndex java_file sheet (make_java_headers headers) skip_rows java_limit file_format java_problem_aggregator _ : Text -> ExcelReader.readSheetByName java_file sheet (make_java_headers headers) skip_rows java_limit file_format java_problem_aggregator - row_limit.attach_warning (Table.Value java_table) + row_limit.attach_warning (from_java_table java_table) Excel_Section.Cell_Range address headers skip_rows row_limit -> Java_Problems.with_problem_aggregator on_problems java_problem_aggregator-> java_limit = row_limit.rows_to_read java_table = case address of _ : Excel_Range -> ExcelReader.readRange java_file address.java_range (make_java_headers headers) skip_rows java_limit file_format java_problem_aggregator _ : Text -> ExcelReader.readRangeByName java_file address (make_java_headers headers) skip_rows java_limit file_format 
java_problem_aggregator - row_limit.attach_warning (Table.Value java_table) + row_limit.attach_warning (from_java_table java_table) handle_reader file reader diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Read_Many_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Read_Many_Helpers.enso index e665033bb56b..3a39274f1159 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Read_Many_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Read_Many_Helpers.enso @@ -77,9 +77,9 @@ _merge_input_and_tables (input_table : Table) (tables_for_rows : Vector Read_Man multiplicated_inputs = duplicate_rows input_table counts Runtime.assert (unified_data.row_count == multiplicated_inputs.row_count) - Runtime.assert (unified_metadata.is_nothing || (unified_metadata.row_count == unified_data.row_count)) + Runtime.assert ((Nothing == unified_metadata) || (unified_metadata.row_count == unified_data.row_count)) - first_pass = if unified_metadata.is_nothing then multiplicated_inputs else + first_pass = if Nothing == unified_metadata then multiplicated_inputs else multiplicated_inputs.zip unified_metadata right_prefix="" first_pass.zip unified_data right_prefix="" @@ -87,16 +87,16 @@ _merge_input_and_tables (input_table : Table) (tables_for_rows : Vector Read_Man metadata - in such case we want to insert as many Nothing rows for metadata as there are rows in the corresponding data table. _unify_metadata (tables : Vector Read_Many_As_Table_Result) (on_problems : Problem_Behavior) -> Table | Nothing = - has_no_metadata = tables.all r-> r.metadata.is_nothing + has_no_metadata = tables.all r-> Nothing == r.metadata if has_no_metadata then Nothing else unique = Column_Naming_Helper.in_memory.create_unique_name_strategy tables.each r-> - if r.metadata.is_nothing.not then unique.mark_used r.metadata.column_names + if Nothing != r.metadata then unique.mark_used r.metadata.column_names # A dummy column because we cannot create a table with 0 columns, it will be removed after union. We find an unique name for it to avoid conflicts. dummy_column_name = unique.make_unique "_Internal_Placeholder_Column_" tables_for_union = tables.map r-> - if r.metadata.is_nothing.not then r.metadata else + if Nothing != r.metadata then r.metadata else Table.new [Column.from_repeated_item dummy_column_name Nothing r.data.row_count] # Metadata are always merged by-name and columns that appear only in some tables are kept. 
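The Storage.enso hunk below replaces an inline scale check with the shared helper `Value_Type_Helpers.is_decimal_integer`, added near the end of this diff. Per that definition, a `Value_Type.Decimal` counts as integral only when its scale is known and non-positive; a sketch of the expected results (the `precision scale` argument order is assumed from the `Value_Type.Decimal _ scale` pattern in the helper):

    Value_Type_Helpers.is_decimal_integer (Value_Type.Decimal 38 0)       # True  - scale 0 means no fractional digits
    Value_Type_Helpers.is_decimal_integer (Value_Type.Decimal 38 -2)      # True  - a negative scale rounds to hundreds
    Value_Type_Helpers.is_decimal_integer (Value_Type.Decimal 38 2)       # False - two fractional digits
    Value_Type_Helpers.is_decimal_integer (Value_Type.Decimal 38 Nothing) # False - an unknown scale is not assumed integral
    # Any other Value_Type is rejected with an Illegal_Argument error.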
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Storage.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Storage.enso index 23f5666fd0dc..925647f02f1f 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Storage.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Storage.enso @@ -5,6 +5,7 @@ import Standard.Base.Errors.Illegal_State.Illegal_State from Standard.Base.Data.Decimal import from_big_decimal, get_big_decimal import project.Column.Column +import project.Internal.Value_Type_Helpers import project.Value_Type.Bits import project.Value_Type.Value_Type from project.Errors import Inexact_Type_Coercion @@ -69,9 +70,8 @@ closest_storage_type value_type = case value_type of Value_Type.Date_Time _ -> DateTimeType.INSTANCE Value_Type.Time -> TimeOfDayType.INSTANCE Value_Type.Mixed -> AnyObjectType.INSTANCE - Value_Type.Decimal _ scale -> - is_integer = scale.is_nothing.not && scale <= 0 - if is_integer then BigIntegerType.INSTANCE else BigDecimalType.INSTANCE + Value_Type.Decimal _ _ -> + if Value_Type_Helpers.is_decimal_integer value_type then BigIntegerType.INSTANCE else BigDecimalType.INSTANCE Value_Type.Null -> NullType.INSTANCE _ -> Error.throw (Illegal_Argument.Error "Columns of type "+value_type.to_display_text+" are currently not supported in the in-memory backend.") diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Column_Refinements.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Column_Refinements.enso new file mode 100644 index 000000000000..be23ee9c8cb6 --- /dev/null +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Column_Refinements.enso @@ -0,0 +1,37 @@ +private + +from Standard.Base import all + +import project.Column.Column +import project.Internal.Value_Type_Helpers +import project.Value_Type.Value_Type +from project.Internal.Type_Refinements.Single_Value_Column_Conversions import all + +refine_column (column : Column) = + ## We treat a column as single value if it contains a single not-nothing value. + if is_single_value column . not then column else + inferred_value_type = column.inferred_precise_value_type + case inferred_value_type of + Value_Type.Integer _ -> + # `inferred_precise_value_type` will return Integer if the column was Float (or Mixed) but contained integral values - e.g. [2.0] + # We inspect the actual value to correctly deal with both Float and Mixed base type. + value = column.at 0 + case value of + # If the value was really a float, we preserve that. + _ : Float -> (column : Column & Float) + # Otherwise we treat it as an integer. + _ -> (column : Column & Integer) + Value_Type.Float _ -> (column : Column & Float) + Value_Type.Char _ _ -> (column : Column & Text) + Value_Type.Boolean -> (column : Column & Boolean) + Value_Type.Date -> (column : Column & Date) + Value_Type.Time -> (column : Column & Time_Of_Day) + Value_Type.Date_Time True -> (column : Column & Date_Time) + Value_Type.Decimal _ _ -> + is_integer = Value_Type_Helpers.is_decimal_integer inferred_value_type + if is_integer then (column : Column & Integer) else (column : Column & Decimal) + # Other types (e.g. Mixed) are not supported. + _ -> column + +is_single_value column:Column -> Boolean = + (column.length == 1) && (column.at 0 . is_nothing . 
not) diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Column_Table_Conversions.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Column_Table_Conversions.enso new file mode 100644 index 000000000000..7d774a5626f0 --- /dev/null +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Column_Table_Conversions.enso @@ -0,0 +1,45 @@ +private + +from Standard.Base import all + +import project.Column.Column +import project.Table.Table +from project.Internal.Type_Refinements.Single_Value_Column_Conversions import all +from project.Internal.Type_Refinements.Table_Refinements import is_single_column + +## This conversion is internal and should never be exported. +Column.from (that : Table) -> Column = + Runtime.assert (is_single_column that) + that.at 0 + +## This conversion is internal and should never be exported. +Integer.from (that : Table) -> Integer = + Integer.from (Column.from that) + +## This conversion is internal and should never be exported. +Float.from (that : Table) -> Float = + Float.from (Column.from that) + +## This conversion is internal and should never be exported. +Text.from (that : Table) -> Text = + Text.from (Column.from that) + +## This conversion is internal and should never be exported. +Boolean.from (that : Table) -> Boolean = + Boolean.from (Column.from that) + +## This conversion is internal and should never be exported. +Date.from (that : Table) -> Date = + Date.from (Column.from that) + +## This conversion is internal and should never be exported. +Time_Of_Day.from (that : Table) -> Time_Of_Day = + Time_Of_Day.from (Column.from that) + +## This conversion is internal and should never be exported. +Date_Time.from (that : Table) -> Date_Time = + Date_Time.from (Column.from that) + +## This conversion is internal and should never be exported. +Decimal.from (that : Table) -> Decimal = + Decimal.from (Column.from that) diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Value_Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Value_Column.enso deleted file mode 100644 index ef349b0f4d38..000000000000 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Value_Column.enso +++ /dev/null @@ -1,34 +0,0 @@ -private - -from Standard.Base import all - -import project.Column.Column -import project.Value_Type.Value_Type -from project.Internal.Type_Refinements.Single_Value_Column_Extensions import all - -refine_with_single_value (column : Column) = - ## We treat a column as single value if it contains a single not-nothing value. - if is_single_value column . not then column else case column.inferred_precise_value_type of - Value_Type.Integer _ -> - # `inferred_precise_value_type` will return Integer if the column was Float (or Mixed) but contained integral values - e.g. [2.0] - # We inspect the actual value to correctly deal with both Float and Mixed base type. - value = column.at 0 - case value of - # If the value was really a float, we preserve that. - _ : Float -> (column : Column & Float) - # Otherwise we treat it as an integer. 
- _ -> (column : Column & Integer) - Value_Type.Float _ -> (column : Column & Float) - Value_Type.Char _ _ -> (column : Column & Text) - Value_Type.Boolean -> (column : Column & Boolean) - Value_Type.Date -> (column : Column & Date) - Value_Type.Time -> (column : Column & Time_Of_Day) - Value_Type.Date_Time True -> (column : Column & Date_Time) - Value_Type.Decimal _ scale -> - is_integer = scale == 0 - if is_integer then (column : Column & Integer) else (column : Column & Decimal) - # Other types (e.g. Mixed) are not supported. - _ -> column - -is_single_value column:Column -> Boolean = - (column.length == 1) && (column.at 0 . is_nothing . not) diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Value_Column_Extensions.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Value_Column_Conversions.enso similarity index 94% rename from distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Value_Column_Extensions.enso rename to distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Value_Column_Conversions.enso index 8ded7f7557d0..ba246bffb75a 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Value_Column_Extensions.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Single_Value_Column_Conversions.enso @@ -3,7 +3,7 @@ private from Standard.Base import all import project.Column.Column -from project.Internal.Type_Refinements.Single_Value_Column import is_single_value +from project.Internal.Type_Refinements.Column_Refinements import is_single_value ## This conversion is internal and should never be exported. Integer.from (that : Column) -> Integer = diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Table_Refinements.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Table_Refinements.enso new file mode 100644 index 000000000000..ff2729ff7085 --- /dev/null +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Type_Refinements/Table_Refinements.enso @@ -0,0 +1,41 @@ +private + +from Standard.Base import all + +import project.Column.Column +import project.Internal.Value_Type_Helpers +import project.Table.Table +import project.Value_Type.Value_Type +from project.Internal.Type_Refinements.Single_Column_Table_Conversions import all +from project.Internal.Type_Refinements.Column_Refinements import is_single_value + +refine_table (table : Table) = + if is_single_column table . not then table else + column = table.at 0 + # This should be consistent with `Column_Refinements.refine_column` - the code needs to be copied because we need to spell out all the types. + if is_single_value column . not then table : Table & Column else + inferred_value_type = column.inferred_precise_value_type + case inferred_value_type of + Value_Type.Integer _ -> + # `inferred_precise_value_type` will return Integer if the column was Float (or Mixed) but contained integral values - e.g. [2.0] + # We inspect the actual value to correctly deal with both Float and Mixed base type. + value = column.at 0 + case value of + # If the value was really a float, we preserve that. + _ : Float -> (table : Table & Column & Float) + # Otherwise we treat it as an integer. 
+ _ -> (table : Table & Column & Integer) + Value_Type.Float _ -> (table : Table & Column & Float) + Value_Type.Char _ _ -> (table : Table & Column & Text) + Value_Type.Boolean -> (table : Table & Column & Boolean) + Value_Type.Date -> (table : Table & Column & Date) + Value_Type.Time -> (table : Table & Column & Time_Of_Day) + Value_Type.Date_Time True -> (table : Table & Column & Date_Time) + Value_Type.Decimal _ _ -> + is_integer = Value_Type_Helpers.is_decimal_integer inferred_value_type + if is_integer then (table : Table & Column & Integer) else (table : Table & Column & Decimal) + # Other types (e.g. Mixed) are not supported. + _ -> (table : Table & Column) + +is_single_column table:Table -> Boolean = + table.column_count == 1 diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Value_Type_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Value_Type_Helpers.enso index 4b1abbbbbb5c..2181b319c020 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Value_Type_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Value_Type_Helpers.enso @@ -312,3 +312,9 @@ find_common_type_for_arguments arguments = common_type : Value_Type -> common_type Nothing -> Error.throw <| No_Common_Type.Error types related_column_name=Nothing + +## PRIVATE + Checks if `Value_Type.Decimal` represents integers. +private is_decimal_integer value_type = case value_type of + Value_Type.Decimal _ scale -> (Nothing != scale) && (scale <= 0) + _ -> Error.throw (Illegal_Argument.Error "Expected Value_Type.Decimal, got "+value_type.to_display_text) diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso index 016622f1bf01..1f1cbc7a6aa4 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso @@ -59,6 +59,7 @@ import project.Internal.Split_Tokenize import project.Internal.Table_Helpers import project.Internal.Table_Helpers.Table_Column_Helper import project.Internal.Table_Ref.Table_Ref +import project.Internal.Type_Refinements.Table_Refinements import project.Internal.Union import project.Internal.Value_Type_Helpers import project.Internal.Widget_Helpers @@ -166,7 +167,7 @@ type Table msg = "All columns must have the same row count, but the column [" + mismatched_size_column.getName + "] has " + mismatched_size_column.getSize.to_text + " rows, while the column [" + cols.first.getName + "] has " + cols.first.getSize.to_text + " rows." Panic.throw (Illegal_Argument.Error msg) - Table.Value (Java_Table.new cols) + from_java_table (Java_Table.new cols) ## GROUP Standard.Base.Constants ICON data_input @@ -238,12 +239,16 @@ type Table resolved_columns.if_not_error <| Table.new resolved_columns ## PRIVATE - - A table. + Internal constructor that should not be used directly. + Please use `from_java_table` if needed. Arguments: - - java_table: The internal java representation of the table. - private Value java_table + - internal_java_table: The internal java representation of the table. 
+ private Value internal_java_table + + ## PRIVATE + A getter that is a workaround for bug https://github.com/enso-org/enso/issues/12180 + private java_table self = self.internal_java_table ## PRIVATE ADVANCED @@ -1174,8 +1179,8 @@ type Table index = self.java_table.indexFromColumns java_key_columns java_problem_aggregator new_columns = validated.valid_columns.map c->(Aggregate_Column_Helper.java_aggregator c.first c.second) java_table = index.makeTable new_columns - if validated.old_style.not then Table.Value java_table else - Warning.attach (Deprecated.Warning "Standard.Table.Aggregate_Column.Aggregate_Column" "Group_By" "Deprecated: `Group_By` constructor has been deprecated, use the `group_by` argument instead.") (Table.Value java_table) + if validated.old_style.not then from_java_table java_table else + Warning.attach (Deprecated.Warning "Standard.Table.Aggregate_Column.Aggregate_Column" "Group_By" "Deprecated: `Group_By` constructor has been deprecated, use the `group_by` argument instead.") (from_java_table java_table) ## ALIAS order_by GROUP Standard.Base.Selections @@ -1282,7 +1287,7 @@ type Table java_table = Illegal_Argument.handle_java_exception <| Incomparable_Values.handle_errors <| self.java_table.orderBy java_columns directions comparator - Table.Value java_table + from_java_table java_table ## PRIVATE GROUP Standard.Base.Selections @@ -1368,7 +1373,7 @@ type Table java_table = Illegal_Argument.handle_java_exception <| Java_Problems.with_problem_aggregator on_problems java_aggregator-> self.java_table.distinct java_columns text_folding_strategy java_aggregator - Table.Value java_table + from_java_table java_table ## GROUP Standard.Base.Selections ICON preparation @@ -1407,7 +1412,7 @@ type Table java_table = Illegal_Argument.handle_java_exception <| Java_Problems.with_problem_aggregator on_problems java_aggregator-> self.java_table.duplicates java_columns text_folding_strategy java_aggregator - Table.Value java_table + from_java_table java_table ## GROUP Standard.Base.Conversions ICON convert @@ -2166,7 +2171,7 @@ type Table filter : (Column | Expression | Text | Integer) -> (Filter_Condition | (Any -> Boolean)) -> Problem_Behavior -> Table ! 
No_Such_Column | Index_Out_Of_Bounds | Invalid_Value_Type filter self column=(Missing_Argument.throw "column") (filter : Filter_Condition | (Any -> Boolean) = Filter_Condition.Equal True) on_problems:Problem_Behavior=..Report_Warning = case column of _ : Column -> - mask filter_column = Table.Value (self.java_table.filter filter_column.java_column) + mask filter_column = from_java_table (self.java_table.filter filter_column.java_column) filter_condition = Filter_Condition.resolve_auto_scoped filter case filter_condition of _ : Filter_Condition -> @@ -2499,7 +2504,7 @@ type Table new_table = check_add_mode.if_not_error <| if resolved.length != self.row_count then Error.throw (Row_Count_Mismatch.Error self.row_count resolved.length) else - Table.Value (self.java_table.addOrReplaceColumn renamed.java_column) + from_java_table (self.java_table.addOrReplaceColumn renamed.java_column) problem_builder.report_unique_name_strategy unique problem_builder.attach_problems_after on_problems new_table @@ -2778,7 +2783,7 @@ type Table java_conditions = join_resolution.conditions new_java_table = Java_Problems.with_problem_aggregator on_problems java_aggregator-> self.java_table.join right.java_table java_conditions join_kind.to_java (columns_to_keep.at 0) (columns_to_keep.at 1) right_columns_to_drop right_prefix java_aggregator - Table.Value new_java_table + from_java_table new_java_table ## ALIAS append, cartesian GROUP Standard.Base.Calculations @@ -2826,7 +2831,7 @@ type Table on_problems.attach_problems_before limit_problems <| new_java_table = Java_Problems.with_problem_aggregator on_problems java_aggregator-> self.java_table.crossJoin right.java_table right_prefix java_aggregator - Table.Value new_java_table + from_java_table new_java_table ## ALIAS hlookup, join, lookup, vlookup, xlookup GROUP Standard.Base.Calculations @@ -2920,7 +2925,7 @@ type Table handle_java_errors <| Java_Problems.with_problem_aggregator on_problems java_problem_aggregator-> java_table = LookupJoin.lookupAndReplace java_keys java_descriptions allow_unmatched_rows java_problem_aggregator - Table.Value java_table + from_java_table java_table ## ALIAS find replace GROUP Standard.Base.Text @@ -3070,7 +3075,7 @@ type Table on_problems.attach_problems_before problems <| new_java_table = Java_Problems.with_problem_aggregator on_problems java_aggregator-> self.java_table.zip right.java_table keep_unmatched_bool right_prefix java_aggregator - Table.Value new_java_table + from_java_table new_java_table ## ALIAS append, concat, join GROUP Standard.Base.Calculations @@ -3295,7 +3300,7 @@ type Table unique.mark_used (id_columns.map .name) java_table = Java_Problems.with_problem_aggregator on_problems java_problem_aggregator-> Java_Table.transpose java_id java_data (unique.make_unique attribute_column_name) (unique.make_unique value_column_name) java_problem_aggregator - result = Table.Value java_table + result = from_java_table java_table problem_builder.report_unique_name_strategy unique problem_builder.attach_problems_after on_problems result @@ -3404,7 +3409,7 @@ type Table group_by = grouping.map g->(Aggregate_Column_Helper.java_aggregator g.name (Aggregate_Column.Group_By g)) index = self.java_table.indexFromColumns java_key_columns java_problem_aggregator new_java_table = index.makeTable (group_by + data_columns) - Table.Value new_java_table + from_java_table new_java_table False -> aggregate_names = validated_values.map c-> if c.as != "" then c.as else (name_mapper c) @@ -3415,7 +3420,7 @@ type Table Panic.catch 
TooManyColumnsException handler=too_many_columns <| Invalid_Column_Names.handle_java_exception <| new_java_table = self.java_table.makeCrossTabTable java_key_columns matched_name.first.java_column data_columns aggregate_names java_problem_aggregator - Table.Value new_java_table + from_java_table new_java_table ## PRIVATE Returns a table with a continuous sub-range of rows taken. @@ -3424,7 +3429,7 @@ type Table length = self.row_count offset = (start.min length).max 0 limit = ((end - offset).min (length - offset)).max 0 - Table.Value (self.java_table.slice offset limit) + from_java_table (self.java_table.slice offset limit) ## GROUP Standard.Base.Selections ICON order @@ -3440,7 +3445,7 @@ type Table reverse : Table reverse self = mask = OrderMask.reverse self.row_count - Table.Value <| self.java_table.applyMask mask + from_java_table <| self.java_table.applyMask mask ## ALIAS export, output, save, to_file GROUP Standard.Base.Output @@ -3486,8 +3491,9 @@ type Table - `Bytes` and `Plain_Text`: The Table does not support these types in the `write` function. If passed as format, an `Illegal_Argument` is raised. To write out the table as plain - text, the user needs to call the `Text.from Table` method and then - use the `Text.write` function. + text, the user needs to convert the Table to Text + (e.g. using the `to_delimited` method) and then use the `Text.write` + function. > Example Write a table to a CSV file, without writing the header. @@ -3932,11 +3938,19 @@ type Table data = self.columns.map c->("[" + c.name.pretty + ", " + c.to_vector.pretty + "]") . join ", " "Table.new [" + data + "]" + ## Converts all table contents to a text value in delimited format. + + Arguments: + - format: Allows customizing the delimiter and other settings of the + format. Defaults to tab-separated values. + to_delimited self (format:Delimited_Format = ..Delimited '\t') -> Text = + Delimited_Writer.write_text self format + ## PRIVATE A helper to create a new table consisting of slices of the original table. slice_ranges table ranges = normalized = normalize_ranges ranges - Table.Value (table.java_table.slice normalized) + from_java_table (table.java_table.slice normalized) ## PRIVATE make_join_helpers left_table right_table = @@ -3968,17 +3982,6 @@ Table.from (that : Text) (format = Delimited_Format.Delimited '\t') (on_problems _ : Delimited_Format -> Delimited_Reader.read_text that resolved_format on_problems _ -> Unimplemented.throw "Table.from is currently only implemented for Delimited_Format." -## PRIVATE - Converts a Table into a Text value. - - The format of the text is determined by the `format` argument. - - Arguments: - - that: The table to convert. - - format: The format of the text. -Text.from (that : Table) (format:Delimited_Format = ..Delimited '\t') = - Delimited_Writer.write_text that format - ## PRIVATE Conversion method to a Table from a Vector. @fields (Widget.Vector_Editor item_editor=Widget.Text_Input item_default='""') @@ -4049,7 +4052,9 @@ make_fill_nothing_default_widget table cache=Nothing = ## PRIVATE Helper method for internal use to make a Table from a Java Table.
-from_java_table java_table = Table.Value java_table +from_java_table java_table -> Table = + Table_Refinements.refine_table <| + Table.Value java_table ## PRIVATE Many_Files_List.from (that : Table) = diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso index eece79e8b6d9..cfed85842e85 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso @@ -25,31 +25,31 @@ prepare_visualization y max_rows=1000 = if y.is_error then (make_json_for_error x = Warning.set y [] result = case x of - _ : Vector -> make_json_for_vector x max_rows - _ : Array -> prepare_visualization x.to_vector max_rows - _ : Dictionary -> make_json_for_dictionary x max_rows - _ : JS_Object -> make_json_for_js_object x max_rows - _ : Row -> make_json_for_row x - _ : Column -> prepare_visualization x.to_table max_rows - _ : Table -> - all_rows_count = x.row_count - make_json_for_table x max_rows all_rows_count True False - _ : DB_Column -> prepare_visualization x.to_table max_rows - _ : DB_Table -> - dataframe = x.read (..First max_rows) - all_rows_count = x.row_count + v : Vector -> make_json_for_vector v max_rows + v : Array -> prepare_visualization v.to_vector max_rows + v : Dictionary -> make_json_for_dictionary v max_rows + v : JS_Object -> make_json_for_js_object v max_rows + v : Row -> make_json_for_row v + t : Table -> + all_rows_count = t.row_count + make_json_for_table t max_rows all_rows_count True False + c : Column -> prepare_visualization c.to_table max_rows + t : DB_Table -> + dataframe = t.read (..First max_rows) + all_rows_count = t.row_count make_json_for_table dataframe max_rows all_rows_count True True - _ : Function -> - pairs = [['_display_text_', '[Function '+x.to_text+']']] + c : DB_Column -> prepare_visualization c.to_table max_rows + f : Function -> + pairs = [['_display_text_', '[Function '+f.to_text+']']] value = JS_Object.from_pairs pairs JS_Object.from_pairs [["json", value]] - _ : Number -> - JS_Object.from_pairs [["json", make_json_for_value x]] - _ : Excel_Workbook -> - js_value = x.to_js_object - JS_Object.from_pairs [["json", js_value], ["sheet_names", x . sheet_names], ["get_child_node_action", "read"], ["type", "Excel_Workbook"]] - _ : XML_Document -> make_json_for_xml_element x.root_element max_rows "XML_Document" - _ : XML_Element -> make_json_for_xml_element x max_rows + v : Number -> + JS_Object.from_pairs [["json", make_json_for_value v]] + v : Excel_Workbook -> + js_value = v.to_js_object + JS_Object.from_pairs [["json", js_value], ["sheet_names", v.sheet_names], ["get_child_node_action", "read"], ["type", "Excel_Workbook"]] + v : XML_Document -> make_json_for_xml_element v.root_element max_rows "XML_Document" + v : XML_Element -> make_json_for_xml_element v max_rows _ -> make_json_for_other x result.to_text @@ -244,48 +244,48 @@ make_json_for_other x = make_json_for_value : Any -> Integer -> Text make_json_for_value val level=0 = case val of Nothing -> Nothing - _ : Text -> val - _ : Number -> - js_version = if val.is_finite then val.to_js_object else JS_Object.from_pairs [["type", "Float"], ["value", val.to_text]] + txt : Text -> txt + num : Number -> + js_version = if num.is_finite then num.to_js_object else JS_Object.from_pairs [["type", "Float"], ["value", num.to_text]] if js_version.is_a JS_Object . 
not then js_version else - pairs = [['_display_text_', val.to_display_text]] + js_version.field_names.map f-> [f, js_version.get f] + pairs = [['_display_text_', num.to_display_text]] + js_version.field_names.map f-> [f, js_version.get f] JS_Object.from_pairs pairs - _ : Boolean -> val - _ : Vector -> - if level != 0 then "[… "+val.length.to_text+" items]" else - truncated = val.take 5 . map v-> (make_json_for_value v level+1).to_text - prepared = if val.length > 5 then truncated + ["… " + (val.length - 5).to_text+ " items"] else truncated + boolean : Boolean -> boolean + vec : Vector -> + if level != 0 then "[… "+vec.length.to_text+" items]" else + truncated = vec.take 5 . map v-> (make_json_for_value v level+1).to_text + prepared = if vec.length > 5 then truncated + ["… " + (vec.length - 5).to_text+ " items"] else truncated "[" + (prepared.join ", ") + "]" - _ : Array -> make_json_for_value val.to_vector level - _ : Dictionary -> - if level != 0 then "{… "+val.size.to_text+" items}" else - truncated = val.keys.take 5 . map k-> k.to_text + ": " + (make_json_for_value (val.get k) level+1).to_text - prepared = if val.length > 5 then truncated + ["… " + (val.length - 5).to_text+ " items"] else truncated + array : Array -> make_json_for_value array.to_vector level + dict : Dictionary -> + if level != 0 then "{… "+dict.size.to_text+" items}" else + truncated = dict.keys.take 5 . map k-> k.to_text + ": " + (make_json_for_value (val.get k) level+1).to_text + prepared = if dict.length > 5 then truncated + ["… " + (dict.length - 5).to_text+ " items"] else truncated "{" + (prepared.join ", ") + "}" - _ : Column -> - if level != 0 then "Column{" +val.name + ": " + val.row_count + " rows}" else - items = make_json_for_value val.to_vector level - "Column{" + val.name + ": " + items + "}" - _ : Row -> - if level != 0 then "Row{" + val.table.column_count + " columns}" else - truncated = val.table.column_names.take 5 . map _.to_text - prepared = if val.table.column_count > 5 then truncated + ["… " + (val.table.column_count - 5).to_text+ " more"] else truncated + row : Row -> + if level != 0 then "Row{" + row.table.column_count + " columns}" else + truncated = row.table.column_names.take 5 . map _.to_text + prepared = if row.table.column_count > 5 then truncated + ["… " + (row.table.column_count - 5).to_text+ " more"] else truncated "Row{" + (prepared.join ", ") + "}" - _ : DB_Column -> - if level != 0 then "Column{" +val.name + ": " + val.row_count + " rows}" else - materialise = val.read (..First 5) - truncated = materialise . map k-> k.to_text + ": " + (make_json_for_value (val.get k) level+1).to_text - prepared = if val.length > 5 then truncated + ["… " + (val.length - 5).to_text+ " items"] else truncated - "Column{" + val.name + ": " + prepared + "}" - _ : Table -> - if level != 0 then "Table{" + val.row_count + " rows x " + val.column_count + " columns}" else - truncated = val.columns.take 5 . map _.name - prepared = if val.column_count > 5 then truncated + ["… " + (val.column_count - 5).to_text+ " more"] else truncated - "Table{" + val.row_count.to_text + " rows x [" + (prepared.join ", ") + "]}" - _ : DB_Table -> - if level != 0 then "Table{" + val.row_count + " rows x " + val.column_count + " columns}" else - truncated = val.columns.take 5 . 
map _.name - prepared = if val.column_count > 5 then truncated + ["… " + (val.column_count - 5).to_text+ " more"] else truncated - "Table{" + val.row_count.to_text + " rows x [" + (prepared.join ", ") + "]}" - _ : Function -> "[Function "+val.to_text+"]" + table : Table -> + if level != 0 then "Table{" + table.row_count + " rows x " + table.column_count + " columns}" else + truncated = table.columns.take 5 . map _.name + prepared = if table.column_count > 5 then truncated + ["… " + (table.column_count - 5).to_text+ " more"] else truncated + "Table{" + table.row_count.to_text + " rows x [" + (prepared.join ", ") + "]}" + col : Column -> + if level != 0 then "Column{" +col.name + ": " + col.row_count + " rows}" else + items = make_json_for_value col.to_vector level + "Column{" + col.name + ": " + items + "}" + table : DB_Table -> + if level != 0 then "Table{" + table.row_count + " rows x " + table.column_count + " columns}" else + truncated = table.columns.take 5 . map _.name + prepared = if table.column_count > 5 then truncated + ["… " + (table.column_count - 5).to_text+ " more"] else truncated + "Table{" + table.row_count.to_text + " rows x [" + (prepared.join ", ") + "]}" + col : DB_Column -> + if level != 0 then "Column{" +col.name + ": " + col.row_count + " rows}" else + materialise = col.read (..First 5) + truncated = materialise . map k-> k.to_text + ": " + (make_json_for_value (col.get k) level+1).to_text + prepared = if col.length > 5 then truncated + ["… " + (col.length - 5).to_text+ " items"] else truncated + "Column{" + col.name + ": " + prepared + "}" + f : Function -> "[Function "+f.to_text+"]" _ -> val.to_display_text diff --git a/test/Base_Tests/src/Semantic/Multi_Value_As_Type_Refinement_Spec.enso b/test/Base_Tests/src/Semantic/Multi_Value_As_Type_Refinement_Spec.enso index 317503d59974..33923d61bc6f 100644 --- a/test/Base_Tests/src/Semantic/Multi_Value_As_Type_Refinement_Spec.enso +++ b/test/Base_Tests/src/Semantic/Multi_Value_As_Type_Refinement_Spec.enso @@ -314,6 +314,14 @@ add_specs suite_builder = y.b_method . 
should_equal "B method" Problems.expect_only_warning Illegal_State y + group_builder.specify "clearing warnings on an intersection type" <| + b = (B.B_Ctor 24) + bc = b : B & C + bc_without_warning = Warning.clear (Warning.attach (Illegal_State.Error "my warning") bc) + Problems.assume_no_problems bc_without_warning + Problems.assume_no_problems (bc_without_warning:B) + Problems.assume_no_problems (bc_without_warning:C) + group_builder.specify "removing warnings from an intersection type should not lose even the hidden refinements" <| ab = make_a_and_b x1 = Warning.attach (Illegal_State.Error "my warning") (ab:A) diff --git a/test/Table_Tests/src/Database/Common/Common_Spec.enso b/test/Table_Tests/src/Database/Common/Common_Spec.enso index 404ef8bf74f4..cffceca6285a 100644 --- a/test/Table_Tests/src/Database/Common/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Common_Spec.enso @@ -15,6 +15,7 @@ import Standard.Test.Suite.Suite_Builder import project.Database.Common.Default_Ordering_Spec import project.Database.Common.Names_Length_Limits_Spec +import project.Database.Common.Single_Column_Table_Spec import Standard.Database.Feature.Feature import project.Util @@ -154,6 +155,7 @@ add_commmon_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connec Default_Ordering_Spec.add_specs suite_builder prefix create_connection_fn setup Names_Length_Limits_Spec.add_specs suite_builder prefix create_connection_fn + Single_Column_Table_Spec.add_specs suite_builder prefix setup suite_builder.group (prefix + "Basic Table Access") group_builder-> data = Basic_Data.setup default_connection.get diff --git a/test/Table_Tests/src/Database/Common/Single_Column_Table_Spec.enso b/test/Table_Tests/src/Database/Common/Single_Column_Table_Spec.enso new file mode 100644 index 000000000000..fb9ccf6ef74f --- /dev/null +++ b/test/Table_Tests/src/Database/Common/Single_Column_Table_Spec.enso @@ -0,0 +1,48 @@ +from Standard.Base import all +import Standard.Base.Errors.Common.Type_Error + +import Standard.Database.DB_Column.DB_Column +import Standard.Database.DB_Table.DB_Table + +from Standard.Test import all + +type Lazy_Ref + Value ~get + +add_specs suite_builder (prefix : Text) setup = + suite_builder.group prefix+"single-column DB_Table" group_builder-> + table_1 = Lazy_Ref.Value <| setup.table_builder [["A", [1, 2, 3]]] + table_2 = Lazy_Ref.Value <| setup.table_builder [["A", [1, 2, 3]], ["B", [4, 5, 6]]] + group_builder.specify "should be a DB_Table but also a DB_Column" <| + t1 = table_1.get + t1.should_be_a DB_Table + (t1:DB_Column).name.should_equal "A" + (t1:DB_Column).to_vector.should_equal [1, 2, 3] + # Still has table methods + t1.column_names.should_equal ["A"] + + group_builder.specify "will not be a DB_Column if it has more than one column" <| + t1 = table_2.get + t1.should_be_a DB_Table + Test.expect_panic Type_Error (t1:DB_Column) + + group_builder.specify "should allow passing the casted column to methods expecting column (with inserted cast)" <| + t1 = table_1.get + ((t1:DB_Column) + (t1:DB_Column)) . to_vector . 
should_equal [2, 4, 6] + + group_builder.specify "should also work for DB_Tables returned from various operations" <| + t1 = table_2.get + + t2 = t1.select_columns ["A"] + t2.should_be_a DB_Table + (t2:DB_Column).name.should_equal "A" + + t3 = t1.remove_columns ["A"] + t3.should_be_a DB_Table + (t3:DB_Column).name.should_equal "B" + + group_builder.specify "in database, does not act as a single value" <| + t1 = setup.table_builder [["A", [23]]] + t1.should_be_a DB_Table + (t1:DB_Column).name.should_equal "A" + Test.expect_panic Type_Error (t1:Integer) diff --git a/test/Table_Tests/src/IO/Csv_Spec.enso b/test/Table_Tests/src/IO/Csv_Spec.enso index 0320cecb67ec..a6f708ff8f63 100644 --- a/test/Table_Tests/src/IO/Csv_Spec.enso +++ b/test/Table_Tests/src/IO/Csv_Spec.enso @@ -57,7 +57,7 @@ add_specs suite_builder = suite_builder.group 'Writing' group_builder-> group_builder.specify 'should properly serialize simple tables' <| varied_column = (enso_project.data / "varied_column.csv") . read - res = Text.from varied_column format=(..Delimited ",") + res = varied_column.to_delimited format=(..Delimited ",") exp = normalize_lines <| ''' Column 1,Column 2,Column 3,Column 4,Column 5,Column 6 2005-02-25,2005-02-25,1,1,1.0,1 @@ -81,7 +81,7 @@ add_specs suite_builder = "This;Name;;Is""""Strange";20 Marcin,,;"hello;world" - res = Text.from t format=(..Delimited ";") + res = t.to_delimited format=(..Delimited ";") res.should_equal expected group_builder.specify 'should allow forced quoting of records' <| @@ -95,7 +95,7 @@ add_specs suite_builder = "This;Name;;Is""""Strange",20 "Marcin,,","hello;world" - res = Text.from t format=(Delimited_Format.Delimited "," . with_quotes always_quote=True) + res = t.to_delimited format=(Delimited_Format.Delimited "," . with_quotes always_quote=True) res.should_equal expected @@ -149,4 +149,3 @@ main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder suite.run_with_filter filter - diff --git a/test/Table_Tests/src/In_Memory/Main.enso b/test/Table_Tests/src/In_Memory/Main.enso index 29bcf2004ba1..29d1088e6eb1 100644 --- a/test/Table_Tests/src/In_Memory/Main.enso +++ b/test/Table_Tests/src/In_Memory/Main.enso @@ -12,6 +12,7 @@ import project.In_Memory.Fan_Out_Spec import project.In_Memory.Integer_Overflow_Spec import project.In_Memory.Lossy_Conversions_Spec import project.In_Memory.Parse_To_Table_Spec +import project.In_Memory.Single_Column_Table_Spec import project.In_Memory.Single_Value_Column_Spec import project.In_Memory.Split_Tokenize_Spec import project.In_Memory.Table_Spec @@ -34,6 +35,7 @@ add_specs suite_builder = Integer_Overflow_Spec.add_specs suite_builder Lossy_Conversions_Spec.add_specs suite_builder Parse_To_Table_Spec.add_specs suite_builder + Single_Column_Table_Spec.add_specs suite_builder Single_Value_Column_Spec.add_specs suite_builder Split_Tokenize_Spec.add_specs suite_builder Table_Conversion_Spec.add_specs suite_builder diff --git a/test/Table_Tests/src/In_Memory/Single_Column_Table_Spec.enso b/test/Table_Tests/src/In_Memory/Single_Column_Table_Spec.enso new file mode 100644 index 000000000000..1171fc47fa11 --- /dev/null +++ b/test/Table_Tests/src/In_Memory/Single_Column_Table_Spec.enso @@ -0,0 +1,106 @@ +from Standard.Base import all +import Standard.Base.Errors.Common.Type_Error + +from Standard.Table import Aggregate_Column, Column, Table + +from Standard.Test import all + +add_specs suite_builder = + suite_builder.group "single-column Table" group_builder-> + group_builder.specify "should be a Table but also a Column" <| 
+            t1 = Table.new [["A", [1, 2, 3]]]
+            t1.should_be_a Table
+            (t1:Column).name.should_equal "A"
+            (t1:Column).to_vector.should_equal [1, 2, 3]
+            # Still has table methods
+            t1.column_names.should_equal ["A"]
+
+        group_builder.specify "will not be a Column if it has more than one column" <|
+            t1 = Table.new [["A", [1, 2, 3]], ["B", [4, 5, 6]]]
+            t1.should_be_a Table
+            Test.expect_panic Type_Error (t1:Column)
+
+        group_builder.specify "should allow passing the cast column to methods expecting a Column" <|
+            t1 = Table.new [["A", [1, 2, 3]]]
+            (t1:Column).zip (t1:Column) (+) . to_vector . should_equal [2, 4, 6]
+
+        group_builder.specify "should also work for Tables returned from various operations" <|
+            t1 = Table.new [["A", [1, 2]], ["B", [3, 4]]]
+
+            t2 = t1.select_columns ["A"]
+            t2.should_be_a Table
+            (t2:Column).name.should_equal "A"
+
+            t3 = t1.remove_columns ["A"]
+            t3.should_be_a Table
+            (t3:Column).name.should_equal "B"
+
+            t4 = t1.aggregate columns=[Aggregate_Column.Count]
+            t4.should_be_a Table
+            (t4:Column).name.should_equal "Count"
+
+        group_builder.specify "should also act as an individual value if the only column has only one row (Integer)" <|
+            t1 = Table.new [["A", [32]]]
+            t1.should_be_a Table
+            (t1:Column).to_vector.should_equal [32]
+            (t1:Integer).should_equal 32
+            (t1:Integer)+100 . should_equal 132
+            (10 + (t1:Integer)) . should_equal 42
+
+            # But a table with more rows won't do that:
+            t2 = Table.new [["A", [32, 33]]]
+            t2.should_be_a Table
+            Test.expect_panic Type_Error (t2:Integer)
+
+        group_builder.specify "should also act as an individual value if the only column has only one row (Float)" <|
+            t1 = Table.new [["A", [1.5]]]
+            t1.should_be_a Table
+            (t1:Column).to_vector.should_equal [1.5]
+            (t1:Float).should_equal 1.5
+            (t1:Float)+100 . should_equal 101.5
+
+        group_builder.specify "should also act as an individual value if the only column has only one row (Text)" <|
+            t1 = Table.new [["A", ["hello"]]]
+            t1.should_be_a Table
+            (t1:Column).to_vector.should_equal ["hello"]
+            (t1:Text).should_equal "hello"
+            ((t1:Text) + "!") . should_equal "hello!"
+
+        group_builder.specify "should also act as an individual value if the only column has only one row (Boolean)" <|
+            t1 = Table.new [["A", [True]]]
+            t1.should_be_a Table
+            (t1:Column).to_vector.should_equal [True]
+            (t1:Boolean).should_equal True
+            (t1:Boolean).not . should_equal False
+
+        group_builder.specify "should also act as an individual value if the only column has only one row (Date)" <|
+            t1 = Table.new [["A", [Date.new 2025 1 1]]]
+            t1.should_be_a Table
+            (t1:Column).to_vector.should_equal [Date.new 2025 1 1]
+            (t1:Date).should_equal (Date.new 2025 1 1)
+            (t1:Date).add_work_days 100 . should_equal (Date.new 2025 5 21)
+
+        group_builder.specify "should also act as an individual value if the only column has only one row (Time_Of_Day)" <|
+            t1 = Table.new [["A", [Time_Of_Day.new 12 0 0]]]
+            t1.should_be_a Table
+            (t1:Column).to_vector.should_equal [Time_Of_Day.new 12 0 0]
+            (t1:Time_Of_Day).should_equal (Time_Of_Day.new 12 0 0)
+
+        group_builder.specify "should also act as an individual value if the only column has only one row (Date_Time)" <|
+            t1 = Table.new [["A", [Date_Time.new 2025 1 1 12 0 0]]]
+            t1.should_be_a Table
+            (t1:Column).to_vector.should_equal [Date_Time.new 2025 1 1 12 0 0]
+            (t1:Date_Time).should_equal (Date_Time.new 2025 1 1 12 0 0)
+
+        group_builder.specify "should also act as an individual value if the only column has only one row (Decimal)" <|
+            t1 = Table.new [["A", [Decimal.new "3.5"]]]
+            t1.should_be_a Table
+            (t1:Column).to_vector.should_equal [Decimal.new "3.5"]
+            (t1:Decimal).should_equal (Decimal.new "3.5")
+            ((t1:Decimal) + (Decimal.new "1.5")) . should_equal (Decimal.new "5.0")
+
+
+main filter=Nothing =
+    suite = Test.build suite_builder->
+        add_specs suite_builder
+    suite.run_with_filter filter
diff --git a/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso
index da383ffa76ee..5c7b7b492ef6 100644
--- a/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso
+++ b/test/Table_Tests/src/In_Memory/Table_Date_Spec.enso
@@ -57,18 +57,17 @@ add_specs suite_builder =
         group_builder.specify "should serialise back to input" <|
             expected_text = normalize_lines <|
                 (enso_project.data / "prime_ministers.csv").read_text
-            delimited = Text.from data.expected format=(..Delimited "," line_endings=Line_Ending_Style.Unix)
+            delimited = data.expected.to_delimited format=(..Delimited "," line_endings=Line_Ending_Style.Unix)
             delimited.should_equal expected_text
 
         group_builder.specify "should serialise dates with format" <|
             test_table = Table.new [data.c_from]
             expected_text = 'From\n04.05.1979\n28.11.1990\n02.05.1997\n27.06.2007\n11.05.2010\n13.07.2016\n24.07.2019\n'
             data_formatter = Data_Formatter.Value.with_datetime_formats date_formats=["dd.MM.yyyy"]
-            delimited = Text.from test_table format=(..Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
+            delimited = test_table.to_delimited format=(..Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
             delimited.should_equal expected_text
 
 main filter=Nothing =
     suite = Test.build suite_builder->
         add_specs suite_builder
     suite.run_with_filter filter
-
diff --git a/test/Table_Tests/src/In_Memory/Table_Date_Time_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Date_Time_Spec.enso
index 5350d3dbb1e9..e19efaf29165 100644
--- a/test/Table_Tests/src/In_Memory/Table_Date_Time_Spec.enso
+++ b/test/Table_Tests/src/In_Memory/Table_Date_Time_Spec.enso
@@ -50,18 +50,17 @@ add_specs suite_builder =
             expected_text = normalize_lines <|
                 (enso_project.data / "datetime_sample_normalized_hours.csv").read_text
             data_formatter = Data_Formatter.Value . with_datetime_formats datetime_formats=["yyyy-MM-dd HH:mm:ss"]
-            delimited = Text.from data.expected format=(..Delimited "," line_endings=Line_Ending_Style.Unix value_formatter=data_formatter)
+            delimited = data.expected.to_delimited format=(..Delimited "," line_endings=Line_Ending_Style.Unix value_formatter=data_formatter)
             delimited.should_equal expected_text
 
         group_builder.specify "should serialise dates with format" <|
             test_table = Table.new [data.c_date]
             expected_text = 'Posting date\n05.01.2015 09-00\n05.01.2015 14-00\n06.01.2015 09-00\n07.01.2015 17-30\n05.01.2011 09-00\n09.01.2011 15-30\n'
             data_formatter = Data_Formatter.Value . with_datetime_formats datetime_formats=["dd.MM.yyyy HH-mm"]
-            delimited = Text.from test_table format=(..Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
+            delimited = test_table.to_delimited format=(..Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
             delimited.should_equal expected_text
 
 main filter=Nothing =
     suite = Test.build suite_builder->
         add_specs suite_builder
     suite.run_with_filter filter
-
diff --git a/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso
index ecb35be906e0..a18d32f62d39 100644
--- a/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso
+++ b/test/Table_Tests/src/In_Memory/Table_Time_Of_Day_Spec.enso
@@ -51,18 +51,17 @@ add_specs suite_builder =
         group_builder.specify "should serialise back to input" <|
             expected_text = normalize_lines <|
                 (enso_project.data / "time_of_day_sample_normalized_hours.csv").read_text
-            delimited = Text.from data.expected format=(..Delimited "," line_endings=Line_Ending_Style.Unix)
+            delimited = data.expected.to_delimited format=(..Delimited "," line_endings=Line_Ending_Style.Unix)
             delimited.should_equal expected_text
 
         group_builder.specify "should serialise dates with format" <|
             test_table = Table.new [data.c_time]
             expected_text = 'Posting time\n09-00-00\n14-00-12\n09-00-00\n17-30-00\n09-00-04\n15-30-00\n'
             data_formatter = Data_Formatter.Value . with_datetime_formats time_formats=["HH-mm-ss"]
-            delimited = Text.from test_table format=(..Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
+            delimited = test_table.to_delimited format=(..Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
             delimited.should_equal expected_text
 
 main filter=Nothing =
     suite = Test.build suite_builder->
         add_specs suite_builder
     suite.run_with_filter filter
-