From 13aa43ebd49459adc68409b63d07c5d0b081d3ad Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Thu, 19 Dec 2024 10:06:30 +0000 Subject: [PATCH 01/15] Add missing entries. (#11914) * Add missing entries. * Remove typo --- app/ide-desktop/client/tasks/signArchivesMacOs.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/app/ide-desktop/client/tasks/signArchivesMacOs.ts b/app/ide-desktop/client/tasks/signArchivesMacOs.ts index 0e6fb9479db0..64310fad8816 100644 --- a/app/ide-desktop/client/tasks/signArchivesMacOs.ts +++ b/app/ide-desktop/client/tasks/signArchivesMacOs.ts @@ -101,6 +101,13 @@ async function ensoPackageSignables(resourcesDir: string): Promise { ], ], ['component/jna-*.jar', ['com/sun/jna/*/libjnidispatch.jnilib']], + [ + 'component/jline-*.jar', + [ + 'org/jline/nativ/Mac/arm64/libjlinenative.jnilib', + 'org/jline/nativ/Mac/x86_64/libjlinenative.jnilib', + ], + ], [ 'lib/Standard/Database/*/polyglot/java/sqlite-jdbc-*.jar', [ From c08a53dcc3efc50283cf723a0e858c24bed68066 Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Thu, 19 Dec 2024 13:56:29 +0300 Subject: [PATCH 02/15] Allow checks on develop to complete (#11917) followup to #11723 Do not cancel checks when a new commit is pushed to develop. 
--- .github/workflows/engine-pull-request.yml | 2 +- .github/workflows/gui-checks.yml | 2 +- .github/workflows/gui-packaging-pull-request.yml | 2 +- .github/workflows/gui-pull-request.yml | 2 +- .github/workflows/storybook.yml | 2 +- .github/workflows/wasm-pull-request.yml | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/engine-pull-request.yml b/.github/workflows/engine-pull-request.yml index a886fedc149e..47b2d7adf5f2 100644 --- a/.github/workflows/engine-pull-request.yml +++ b/.github/workflows/engine-pull-request.yml @@ -10,7 +10,7 @@ on: concurrency: group: ${{ github.workflow }}-${{ github.ref }}-engine-pull-request - cancel-in-progress: true + cancel-in-progress: ${{ github.ref != 'refs/heads/develop' }} permissions: checks: write diff --git a/.github/workflows/gui-checks.yml b/.github/workflows/gui-checks.yml index 730d49198748..5ac10409c741 100644 --- a/.github/workflows/gui-checks.yml +++ b/.github/workflows/gui-checks.yml @@ -6,7 +6,7 @@ on: workflow_call # Cancel in-progress workflows if a new one is started concurrency: group: ${{ github.workflow }}-${{ github.ref }}-gui-checks - cancel-in-progress: true + cancel-in-progress: ${{ github.ref != 'refs/heads/develop' }} permissions: contents: read # Read-only access to repository contents diff --git a/.github/workflows/gui-packaging-pull-request.yml b/.github/workflows/gui-packaging-pull-request.yml index c379bb18b3fb..f38628a231fe 100644 --- a/.github/workflows/gui-packaging-pull-request.yml +++ b/.github/workflows/gui-packaging-pull-request.yml @@ -10,7 +10,7 @@ on: concurrency: group: ${{ github.workflow }}-${{ github.ref }}-gui-packaging-pull-request - cancel-in-progress: true + cancel-in-progress: ${{ github.ref != 'refs/heads/develop' }} jobs: gui-changed-files: diff --git a/.github/workflows/gui-pull-request.yml b/.github/workflows/gui-pull-request.yml index 36d7f77faf93..f360b23be5e5 100644 --- a/.github/workflows/gui-pull-request.yml +++ 
b/.github/workflows/gui-pull-request.yml @@ -10,7 +10,7 @@ on: concurrency: group: ${{ github.workflow }}-${{ github.ref }}-gui-pull-request - cancel-in-progress: true + cancel-in-progress: ${{ github.ref != 'refs/heads/develop' }} permissions: contents: read # Read-only access to repository contents diff --git a/.github/workflows/storybook.yml b/.github/workflows/storybook.yml index 79b9ae96c629..f2abd5d3563b 100644 --- a/.github/workflows/storybook.yml +++ b/.github/workflows/storybook.yml @@ -7,7 +7,7 @@ on: workflow_call # Cancel in-progress workflows if a new one is started concurrency: group: ${{ github.workflow }}-${{ github.ref }}-chromatic - cancel-in-progress: true + cancel-in-progress: ${{ github.ref != 'refs/heads/develop' }} permissions: contents: read # Read-only access to repository contents diff --git a/.github/workflows/wasm-pull-request.yml b/.github/workflows/wasm-pull-request.yml index 16aecc6559f5..18a9c6d48a1d 100644 --- a/.github/workflows/wasm-pull-request.yml +++ b/.github/workflows/wasm-pull-request.yml @@ -10,7 +10,7 @@ on: concurrency: group: ${{ github.workflow }}-${{ github.ref }}-wasm-pull-request - cancel-in-progress: true + cancel-in-progress: ${{ github.ref != 'refs/heads/develop' }} jobs: wasm-changed-files: From 014b562ca9693012b03a2c1389669944feba0f11 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Thu, 19 Dec 2024 12:59:35 +0100 Subject: [PATCH 03/15] Promote broken values instead of ignoring them (#11777) Partially fixes #5430 by propagating `DataflowError`s found during statement execution out of the method. # Important Notes This change [may affect behavior](https://github.com/enso-org/enso/pull/11673/files#r1871128327) of existing methods that ignore `DataflowError` as [discussed here](https://github.com/enso-org/enso/pull/11673/files#r1871128327). 
--- CHANGELOG.md | 2 + .../0.0.0-dev/src/Data/Index_Sub_Range.enso | 10 +- .../Base/0.0.0-dev/src/Data/Range.enso | 5 +- .../0.0.0-dev/src/Data/Time/Date_Range.enso | 5 +- .../src/Internal/Array_Like_Helpers.enso | 2 +- .../Test/0.0.0-dev/src/Extensions.enso | 105 ++++++++------ .../0.0.0-dev/src/Extensions_Helpers.enso | 14 ++ docs/types/errors.md | 134 ++++++++++++------ .../DataflowErrorPropagationTest.java | 86 +++++++++++ .../node/callable/function/BlockNode.java | 19 ++- test/Base_Tests/src/Data/Decimal_Spec.enso | 4 +- test/Base_Tests/src/Data/Function_Spec.enso | 1 - test/Base_Tests/src/Data/Range_Spec.enso | 32 +++-- .../src/Data/Time/Date_Range_Spec.enso | 8 +- test/Base_Tests/src/Random_Spec.enso | 1 - test/Base_Tests/src/System/File_Spec.enso | 2 +- .../Cross_Tab_Spec.enso | 1 - .../Common_Table_Operations/Nothing_Spec.enso | 3 +- .../Table_Tests/src/Database/SQLite_Spec.enso | 27 ++-- .../Table_Tests/src/Database/Upload_Spec.enso | 2 +- test/Table_Tests/src/IO/Fetch_Spec.enso | 16 ++- .../src/In_Memory/Split_Tokenize_Spec.enso | 11 +- test/Table_Tests/src/Util.enso | 18 ++- test/Table_Tests/src/Util_Spec.enso | 66 ++++++++- test/Test_Tests/package.yaml | 7 + test/Test_Tests/src/Extensions_Spec.enso | 86 +++++++++++ test/Test_Tests/src/Helpers.enso | 17 +++ test/Test_Tests/src/Main.enso | 13 ++ 28 files changed, 540 insertions(+), 157 deletions(-) create mode 100644 distribution/lib/Standard/Test/0.0.0-dev/src/Extensions_Helpers.enso create mode 100644 engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/semantic/DataflowErrorPropagationTest.java create mode 100644 test/Test_Tests/package.yaml create mode 100644 test/Test_Tests/src/Extensions_Spec.enso create mode 100644 test/Test_Tests/src/Helpers.enso create mode 100644 test/Test_Tests/src/Main.enso diff --git a/CHANGELOG.md b/CHANGELOG.md index b1e53b92c712..1d4db090dd76 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,11 +12,13 @@ #### Enso Language & Runtime +- [Promote 
broken values instead of ignoring them][11777]. - [Intersection types & type checks][11600] - A constructor or type definition with a single inline argument definition was previously allowed to use spaces in the argument definition without parentheses. [This is now a syntax error.][11856] +[11777]: https://github.com/enso-org/enso/pull/11777 [11600]: https://github.com/enso-org/enso/pull/11600 [11856]: https://github.com/enso-org/enso/pull/11856 diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso index 95dc0af8ac17..360497b52657 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso @@ -204,9 +204,8 @@ take_helper length at single_slice slice_ranges range:(Index_Sub_Range | Range | Index_Sub_Range.First count -> single_slice 0 (length.min count) Index_Sub_Range.Last count -> single_slice length-count length Index_Sub_Range.While predicate -> - end = 0.up_to length . find i-> (predicate (at i)).not - true_end = if end.is_nothing then length else end - single_slice 0 true_end + end = 0.up_to length . find (i-> (predicate (at i)).not) if_missing=length + single_slice 0 end Index_Sub_Range.By_Index one_or_many_descriptors -> Panic.recover [Index_Out_Of_Bounds, Illegal_Argument] <| indices = case one_or_many_descriptors of _ : Vector -> one_or_many_descriptors @@ -255,9 +254,8 @@ drop_helper length at single_slice slice_ranges range:(Index_Sub_Range | Range | Index_Sub_Range.First count -> single_slice count length Index_Sub_Range.Last count -> single_slice 0 length-count Index_Sub_Range.While predicate -> - end = 0.up_to length . find i-> (predicate (at i)).not - true_end = if end.is_nothing then length else end - single_slice true_end length + end = 0.up_to length . 
find (i-> (predicate (at i)).not) if_missing=length + single_slice end length Index_Sub_Range.By_Index one_or_many_descriptors -> Panic.recover [Index_Out_Of_Bounds, Illegal_Argument] <| indices = case one_or_many_descriptors of _ : Vector -> one_or_many_descriptors diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso index 8c588cb434cd..2064d7927fdb 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso @@ -7,6 +7,7 @@ import project.Data.Text.Text import project.Data.Vector.Vector import project.Error.Error import project.Errors.Common.Index_Out_Of_Bounds +import project.Errors.Common.Not_Found import project.Errors.Empty_Error.Empty_Error import project.Errors.Illegal_Argument.Illegal_Argument import project.Errors.Illegal_State.Illegal_State @@ -397,7 +398,7 @@ type Range @condition range_default_filter_condition_widget any : (Filter_Condition | (Integer -> Boolean)) -> Boolean any self (condition : Filter_Condition | (Integer -> Boolean)) = - self.find condition . is_nothing . not + self.find condition if_missing=Nothing . is_nothing . not ## GROUP Selections ICON find @@ -422,7 +423,7 @@ type Range 1.up_to 100 . 
find (..Greater than=10) @condition range_default_filter_condition_widget find : (Filter_Condition | (Integer -> Boolean)) -> Integer -> Any -> Any - find self (condition : Filter_Condition | (Integer -> Boolean)) (start : Integer = 0) ~if_missing=Nothing = + find self (condition : Filter_Condition | (Integer -> Boolean)) (start : Integer = 0) ~if_missing=(Error.throw Not_Found) = predicate = unify_condition_or_predicate condition check_start_valid start self used_start-> result = find_internal self used_start predicate diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso index e51d9f12062e..3f0d9077dfba 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso @@ -11,6 +11,7 @@ import project.Data.Time.Period.Period import project.Data.Vector.Vector import project.Error.Error import project.Errors.Common.Index_Out_Of_Bounds +import project.Errors.Common.Not_Found import project.Errors.Empty_Error.Empty_Error import project.Errors.Illegal_Argument.Illegal_Argument import project.Function.Function @@ -418,7 +419,7 @@ type Date_Range @condition date_range_default_filter_condition_widget any : (Filter_Condition | (Date -> Boolean)) -> Boolean any self (condition : Filter_Condition | (Date -> Boolean)) = - self.find condition . is_nothing . not + self.find condition if_missing=Nothing . is_nothing . not ## GROUP Selections ICON find @@ -438,7 +439,7 @@ type Date_Range (Date.new 2020 10 01).up_to (Date.new 2020 10 31) . 
find (d-> d.day_of_week == Day_Of_Week.Monday) @condition date_range_default_filter_condition_widget find : (Filter_Condition | (Date -> Boolean)) -> Integer -> Any -> Any - find self (condition : Filter_Condition | (Date -> Boolean)) (start : Integer = 0) ~if_missing=Nothing = + find self (condition : Filter_Condition | (Date -> Boolean)) (start : Integer = 0) ~if_missing=(Error.throw Not_Found) = predicate = unify_condition_or_predicate condition index = self.index_of predicate start case index of diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso index 131d1de9fd9f..b2351bcde3b7 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso @@ -216,7 +216,7 @@ find vector condition start ~if_missing = predicate = unify_condition_or_predicate condition self_len = vector.length check_start_valid start self_len used_start-> - found = used_start.up_to self_len . find (idx -> (predicate (vector.at idx))) + found = used_start.up_to self_len . 
find (idx -> (predicate (vector.at idx))) if_missing=Nothing if found.is_nothing then if_missing else vector.at found transpose vec_of_vecs = diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso b/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso index 46741a56a5c0..ff852368a507 100644 --- a/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso +++ b/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso @@ -1,9 +1,11 @@ from Standard.Base import all +import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.No_Such_Method import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import project.Spec_Result.Spec_Result import project.Test.Test +from project.Extensions_Helpers import rhs_error_check ## Expect a function to fail with the provided dataflow error. @@ -70,23 +72,25 @@ Error.should_fail_with self matcher frames_to_skip=0 unwrap_errors=True = example_should_equal = Examples.add_1_to 1 . should_equal 2 Any.should_equal : Any -> Integer -> Spec_Result -Any.should_equal self that frames_to_skip=0 = case self == that of - True -> Spec_Result.Success - False -> - loc = Meta.get_source_location 2+frames_to_skip - additional_comment = case self of - _ : Vector -> case that of - _ : Vector -> - case self.length == that.length of - True -> - diff = self.zip that . index_of p-> - p.first != p.second - "; first difference at index " + diff.to_text + " " - False -> "; lengths differ (" + self.length.to_text + " != " + that.length.to_text + ") " +Any.should_equal self that frames_to_skip=0 = + rhs_error_check that + loc = Meta.get_source_location 1+frames_to_skip + case self == that of + True -> Spec_Result.Success + False -> + additional_comment = case self of + _ : Vector -> case that of + _ : Vector -> + case self.length == that.length of + True -> + diff = self.zip that . 
index_of p-> + p.first != p.second + "; first difference at index " + diff.to_text + " " + False -> "; lengths differ (" + self.length.to_text + " != " + that.length.to_text + ") " + _ -> "" _ -> "" - _ -> "" - msg = self.pretty + " did not equal " + that.pretty + additional_comment + " (at " + loc + ")." - Test.fail msg + msg = self.pretty + " did not equal " + that.pretty + additional_comment + " (at " + loc + ")." + Test.fail msg ## Asserts that `self` value is equal to the expected type value. @@ -130,12 +134,13 @@ Error.should_equal_type self that frames_to_skip=0 = example_should_not_equal = Examples.add_1_to 1 . should_not_equal 2 Any.should_not_equal : Any -> Integer -> Spec_Result -Any.should_not_equal self that frames_to_skip=0 = case self != that of - True -> Spec_Result.Success - False -> - loc = Meta.get_source_location 2+frames_to_skip - msg = self.to_text + " did equal " + that.to_text + " (at " + loc + ")." - Test.fail msg +Any.should_not_equal self that frames_to_skip=0 = if that.is_error then (Panic.throw (Illegal_Argument.Error "Expected value provided as `that` for `should_not_equal` cannot be an error, but got: "+that.to_display_text)) else + loc = Meta.get_source_location 2+frames_to_skip + case self != that of + True -> Spec_Result.Success + False -> + msg = self.to_text + " did equal " + that.to_text + " (at " + loc + ")." + Test.fail msg ## Added so that dataflow errors are not silently lost. Error.should_not_equal self that frames_to_skip=0 = @@ -183,15 +188,16 @@ Error.should_not_equal_type self that frames_to_skip=0 = example_should_start_with = "Hello World!" . should_start_with "Hello" Any.should_start_with : Text -> Integer -> Spec_Result -Any.should_start_with self that frames_to_skip=0 = case self of - _ : Text -> if self.starts_with that then Spec_Result.Success else - loc = Meta.get_source_location 3+frames_to_skip - msg = self.to_text + " does not start with " + that.to_text + " (at " + loc + ")." 
- Test.fail msg - _ -> - loc = Meta.get_source_location 2+frames_to_skip - msg = self.to_text + " is not a `Text` value (at " + loc + ")." - Test.fail msg +Any.should_start_with self that frames_to_skip=0 = + rhs_error_check that + loc = Meta.get_source_location 1+frames_to_skip + case self of + _ : Text -> if self.starts_with that then Spec_Result.Success else + msg = self.to_text + " does not start with " + that.to_text + " (at " + loc + ")." + Test.fail msg + _ -> + msg = self.to_text + " is not a `Text` value (at " + loc + ")." + Test.fail msg ## Asserts that `self` value is a Text value and ends with `that`. @@ -207,15 +213,16 @@ Any.should_start_with self that frames_to_skip=0 = case self of example_should_end_with = "Hello World!" . should_end_with "ld!" Any.should_end_with : Text -> Integer -> Spec_Result -Any.should_end_with self that frames_to_skip=0 = case self of - _ : Text -> if self.ends_with that then Spec_Result.Success else - loc = Meta.get_source_location 3+frames_to_skip - msg = self.to_text + " does not end with " + that.to_text + " (at " + loc + ")." - Test.fail msg - _ -> - loc = Meta.get_source_location 2+frames_to_skip - msg = self.to_text + " is not a `Text` value (at " + loc + ")." - Test.fail msg +Any.should_end_with self that frames_to_skip=0 = + rhs_error_check that + loc = Meta.get_source_location 1+frames_to_skip + case self of + _ : Text -> if self.ends_with that then Spec_Result.Success else + msg = self.to_text + " does not end with " + that.to_text + " (at " + loc + ")." + Test.fail msg + _ -> + msg = self.to_text + " is not a `Text` value (at " + loc + ")." + Test.fail msg ## Asserts that `self` value is a Text value and starts with `that`. @@ -267,7 +274,7 @@ Error.should_end_with self that frames_to_skip=0 = example_should_equal = Examples.add_1_to 1 . 
should_equal 2 Error.should_equal : Any -> Integer -> Spec_Result Error.should_equal self that frames_to_skip=0 = - _ = [that] + rhs_error_check that Test.fail_match_on_unexpected_error self 1+frames_to_skip ## Asserts that `self` is within `epsilon` from `that`. @@ -294,13 +301,18 @@ Error.should_equal self that frames_to_skip=0 = 1.00000001 . should_equal 1.00000002 epsilon=0.0001 Number.should_equal : Float -> Float -> Integer -> Spec_Result Number.should_equal self that epsilon=0 frames_to_skip=0 = + rhs_error_check that + loc = Meta.get_source_location 1+frames_to_skip matches = case that of - n : Number -> self.equals n epsilon + n : Number -> self.equals n epsilon . catch Incomparable_Values _-> + ## Incomparable_Values is thrown if one of the values is NaN. + We fallback to is_same_object_as, + because in tests we actually NaN.should_equal NaN to succeed. + self.is_same_object_as n _ -> self==that case matches of True -> Spec_Result.Success False -> - loc = Meta.get_source_location 2+frames_to_skip msg = self.to_text + " did not equal " + that.to_text + " (at " + loc + ")." Test.fail msg @@ -313,6 +325,7 @@ Number.should_equal self that epsilon=0 frames_to_skip=0 = displayed as the source of this error. Decimal.should_equal : Number -> Float-> Float -> Integer -> Spec_Result Decimal.should_equal self that epsilon=0 frames_to_skip=0 = + rhs_error_check that self.to_float . should_equal that.to_float epsilon frames_to_skip+1 ## Asserts that `self` value is not an error. 
@@ -423,6 +436,7 @@ Error.should_be_false self = Test.fail_match_on_unexpected_error self 1 example_should_be_a = 1.should_be_a Boolean Any.should_be_a : Any -> Spec_Result Any.should_be_a self typ = + rhs_error_check typ loc = Meta.get_source_location 1 fail_on_wrong_arg_type = Panic.throw <| @@ -490,6 +504,8 @@ Any.should_be_a self typ = Any.should_equal_ignoring_order : Any -> Integer -> Spec_Result Any.should_equal_ignoring_order self that frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip + if that.is_a Vector . not then + Panic.throw (Illegal_Argument.Error "Expected a Vector, but got a "+that.to_display_text+" (at "+loc+").") that.each element-> if self.contains element . not then msg = "The collection (" + self.to_text + ") did not contain " + element.to_text + " (at " + loc + ")." @@ -556,6 +572,7 @@ Error.should_equal_ignoring_order self that frames_to_skip=0 = example_should_equal = [1, 2] . should_only_contain_elements_in [1, 2, 3, 4] Any.should_only_contain_elements_in : Any -> Integer -> Spec_Result Any.should_only_contain_elements_in self that frames_to_skip=0 = + rhs_error_check that loc = Meta.get_source_location 1+frames_to_skip self.each element-> if that.contains element . not then @@ -609,6 +626,7 @@ Error.should_only_contain_elements_in self that frames_to_skip=0 = example_should_equal = "foobar".should_contain "foo" Any.should_contain : Any -> Integer -> Spec_Result Any.should_contain self element frames_to_skip=0 = + rhs_error_check element loc = Meta.get_source_location 1+frames_to_skip contains_result = Panic.catch No_Such_Method (self.contains element) caught_panic-> if caught_panic.payload.method_name != "contains" then Panic.throw caught_panic else @@ -652,6 +670,7 @@ Error.should_contain self element frames_to_skip=0 = implementing a method `contains : a -> Boolean`. 
Any.should_not_contain : Any -> Integer -> Spec_Result Any.should_not_contain self element frames_to_skip=0 = + rhs_error_check element loc = Meta.get_source_location 1+frames_to_skip contains_result = Panic.catch No_Such_Method (self.contains element) caught_panic-> if caught_panic.payload.method_name != "contains" then Panic.throw caught_panic else diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions_Helpers.enso b/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions_Helpers.enso new file mode 100644 index 000000000000..73f4437ef8bf --- /dev/null +++ b/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions_Helpers.enso @@ -0,0 +1,14 @@ +private + +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +## PRIVATE + A helper that ensures that the expected value provided in some of the Test + operations is not an error. + The left-hand side may be an error and that will cause a test failure. + But the right-hand side being an error is bad test design and should be fixed. +rhs_error_check that = + if that.is_error then + msg = "Dataflow error ("+that.to_display_text+") provided as expected value. Use `should_fail_with` or change the test."+ ' Error stack trace was:\n'+that.get_stack_trace_text + Panic.throw (Illegal_Argument.Error msg) diff --git a/docs/types/errors.md b/docs/types/errors.md index 653645734301..8d0aeb4f3187 100644 --- a/docs/types/errors.md +++ b/docs/types/errors.md @@ -1,15 +1,17 @@ --- layout: developer-doc -title: Errors +title: Errors & Panics category: types tags: [types, errors] order: 12 --- -# Errors +# Errors & Panics -Enso supports two notions of errors. One is the standard exceptions model, while -the other is a theory of 'broken values' that propagate through computations. +Enso supports two notions of errors. 
One is the standard exceptions model (built +around `Panic.throw` and related methods), while the other is a theory of +_broken values_ that propagate through computations (represented by `Error` and +created by `Error.throw` method). > [!WARNING] The actionables for this section are: > @@ -19,65 +21,109 @@ the other is a theory of 'broken values' that propagate through computations. -- [Async Exceptions](#async-exceptions) +- [Exceptions/Panics](#errors--panics) - [Broken Values](#broken-values) -## Async Exceptions +## Exceptions/Panics > [!WARNING] The actionables for this section are: > -> - why is this called _"asynchronous"_ when the `Panic` is raised -> synchronously? -> - Formalise the model of async exceptions as implemented. +> - Formalise the model of `Panic.throw` as implemented. ## Broken Values -In Enso we have the notion of a 'broken' value: one which is in an invalid state -but not an error. While these may initially seem a touch useless, they are -actually key for the display of errors in the GUI. +In Enso we have the notion of a _broken value_: one which is in an invalid +state. Such values are very useful for displaying errors in the GUI. -Broken values can be thought of like checked monadic exceptions in Haskell, but -with an automatic propagation mechanism: +Broken values are fast to allocate and pass around the program. They record line +of their own creation - e.g. where `Error.throw` has happened. Shall that not be +enough, one can run with `-ea` flag, like: -- Broken values that aren't handled explicitly are automatically promoted - through the parent scope. This is trivial inference as no evidence discharge - will have occurred on the value. +```bash +enso$ JAVA_OPTS=-ea ./built-distribution/enso-engine-*/enso-*/bin/enso --run x.enso +``` - ```ruby - open : String -> String in IO ! IO.Exception - open = ... +to get full stack where the _broken value_ has been created. 
Collecting such +full stack trace however prevents the execution to run at _full speed_. - test = - print 'Opening the gates!' - txt = open 'gates.txt' - print 'Gates were opened!' - 7 - ``` +### Promotion of Broken Values - In the above example, the type of test is inferred to - `test : Int in IO ! IO.Exception`, because no evidence discharge has taken - place as the potential broken value hasn't been handled. +Broken values that aren't handled explicitly are automatically promoted through +the parent scope. Let's assume an `open` function that can yield a `Text` or +_broken value_ representing a `File_Error`: -- This allows for very natural error handling in the GUI. +```ruby +open file_name:Text -> Text ! File_Error = ... +``` -> [!WARNING] The actionables for this section are: -> -> - Determine what kinds of APIs we want to use async exceptions for, and which -> broken values are more suited for. -> - Ensure that we are okay with initially designing everything around async -> exceptions as broken values are very hard to support without a type checker. -> - Initially not supported for APIs. +Then imagine following `test` function trying to open a non-existing file +`gates.txt` + +```ruby +test = + IO.println 'Opening the gates!' + open 'gates.txt' + IO.println 'Gates were opened!' +``` + +Execution of such function will: + +- print `Opening the gates!` text +- finish with `File_Error` _broken value_ +- **not print** `Gates were opened!` + +E.g. the execution of a function body ends after first _uhandled broken value_. + +### Propagation of Broken Values -Broken values (implemented as `DataflowError` class in the interpreter) are fast -to allocate and pass around the program. They record line of their own -creation - e.g. where `Error.throw` has happened. Shall that not be enough, one -can run with `-ea` flag, like: +Let's modify the previous example a bit. 
Let's assign the read text (or _broken +value_) to a variable and return it from the `test` function: + +```ruby +test = + IO.println 'Opening the gates!' + content = open 'gates.txt' + IO.println 'Gates were opened!' + content +``` + +If the `gates.txt` file exists, its content is returned from the `test` +function. If a `File_Error` _broken value_ is returned from the `open` function, +then the variable `content` will contain such a _broken value_ and as `content` +is the return value from the `test` function, the `File_Error` will be returned +from the `test` function and propagated further as a _broken value_. + +In both situations (if the file exists or not) both `IO.println` statements are +executed and the execution of `test` function thus prints both +`Opening the gates!` as well as `Gates were opened!`. + +### Detection of Unused Broken Values + +Should the last statement (e.g. `content`) of the `test` function defined in +previous section be missing, then the _broken value_ assigned to `content` +variable might _"disappear"_ unnoticed. However in such a situation the Enso +compiler emits a _compile time warning_: ```bash -enso$ JAVA_OPTS=-ea ./built-distribution/enso-engine-*/enso-*/bin/enso --run x.enso +test.enso:3:3: warning: Unused variable content. + 3 | content = open 'gates.txt' + | ^~~~~~~ ``` -to get full stack where the _broken value_ has been created. Collecting such -full stack trace however prevents the execution to run at _full speed_. +The combination of _detection_, _propagation_ and _promotion_ of _broken values_ +ensures `File_Error` and other _broken values_ are **never lost** +(unintentionally). Should _loosing a broken value_ be a goal, one can change the +line in question to: + +```ruby + _ = open 'gates.txt' +``` + +e.g. assign it to anonymous variable. That signals to the system one doesn't +care about the result of the `open` function call. No _compiler warning_ is thus +reported and the _broken value_ gets lost during execution. 
+ +To handle _broken values_ properly and recover from such an errorneous state, +use methods offered by `Standard.Base.Error` type like `catch`. diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/semantic/DataflowErrorPropagationTest.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/semantic/DataflowErrorPropagationTest.java new file mode 100644 index 000000000000..f5facc9d72d1 --- /dev/null +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/semantic/DataflowErrorPropagationTest.java @@ -0,0 +1,86 @@ +package org.enso.interpreter.test.semantic; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.enso.common.MethodNames; +import org.enso.test.utils.ContextUtils; +import org.graalvm.polyglot.Context; +import org.graalvm.polyglot.PolyglotException; +import org.graalvm.polyglot.Value; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +public class DataflowErrorPropagationTest { + private static Context ctx; + private static Value suppressError; + private static Value suppressErrorWithAssign; + + @BeforeClass + public static void prepareCtx() { + ctx = ContextUtils.createDefaultContext(); + var code = + """ + from Standard.Base import all + + private yield_error yes:Boolean -> Text = + if yes then Error.throw "Yielding an error" else + "OK" + + suppress_error yes:Boolean value = + yield_error yes + value + + suppress_error_with_assign yes:Boolean value = + _ = yield_error yes + value + """; + suppressError = + ctx.eval("enso", code).invokeMember(MethodNames.Module.EVAL_EXPRESSION, "suppress_error"); + suppressErrorWithAssign = + ctx.eval("enso", code) + .invokeMember(MethodNames.Module.EVAL_EXPRESSION, "suppress_error_with_assign"); + } + + @AfterClass + public static void disposeCtx() { + ctx.close(); + ctx = null; + } + + @Test + public void 
noErrorReturnValue() { + var value = suppressError.execute(false, 42); + assertTrue("It is a number", value.isNumber()); + assertEquals(42, value.asInt()); + } + + @Test + public void propagateErrorImmediatelly() { + var value = suppressError.execute(true, 42); + assertFalse("It is not a number", value.isNumber()); + assertTrue("It is an error", value.isException()); + try { + throw value.throwException(); + } catch (PolyglotException ex) { + assertEquals("Yielding an error", ex.getMessage()); + } + } + + @Test + public void noErrorReturnValueWithAssign() { + var value = suppressErrorWithAssign.execute(false, 42); + assertTrue("It is a number", value.isNumber()); + assertEquals(42, value.asInt()); + } + + @Test + public void errorIsAssignedAndThatIsEnoughReturnValue() { + var value = suppressErrorWithAssign.execute(true, 42); + assertTrue("It is a number", value.isNumber()); + assertFalse("Not an error", value.isException()); + assertEquals(42, value.asInt()); + } +} diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/function/BlockNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/function/BlockNode.java index 994b68b34149..41d51a54a9ee 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/function/BlockNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/function/BlockNode.java @@ -6,9 +6,12 @@ import com.oracle.truffle.api.instrumentation.Tag; import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.nodes.NodeInfo; +import com.oracle.truffle.api.profiles.BranchProfile; import com.oracle.truffle.api.source.SourceSection; import java.util.Set; import org.enso.interpreter.node.ExpressionNode; +import org.enso.interpreter.runtime.EnsoContext; +import org.enso.interpreter.runtime.error.DataflowError; /** * This node defines the body of a function for execution, as well as the protocol for executing the @@ -16,11 +19,17 @@ */ @NodeInfo(shortName = 
"Block") public class BlockNode extends ExpressionNode { + private final BranchProfile unexpectedReturnValue; @Children private final ExpressionNode[] statements; @Child private ExpressionNode returnExpr; private BlockNode(ExpressionNode[] expressions, ExpressionNode returnExpr) { this.statements = expressions; + if (expressions.length > 0) { + this.unexpectedReturnValue = BranchProfile.create(); + } else { + this.unexpectedReturnValue = BranchProfile.getUncached(); + } this.returnExpr = returnExpr; } @@ -55,8 +64,16 @@ public static BlockNode buildSilent(ExpressionNode[] expressions, ExpressionNode @Override @ExplodeLoop public Object executeGeneric(VirtualFrame frame) { + var ctx = EnsoContext.get(this); + var nothing = ctx.getBuiltins().nothing(); for (ExpressionNode statement : statements) { - statement.executeGeneric(frame); + var result = statement.executeGeneric(frame); + if (result != nothing) { + unexpectedReturnValue.enter(); + if (result instanceof DataflowError err) { + return err; + } + } } return returnExpr.executeGeneric(frame); } diff --git a/test/Base_Tests/src/Data/Decimal_Spec.enso b/test/Base_Tests/src/Data/Decimal_Spec.enso index 3465ce9cd85a..98a9d096b215 100644 --- a/test/Base_Tests/src/Data/Decimal_Spec.enso +++ b/test/Base_Tests/src/Data/Decimal_Spec.enso @@ -484,10 +484,10 @@ add_specs suite_builder = (Decimal.from_integer -29388920982834 . subtract (Decimal.from_integer 842820) (Math_Context.new 7)) . should_equal (Decimal.from_integer -29388920000000) (Decimal.new "-8273762787.3535345" . subtract (Decimal.new "76287273.23434535") (Math_Context.new 10)) . should_equal (Decimal.new "-8350050061") - (Decimal.from_integer 7297927982888383 . multiply (Decimal.from_integer 828737) (Math_Context.new 6)) . should_equal (Decimal.from_integer 6048060000000000000000 ) + (Decimal.from_integer 7297927982888383 . multiply (Decimal.from_integer 828737) (Math_Context.new 6)) . 
should_equal (Decimal.from_integer 6048060000000000000000) (Decimal.new "893872388.3535345" . multiply (Decimal.new "72374727737.23434535") (Math_Context.new 14)) . should_equal (Decimal.new "64693770738918000000") - (Decimal.new "909678645268840" . divide (Decimal.new "28029830") (Math_Context.new 6)) . should_equal (Decimal.new "32453900 ") + (Decimal.new "909678645268840" . divide (Decimal.new "28029830") (Math_Context.new 6)) . should_equal (Decimal.new "32453900") (Decimal.new "384456406.7860325392609633764" . divide (Decimal.new "24556.125563546") (Math_Context.new 7)) . should_equal (Decimal.new "15656.23") (Decimal.from_integer 3948539458034580838458034803485 . add (Decimal.from_integer 237957498573948579387495837459837) (Math_Context.new 20)) . should_equal (Decimal.from_integer 241906038031983160230000000000000) diff --git a/test/Base_Tests/src/Data/Function_Spec.enso b/test/Base_Tests/src/Data/Function_Spec.enso index b0cd9099e11a..c808ce77d9c8 100644 --- a/test/Base_Tests/src/Data/Function_Spec.enso +++ b/test/Base_Tests/src/Data/Function_Spec.enso @@ -90,4 +90,3 @@ main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder suite.run_with_filter filter - diff --git a/test/Base_Tests/src/Data/Range_Spec.enso b/test/Base_Tests/src/Data/Range_Spec.enso index a80a5cd52a0f..5bb3ecc8446f 100644 --- a/test/Base_Tests/src/Data/Range_Spec.enso +++ b/test/Base_Tests/src/Data/Range_Spec.enso @@ -3,6 +3,7 @@ import Standard.Base.Data.Vector.Builder import Standard.Base.Errors.Empty_Error.Empty_Error import Standard.Base.Errors.Common.Index_Out_Of_Bounds import Standard.Base.Errors.Common.No_Such_Method +import Standard.Base.Errors.Common.Not_Found import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Common.Unsupported_Argument_Types import Standard.Base.Errors.Illegal_Argument.Illegal_Argument @@ -250,9 +251,10 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> group_builder.specify "should find 
elements" <| 1.up_to 10 . find (> 5) . should_equal 6 1.up_to 10 . find (..Greater 5) . should_equal 6 - 1.up_to 10 . find (> 10) . should_be_a Nothing + 1.up_to 10 . find (> 10) . should_fail_with Not_Found 1.up_to 10 . find (v-> v%4 == 0) start=6 . should_equal 8 - 1.up_to 10 . find (< 5) start=6 . should_be_a Nothing + 1.up_to 10 . find (< 5) start=6 if_missing=Nothing . should_be_a Nothing + 1.up_to 10 . find (< 5) start=6 . should_fail_with Not_Found 1.up_to 10 . find (< 5) start=10 . should_fail_with Index_Out_Of_Bounds 1.up_to 10 . find (< 5) start=10 . catch . should_equal (Index_Out_Of_Bounds.Error 10 10) Test.expect_panic_with (1.up_to 10 . find "invalid arg") Type_Error @@ -343,7 +345,8 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r.fold 0 (+) . should_equal 0 r.any _->True . should_equal False r.all _->False . should_equal True - r.find _->True . should_equal Nothing + r.find _->True if_missing=Nothing . should_equal Nothing + r.find _->True . should_fail_with Not_Found verify_contains r [] [-1, 0, 1, 2, 10] check_empty_range (0.up_to 0) @@ -370,7 +373,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r1.all (_ == 10) . should_equal True r1.all (_ == 11) . should_equal False r1.find (x-> x*x == 100) . should_equal 10 - r1.find (x-> x*x == 25) . should_equal Nothing + r1.find (x-> x*x == 25) if_missing=Nothing . should_equal Nothing verify_contains r1 [10] [-1, 0, 1, 2, 9, 11, 12] group_builder.specify "should behave correctly with step greater than 1" <| @@ -387,7 +390,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r1.all (x-> x % 2 == 0) . should_equal True r1.all (_ == 2) . should_equal False r1.find (x-> x*x == 16) . should_equal 4 - r1.find (x-> x*x == 25) . should_equal Nothing + r1.find (x-> x*x == 25) if_missing=Nothing . 
should_equal Nothing verify_contains r1 [0, 2, 4, 6, 8] [-3, -2, -1, 1, 3, 5, 7, 11, 12, 13, 14] r2 = Range.Between 0 3 2 @@ -402,7 +405,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r2.any (_ == 3) . should_equal False r2.all (x-> x % 2 == 0) . should_equal True r2.all (_ == 2) . should_equal False - r2.find (x-> x*x == 16) . should_equal Nothing + r2.find (x-> x*x == 16) . should_fail_with Not_Found r2.find (x-> x*x == 4) . should_equal 2 verify_contains r2 [0, 2] [-3, -2, -1, 1, 3, 4, 5] @@ -418,7 +421,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r3.any (_ == 3) . should_equal False r3.all (_ == 5) . should_equal True r3.all (_ == 3) . should_equal False - r3.find (x-> x*x == 16) . should_equal Nothing + r3.find (x-> x*x == 16) . should_fail_with Not_Found r3.find (x-> x*x == 25) . should_equal 5 verify_contains r3 [5] [0, 1, 4, 6, 7, 10] @@ -435,7 +438,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r4.all (x-> x % 2 == 1) . should_equal True r4.all (_ == 5) . should_equal False r4.find (x-> x*x == 25) . should_equal 5 - r4.find (x-> x*x == 4) . should_equal Nothing + r4.find (x-> x*x == 4) if_missing=Nothing . should_equal Nothing verify_contains r4 [5, 7] [0, 1, 4, 6, 8, 10] r5 = Range.Between 5 7 2 @@ -451,7 +454,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r5.all (x-> x % 2 == 1) . should_equal True r5.all (_ == 5) . should_equal True r5.find (x-> x*x == 25) . should_equal 5 - r5.find (x-> x*x == 4) . should_equal Nothing + r5.find (x-> x*x == 4) if_missing=Nothing . should_equal Nothing verify_contains r5 [5] [0, 1, 4, 6, 7, 10] r6 = Range.Between 0 10 3 @@ -467,7 +470,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r6.all (x-> x % 2 == 0) . should_equal False r6.all (x-> x % 3 == 0) . should_equal True r6.find (x-> x*x == 9) . should_equal 3 - r6.find (x-> x*x == 25) . 
should_equal Nothing + r6.find (x-> x*x == 25) if_missing=Nothing . should_equal Nothing r6.filter (_ < 4) . should_equal [0, 3] verify_contains r6 [0, 3, 6, 9] [-3, -2, -1, 1, 2, 4, 5, 7, 8, 10, 11] @@ -485,7 +488,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r1.all (x-> x % 2 == 0) . should_equal False r1.all (_ > 0) . should_equal True r1.find (x-> x*x == 16) . should_equal 4 - r1.find (x-> x*x == 0) . should_equal Nothing + r1.find (x-> x*x == 0) if_missing=Nothing . should_equal Nothing verify_contains r1 [4, 3, 2, 1] [-2, -1, 0, 5, 6, 7, 10] r2 = Range.Between 4 0 -2 @@ -501,7 +504,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r2.all (x-> x % 2 == 0) . should_equal True r2.all (_ > 2) . should_equal False r2.find (x-> x*x == 16) . should_equal 4 - r2.find (x-> x*x == 0) . should_equal Nothing + r2.find (x-> x*x == 0) . should_fail_with Not_Found verify_contains r2 [4, 2] [-2, -1, 0, 1, 3, 5, 6, 7, 10] r3 = Range.Between 4 0 -10 @@ -517,7 +520,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r3.all (x-> x % 2 == 0) . should_equal True r3.all (_ > 4) . should_equal False r3.find (x-> x*x == 16) . should_equal 4 - r3.find (x-> x*x == 0) . should_equal Nothing + r3.find (x-> x*x == 0) . should_fail_with Not_Found verify_contains r3 [4] [-2, -1, 0, 1, 2, 3, 5, 6, 7, 10] r4 = Range.Between 3 0 -3 @@ -533,7 +536,7 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> r4.all (x-> x % 2 == 0) . should_equal False r4.all (_ > 0) . should_equal True r4.find (x-> x*x == 9) . should_equal 3 - r4.find (x-> x*x == 0) . should_equal Nothing + r4.find (x-> x*x == 0) . 
should_fail_with Not_Found verify_contains r4 [3] [-3, -2, -1, 0, 1, 2, 4, 5, 6, 7, 10] group_builder.specify "should report errors if trying to set step to 0" <| @@ -582,4 +585,3 @@ main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder suite.run_with_filter filter - diff --git a/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso b/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso index b2b469ae1c61..9d74f3604c7b 100644 --- a/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Errors.Common.Not_Found import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Empty_Error.Empty_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument @@ -157,7 +158,7 @@ add_specs suite_builder = r.partition p . should_equal (r.to_vector.partition p) r.all p . should_equal (r.to_vector.all p) r.any p . should_equal (r.to_vector.any p) - r.find p . should_equal (r.to_vector.find p) + r.find p if_missing="not found" . should_equal (r.to_vector.find p if_missing="not found") r.index_of p . should_equal (r.to_vector.index_of p) r.last_index_of p . should_equal (r.to_vector.last_index_of p) count_mondays acc date = @@ -170,7 +171,7 @@ add_specs suite_builder = r.partition fc . should_equal (r.to_vector.partition fc) r.all fc . should_equal (r.to_vector.all fc) r.any fc . should_equal (r.to_vector.any fc) - r.find fc . should_equal (r.to_vector.find fc) + r.find fc if_missing="not found" . should_equal (r.to_vector.find fc if_missing="not found") r.index_of fc . should_equal (r.to_vector.index_of fc) r.last_index_of fc . 
should_equal (r.to_vector.last_index_of fc) @@ -182,6 +183,9 @@ add_specs suite_builder = Test.expect_panic_with (r.index_of invalid_arg) Type_Error Test.expect_panic_with (r.last_index_of invalid_arg) Type_Error + # If `if_missing` is not provided, it defaults to `Not_Found` dataflow error + r.find (== 123) . should_fail_with Not_Found + reducer x y = if x > y then x else y case r.length of diff --git a/test/Base_Tests/src/Random_Spec.enso b/test/Base_Tests/src/Random_Spec.enso index ecb2b6ac07cb..980dc33707a2 100644 --- a/test/Base_Tests/src/Random_Spec.enso +++ b/test/Base_Tests/src/Random_Spec.enso @@ -157,4 +157,3 @@ main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder suite.run_with_filter filter - diff --git a/test/Base_Tests/src/System/File_Spec.enso b/test/Base_Tests/src/System/File_Spec.enso index 4f56688efa4f..826771ed3de6 100644 --- a/test/Base_Tests/src/System/File_Spec.enso +++ b/test/Base_Tests/src/System/File_Spec.enso @@ -391,7 +391,7 @@ add_specs suite_builder = subdir.should_succeed cleanup = Enso_User.flush_caches - subdir.delete + subdir.delete recursive=True Panic.with_finalizer cleanup <| Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_PROJECT_DIRECTORY_PATH" subdir.path <| # Flush caches to ensure fresh dir is used diff --git a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso index c0d17b70a245..ad79702d5c8b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso @@ -252,7 +252,6 @@ add_specs suite_builder setup = r1.catch.to_display_text . should_contain "cannot contain the NUL character" r2 = data.table2.cross_tab [] "Key" values=[Aggregate_Column.Average "Value" as='x\0'] - r2.print r2.should_fail_with Invalid_Column_Names r2.catch.to_display_text . 
should_contain "cannot contain the NUL character" diff --git a/test/Table_Tests/src/Common_Table_Operations/Nothing_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Nothing_Spec.enso index 43596f79bbd8..e929623dc265 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Nothing_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Nothing_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Errors.Common.Incomparable_Values from Standard.Table import all @@ -257,7 +258,7 @@ add_nothing_specs suite_builder setup = other_value = triple.at 1 value_type = triple.at 2 - is_comparable = case Ordering.compare value other_value of + is_comparable = case Ordering.compare value other_value . catch Incomparable_Values of _:Ordering -> True _ -> False diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 1c10b7d3b1ea..5ea4e8ce960b 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -3,6 +3,7 @@ import Standard.Base.Runtime.Ref.Ref from Standard.Base.Runtime import assert import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Runtime.Context from Standard.Table import Table, Value_Type, Bits from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names @@ -54,8 +55,8 @@ type Metadata_Data [connection, tinfo, t] teardown self = - self.connection.drop_table self.t.name - self.connection.drop_table self.tinfo + self.connection.drop_table self.t.name if_exists=True + self.connection.drop_table self.tinfo if_exists=True self.connection.close @@ -81,9 +82,9 @@ type Tables_And_Table_Types_Data [connection, tinfo, vinfo, temporary_table] teardown self = - self.connection.drop_table self.tinfo - self.connection.drop_table self.vinfo - self.connection.drop_table self.temporary_table + self.connection.drop_table self.tinfo 
if_exists=True + self.connection.execute_update "DROP VIEW IF EXISTS '"+self.vinfo+"'" + self.connection.drop_table self.temporary_table if_exists=True self.connection.close @@ -377,9 +378,7 @@ type Database_File Value ~file create = Database_File.Value <| - transient_dir = enso_project.data / "transient" - assert transient_dir.exists ("The directory " + transient_dir.path + " should exist (ensured by containing a `.gitignore` file).") - f = transient_dir / "sqlite_test.db" + f = File.create_temporary_file "sqlite-test" ".db" f.delete_if_exists f @@ -388,7 +387,10 @@ create_inmem_connection = create_file_connection file = connection = Database.connect (SQLite.From_File file) - connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' + ## We need to re-enable the context because this initializer may be executed + lazily in a context where Output was disabled (e.g. Upload_Spec). + Context.Output.with_enabled <| + connection.execute_update 'CREATE TABLE IF NOT EXISTS "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)' connection type File_Connection @@ -401,10 +403,6 @@ type File_Connection assert tmp_file.exists tmp_file - teardown self = - assert self.file.exists - self.file.delete - add_specs suite_builder = in_file_prefix = "[SQLite File] " @@ -425,9 +423,6 @@ add_specs suite_builder = suite_builder.group "SQLite_Format should allow connecting to SQLite files" group_builder-> data = File_Connection.setup database_file - group_builder.teardown <| - data.teardown - group_builder.specify "should recognise a SQLite database file" <| Auto_Detect.get_reading_format data.file . 
should_be_a SQLite_Format diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index ffaa385e9e3c..dce15a1d1fbb 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -498,7 +498,7 @@ add_specs suite_builder setup make_new_connection persistent_connector=True = copied_table = db_table.select_into_database_table tmp_connection (Name_Generator.random_name "copied-table") temporary=False copied_table.is_trivial_query . should_be_true name = copied_table.name - Panic.with_finalizer (data.connection.drop_table name) <| + Panic.with_finalizer (data.connection.drop_table name if_exists=True) <| setup.expect_integer_type <| copied_table.at "X" copied_table.at "Y" . value_type . is_text . should_be_true copied_table.at "Z" . value_type . is_floating_point . should_be_true diff --git a/test/Table_Tests/src/IO/Fetch_Spec.enso b/test/Table_Tests/src/IO/Fetch_Spec.enso index 0e27747890c4..c4b11c1a27c6 100644 --- a/test/Table_Tests/src/IO/Fetch_Spec.enso +++ b/test/Table_Tests/src/IO/Fetch_Spec.enso @@ -4,8 +4,10 @@ import Standard.Base.Errors.Common.Response_Too_Large import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Network.HTTP.Cache_Policy.Cache_Policy +import Standard.Base.Network.HTTP.HTTP_Error.HTTP_Error import Standard.Base.Network.HTTP.Request.Request import Standard.Base.Network.HTTP.Request_Body.Request_Body +import Standard.Base.Network.HTTP.Request_Error import Standard.Base.Network.HTTP.Response.Response import Standard.Base.Runtime.Context import Standard.Base.Runtime.Ref.Ref @@ -412,11 +414,11 @@ add_specs suite_builder = group_builder.specify "Should not cache if the request fails" pending=pending_has_url <| Test.with_retries <| with_default_cache <| - HTTP.fetch url0 . decode_as_text + HTTP.fetch url0 . decode_as_text . 
should_succeed get_num_response_cache_entries . should_equal 1 - HTTP.fetch base_url_with_slash+'crash' . decode_as_text + HTTP.fetch base_url_with_slash+'crash' . decode_as_text . should_fail_with Request_Error get_num_response_cache_entries . should_equal 1 - HTTP.fetch base_url_with_slash+'nonexistent_endpoint' . decode_as_text + HTTP.fetch base_url_with_slash+'nonexistent_endpoint' . decode_as_text . should_fail_with HTTP_Error get_num_response_cache_entries . should_equal 1 cloud_setup = Cloud_Tests_Setup.prepare @@ -437,9 +439,9 @@ add_specs suite_builder = . add_query_argument "arg1" secret2 . add_query_argument "arg2" "plain value" - HTTP.fetch url1 . decode_as_text + HTTP.fetch url1 . decode_as_text . should_succeed get_num_response_cache_entries . should_equal 1 - HTTP.fetch uri2 . decode_as_text + HTTP.fetch uri2 . decode_as_text . should_succeed get_num_response_cache_entries . should_equal 2 group_builder.specify "Should work with secrets in the headers" pending=pending_has_url <| Test.with_retries <| @@ -455,9 +457,9 @@ add_specs suite_builder = headers1 = [Header.new "A-Header" secret1] headers2 = [Header.new "A-Header" secret2] - HTTP.fetch headers=headers1 uri . decode_as_text + HTTP.fetch headers=headers1 uri . decode_as_text . should_succeed get_num_response_cache_entries . should_equal 1 - HTTP.fetch headers=headers2 uri . decode_as_text + HTTP.fetch headers=headers2 uri . decode_as_text . should_succeed get_num_response_cache_entries . 
should_equal 2 group_builder.specify "Does not attempt to make room for the maximum file size when that is larger than the total cache size" pending=pending_has_url <| Test.with_retries <| diff --git a/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso b/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso index 2f365d7ce65c..bfdb332f3199 100644 --- a/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso @@ -2,7 +2,7 @@ from Standard.Base import all import Standard.Test.Extensions -from Standard.Table import Table +from Standard.Table import Table, Value_Type from Standard.Table.Columns_To_Add import Columns_To_Add from Standard.Table.Errors import Invalid_Value_Type, Column_Count_Exceeded, Duplicate_Output_Column_Names, No_Such_Column from Standard.Test import all @@ -204,7 +204,7 @@ add_specs suite_builder = cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing, Nothing], [1, "c", "d", "ef", Nothing], [2, "gh", "ij", "u", Nothing]] - expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 3"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 4"] expected_rows . cast "bar 4" Value_Type.Char t2 = t.split_to_columns "bar" "b" column_count=(Columns_To_Add.First 4) t2.should_equal expected t2.at "bar 3" . value_type . is_text . should_be_true @@ -213,7 +213,7 @@ add_specs suite_builder = cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing, Nothing], [1, "c", "d", "ef", Nothing], [2, "gh", "ij", "u", Nothing]] - expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 3"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 4"] expected_rows . cast "bar 4" Value_Type.Char t2 = t.split_to_columns "bar" "b" column_count=4 t2.should_equal expected t2.at "bar 3" . value_type . 
is_text . should_be_true @@ -262,7 +262,7 @@ add_specs suite_builder = cols = [["foo", [0, 1, 2]], ["bar", ["ghbijbu", "cbdbef", "abc"]]] t = Table.new cols expected_rows = [[0, "gh", "ij", "u", Nothing], [1, "c", "d", "ef", Nothing], [2, "a", "c", Nothing, Nothing]] - expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 3"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 4"] expected_rows . cast "bar 4" Value_Type.Char t2 = t.split_to_columns "bar" "b" column_count=(Columns_To_Add.First 4) t2.should_equal expected t2.at "bar 3" . value_type . is_text . should_be_true @@ -271,7 +271,7 @@ add_specs suite_builder = cols = [["foo", [0, 1, 2]], ["bar", ["ghbijbu", "cbdbef", "abc"]]] t = Table.new cols expected_rows = [[0, "gh", "ij", "u", Nothing], [1, "c", "d", "ef", Nothing], [2, "a", "c", Nothing, Nothing]] - expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 3"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 4"] expected_rows . cast "bar 4" Value_Type.Char t2 = t.split_to_columns "bar" "b" column_count=4 t2.should_equal expected t2.at "bar 3" . value_type . is_text . 
should_be_true @@ -433,4 +433,3 @@ main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder suite.run_with_filter filter - diff --git a/test/Table_Tests/src/Util.enso b/test/Table_Tests/src/Util.enso index b956fe84dd66..2976f150d6f9 100644 --- a/test/Table_Tests/src/Util.enso +++ b/test/Table_Tests/src/Util.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Database.DB_Table.DB_Table import Standard.Database.DB_Column.DB_Column @@ -11,23 +12,27 @@ polyglot java import org.enso.base_test_helpers.FileSystemHelper Table.should_equal : Any -> Integer -> Any Table.should_equal self expected frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip + rhs_error_check expected Panic.catch Test_Failure_Error (table_should_equal_impl self expected loc) error-> - Test.fail error.payload.message + Test.fail error.payload.message Column.should_equal : Any -> Integer -> Any Column.should_equal self expected frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip + rhs_error_check expected Panic.catch Test_Failure_Error (column_should_equal_impl self expected loc) error-> Test.fail error.payload.message DB_Table.should_equal : DB_Table -> Integer -> Any DB_Table.should_equal self expected frames_to_skip=0 = + rhs_error_check expected t0 = self.read t1 = expected.read t0 . should_equal t1 frames_to_skip+1 DB_Column.should_equal : DB_Column -> Integer -> Any DB_Column.should_equal self expected frames_to_skip=0 = + rhs_error_check expected t0 = self.read t1 = expected.read t0 . should_equal t1 frames_to_skip+1 @@ -142,3 +147,14 @@ Error.should_have_relative_ordering self example = loc = Meta.get_source_location 1 _ = example Test.fail "Expected a vector but got a dataflow error "+self.catch.to_display_text+" (at "+loc+")." + + +## PRIVATE + A helper that ensures that the expected value provided in some of the Test + operations is not an error. 
+ The left-hand side may be an error and that will cause a test failure. + But the right-hand side being an error is bad test design and should be fixed. +rhs_error_check that = + if that.is_error then + msg = "Dataflow error ("+that.to_display_text+") provided as expected value. Use `should_fail_with` or change the test."+ ' Error stack trace was:\n'+that.get_stack_trace_text + Panic.throw (Illegal_Argument.Error msg) diff --git a/test/Table_Tests/src/Util_Spec.enso b/test/Table_Tests/src/Util_Spec.enso index f8f02507d2a0..d51f3a6c8d9c 100644 --- a/test/Table_Tests/src/Util_Spec.enso +++ b/test/Table_Tests/src/Util_Spec.enso @@ -1,10 +1,59 @@ from Standard.Base import all + from Standard.Table import Column, Table -from project.Util import all + +from Standard.Database import all + from Standard.Test import all +from enso_dev.Test_Tests.Helpers import expect_test_failure + +from project.Util import all + add_specs suite_builder = - suite_builder.group "Column should_equal" group_builder-> + suite_builder.group "Table/Column.should_equal helpers" group_builder-> + group_builder.specify "should report correct location for Table" <| + r1 = expect_test_failure <| + (Table.new [["X", [1]]]) . should_equal (Table.new [["X", [2]]]) + r1.message.should_contain "Util_Spec.enso:17" + + r2 = expect_test_failure <| + (Table.new [["X", [1]]]) . should_equal (Table.new [["A", [1]]]) + r2.message.should_contain "Util_Spec.enso:21" + + group_builder.specify "should report correct location for Column" <| + r1 = expect_test_failure <| + Column.from_vector "X" [1] . should_equal (Column.from_vector "X" [2]) + r1.message.should_contain "Util_Spec.enso:26" + + r2 = expect_test_failure <| + Column.from_vector "X" [1] . should_equal (Column.from_vector "A" [1]) + r2.message.should_contain "Util_Spec.enso:30" + + group_builder.specify "should report correct location for DB_Table" <| + tables = DB_Tables.make + r1 = expect_test_failure <| + tables.t1 . 
should_equal tables.t2 + r1.message.should_contain "Util_Spec.enso:36" + + r2 = expect_test_failure <| + tables.t1 . should_equal tables.tA + r2.message.should_contain "Util_Spec.enso:40" + + group_builder.specify "should report correct location for DB_Column" <| + tables = DB_Tables.make + c1 = tables.t1.at "X" + c2 = tables.t2.at "X" + cA = tables.tA.at "A" + + r1 = expect_test_failure <| + c1 . should_equal c2 + r1.message.should_contain "Util_Spec.enso:50" + + r2 = expect_test_failure <| + c1 . should_equal cA + r2.message.should_contain "Util_Spec.enso:54" + group_builder.specify "Two Columns Are Equal" <| expected_column = Column.from_vector "Col" ["Quis", "custodiet", "ipsos", "custodes?"] actual_column = Column.from_vector "Col" ["Quis", "custodiet", "ipsos", "custodes?"] @@ -44,7 +93,7 @@ add_specs suite_builder = expected_column = Column.from_vector "Col" [1.0, 2.0, Number.nan] actual_column = Column.from_vector "Col" [1.0, 2.0, Number.nan] actual_column.should_equal expected_column - suite_builder.group "Table should_equal" group_builder-> + group_builder.specify "Two Tables Are Equal" <| expected_table = Table.new [Column.from_vector "Col1" ["Quis", "custodiet", "ipsos", "custodes?"], Column.from_vector "Col2" ["Who", "guards", "the", "guards?"]] actual_table = Table.new [Column.from_vector "Col1" ["Quis", "custodiet", "ipsos", "custodes?"], Column.from_vector "Col2" ["Who", "guards", "the", "guards?"]] @@ -75,6 +124,17 @@ add_specs suite_builder = res = Panic.recover Test_Failure_Error (table_should_equal_impl actual_table expected_table "LOCATION_PATH") res.catch.message.should_equal "Got a Table, but expected a 42 (at LOCATION_PATH)." 
+type DB_Tables + Value t1 t2 tA + + make = + connection = Database.connect ..In_Memory + t1 = (Table.new [["X", [1]]]).select_into_database_table connection "t1" + t2 = (Table.new [["X", [2]]]).select_into_database_table connection "t2" + tA = (Table.new [["A", [1]]]).select_into_database_table connection "tA" + DB_Tables.Value t1 t2 tA + + main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder diff --git a/test/Test_Tests/package.yaml b/test/Test_Tests/package.yaml new file mode 100644 index 000000000000..2d35ea3d05a9 --- /dev/null +++ b/test/Test_Tests/package.yaml @@ -0,0 +1,7 @@ +name: Test_Tests +namespace: enso_dev +version: 0.0.1 +license: MIT +author: enso-dev@enso.org +maintainer: enso-dev@enso.org +prefer-local-libraries: true diff --git a/test/Test_Tests/src/Extensions_Spec.enso b/test/Test_Tests/src/Extensions_Spec.enso new file mode 100644 index 000000000000..afee91d94597 --- /dev/null +++ b/test/Test_Tests/src/Extensions_Spec.enso @@ -0,0 +1,86 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +from Standard.Test import all + +from project.Helpers import expect_test_failure + +main filter=Nothing = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter filter + +add_specs suite_builder = + suite_builder.group "should_equal extension method" group_builder-> + group_builder.specify "should report correct location for Text" <| + r1 = expect_test_failure <| + "a".should_equal "b" + r1.message.should_contain "Extensions_Spec.enso:17" + + group_builder.specify "should report correct location for numbers" <| + r1 = expect_test_failure <| + 1.should_equal 2 + r1.message.should_contain "Extensions_Spec.enso:22" + + r2 = expect_test_failure <| + 1.0 . should_equal 2 + r2.message.should_contain "Extensions_Spec.enso:26" + + r3 = expect_test_failure <| + 1.to_decimal . 
should_equal 2 + r3.message.should_contain "Extensions_Spec.enso:30" + + r4 = expect_test_failure <| + Number.nan.should_equal 2 + r4.message.should_contain "Extensions_Spec.enso:34" + + group_builder.specify "should report correct location for errors" <| + error = Error.throw (Illegal_Argument.Error "foo") + r1 = expect_test_failure <| + error.should_equal 10 + r1.message.should_contain "Extensions_Spec.enso:40" + + group_builder.specify "should panic if error is expected" <| + error = Error.throw (Illegal_Argument.Error "foo") + Test.expect_panic Illegal_Argument <| + 10.should_equal error + + suite_builder.group "should_not_equal extension method" group_builder-> + group_builder.specify "should report correct location" <| + r1 = expect_test_failure <| + 1.should_not_equal 1 + r1.message.should_contain "Extensions_Spec.enso:51" + + group_builder.specify "should report correct location for errors" <| + error = Error.throw (Illegal_Argument.Error "foo") + r1 = expect_test_failure <| + error.should_not_equal 1 + r1.message.should_contain "Extensions_Spec.enso:57" + + suite_builder.group "should_contain extension method" group_builder-> + group_builder.specify "should report correct location" <| + r1 = expect_test_failure <| + [1, 2].should_contain 3 + r1.message.should_contain "Extensions_Spec.enso:63" + + r2 = expect_test_failure <| + "abc".should_contain "d" + r2.message.should_contain "Extensions_Spec.enso:67" + + suite_builder.group "should_not_contain extension method" group_builder-> + group_builder.specify "should report correct location" <| + r1 = expect_test_failure <| + [1, 2].should_not_contain 2 + r1.message.should_contain "Extensions_Spec.enso:73" + + suite_builder.group "should_start_with extension method" group_builder-> + group_builder.specify "should report correct location" <| + r1 = expect_test_failure <| + "abc".should_start_with "d" + r1.message.should_contain "Extensions_Spec.enso:79" + + suite_builder.group "should_end_with extension method" 
group_builder-> + group_builder.specify "should report correct location" <| + r1 = expect_test_failure <| + "abc".should_end_with "d" + r1.message.should_contain "Extensions_Spec.enso:85" diff --git a/test/Test_Tests/src/Helpers.enso b/test/Test_Tests/src/Helpers.enso new file mode 100644 index 000000000000..beaa75f34fcb --- /dev/null +++ b/test/Test_Tests/src/Helpers.enso @@ -0,0 +1,17 @@ +from Standard.Base import all + +import Standard.Test.Spec_Result.Spec_Result +from Standard.Test import Test + +## Expects the inner action to report a test failure exception and returns its payload. +expect_test_failure ~action -> Spec_Result = + loc = Meta.get_source_location 1 + handle_panic caught_panic = + result = caught_panic.payload + case result of + Spec_Result.Failure _ _ -> result + _ -> Test.fail "Expected test failure, but "+result.to_text+" was raised as error." + + Panic.catch Spec_Result handler=handle_panic <| + action + Test.fail "Expected the inner action to fail, but there was no failure (at "+loc+")." 
diff --git a/test/Test_Tests/src/Main.enso b/test/Test_Tests/src/Main.enso new file mode 100644 index 000000000000..a4542f7d522b --- /dev/null +++ b/test/Test_Tests/src/Main.enso @@ -0,0 +1,13 @@ +from Standard.Base import all + +from Standard.Test import Test + +import project.Extensions_Spec + +add_specs suite_builder = + Extensions_Spec.add_specs suite_builder + +main filter=Nothing = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter filter From c336c765cdd39bd4f0542ac75dc54463f26e125c Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Thu, 19 Dec 2024 13:07:56 +0000 Subject: [PATCH 04/15] Missed one :( (#11918) --- app/ide-desktop/client/tasks/signArchivesMacOs.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/app/ide-desktop/client/tasks/signArchivesMacOs.ts b/app/ide-desktop/client/tasks/signArchivesMacOs.ts index 64310fad8816..979fa51c65bc 100644 --- a/app/ide-desktop/client/tasks/signArchivesMacOs.ts +++ b/app/ide-desktop/client/tasks/signArchivesMacOs.ts @@ -106,6 +106,7 @@ async function ensoPackageSignables(resourcesDir: string): Promise { [ 'org/jline/nativ/Mac/arm64/libjlinenative.jnilib', 'org/jline/nativ/Mac/x86_64/libjlinenative.jnilib', + 'org/jline/nativ/Mac/x86/libjlinenative.jnilib', ], ], [ From 6ddd4d312aa6ead21bfa60f7185b23a2c5814329 Mon Sep 17 00:00:00 2001 From: Sergei Garin Date: Thu, 19 Dec 2024 18:27:58 +0300 Subject: [PATCH 05/15] Remove query key (#11920) This PR removes obsolet query key from URL. 
Closes: https://github.com/enso-org/enso/issues/11912 --- app/gui/src/dashboard/pages/authentication/Setup/Setup.tsx | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/app/gui/src/dashboard/pages/authentication/Setup/Setup.tsx b/app/gui/src/dashboard/pages/authentication/Setup/Setup.tsx index b9218f4376c0..435928ae0449 100644 --- a/app/gui/src/dashboard/pages/authentication/Setup/Setup.tsx +++ b/app/gui/src/dashboard/pages/authentication/Setup/Setup.tsx @@ -331,11 +331,7 @@ const BASE_STEPS: Step[] = [ iconPosition="end" onPress={() => queryClient.invalidateQueries().then(() => { - navigate( - DASHBOARD_PATH + - '?' + - new URLSearchParams({ startModalDefaultOpen: 'true' }).toString(), - ) + navigate(DASHBOARD_PATH) }) } > From 0f88ffd7dc2acb9c0fdbd0f39756bcb698d77921 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Thu, 19 Dec 2024 16:10:19 +0000 Subject: [PATCH 06/15] Small fixes from loading doc site. (#11921) Fixes from loading docs. --- .../AWS/0.0.0-dev/src/S3/S3_File.enso | 2 +- .../lib/Standard/Base/0.0.0-dev/src/Data.enso | 2 + .../Base/0.0.0-dev/src/Data/Decimal.enso | 48 +++++++++---------- .../Database/0.0.0-dev/src/DB_Table.enso | 4 +- .../0.0.0-dev/src/Internal/Table_Helpers.enso | 3 +- .../Standard/Table/0.0.0-dev/src/Table.enso | 28 +++++------ 6 files changed, 45 insertions(+), 42 deletions(-) diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3_File.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3_File.enso index 7687176f6f1d..56ddae721bfc 100644 --- a/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3_File.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3_File.enso @@ -357,7 +357,7 @@ type S3_File ## GROUP Output ICON folder_add Within S3 the concept of creating directories has slightly different - meaning than on other filesystems. Text + meaning than on other filesystems. 
The `create_directory` method is provided on `S3_File` only for compatibility - to allow easy switching between file-systems. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso index 64a3c3cdcbf9..b6342f1277c6 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso @@ -189,6 +189,8 @@ read_text path=(Missing_Argument.throw "path") (encoding : Encoding = Encoding.d files from the subdirectories. If set to `False` (the default), only the immediate children of the listed directory are considered. + ? Name Filter Rules + The `name_filter` can contain the following special characters: - `"?"` - which matches a single filename character (so it will not match a `"/"`). diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Decimal.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Decimal.enso index f1aeebc7a78f..01074902e679 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Decimal.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Decimal.enso @@ -1246,41 +1246,41 @@ type Decimal ## GROUP Math ICON input_number - Construct a `Decimal` from a `Text`, `Integer` or `Float`. + Construct a `Decimal` from a `Text`, `Integer` or `Float`. - Arguments: - - x: The `Text`, `Integer`, or `Float` to construct a `Decimal` from. - - mc: The `Math_Context` to use to specify precision and `Rounding_Mode`. - If a `Math_Context` is used, there is a possibility of a loss of - precision. + Arguments: + - x: The `Text`, `Integer`, or `Float` to construct a `Decimal` from. + - mc: The `Math_Context` to use to specify precision and `Rounding_Mode`. + If a `Math_Context` is used, there is a possibility of a loss of + precision. - ? Number Format + ? Number Format - The textual format for a Decimal is defined at - https://docs.oracle.com/javase/8/docs/api/java/math/BigDecimal.html#BigDecimal-java.lang.String-. 
+ The textual format for a Decimal is defined at + https://docs.oracle.com/javase/8/docs/api/java/math/BigDecimal.html#BigDecimal-java.lang.String-. - ! Error Conditions + ! Error Conditions - - If the `Text` argument is incorrectly formatted, a `Number_Parse_Error` - is thrown. - - If the construction of the Decimal results in a loss of precision, a - `Loss_Of_Numeric_Precision` warning is attached. This can only happen - if a `Math_Context` value is explicitly passed. + - If the `Text` argument is incorrectly formatted, a `Number_Parse_Error` + is thrown. + - If the construction of the Decimal results in a loss of precision, a + `Loss_Of_Numeric_Precision` warning is attached. This can only happen + if a `Math_Context` value is explicitly passed. - ^ Example - Create a `Decimal` from a `Text`. + ^ Example + Create a `Decimal` from a `Text`. - c = dec "12.345" + c = dec "12.345" - ^ Example - Create a `Decimal` from an `Integer`. + ^ Example + Create a `Decimal` from an `Integer`. - c = dec 12345 + c = dec 12345 - ^ Example - Create a `Decimal` from a `Float`. + ^ Example + Create a `Decimal` from a `Float`. - c = dec 12.345 + c = dec 12.345 dec : Text | Integer | Float -> Math_Context | Nothing -> Decimal ! Arithmetic_Error | Number_Parse_Error dec (x : Text | Integer | Float) (mc : Math_Context | Nothing = Nothing) -> Decimal ! 
Arithmetic_Error | Number_Parse_Error = handle_java_exception <| diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso index c7f658c6f780..d5f921f15d24 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso @@ -8,8 +8,8 @@ import Standard.Base.Errors.Common.Additional_Warnings import Standard.Base.Errors.Common.Floating_Point_Equality import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.Index_Out_Of_Bounds -import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Common.Missing_Argument +import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Deprecated.Deprecated import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument @@ -927,7 +927,7 @@ type DB_Table updated_table = renamed_table.updated_columns (renamed_table.internal_columns + [new_column]) updated_table.as_subquery - ## PRIVATE add group column, group id, bucket, tile + ## ALIAS add group column, bucket, group id, tile GROUP Standard.Base.Values ICON column_add Adds a new column to the table enumerating groups of rows, assigning each diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index 8587e5449244..6b6a0b302829 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -588,7 +588,8 @@ replace_columns_with_transformed_columns table selectors transformer error_on_mi replace_columns_with_columns table columns new_columns -## Takes a table and a list of counts and returns a new table with the rows +## PRIVATE + Takes a table and a list of counts and returns a new table with the rows 
duplicated according to the counts. The vector should have the same number of elements as the number of rows in the table. duplicate_rows (table : Table) (counts : Vector Integer) -> Table = diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso index 19fa076122a4..6c90131501af 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso @@ -691,7 +691,7 @@ type Table result = Table.new new_columns Warning.attach (Deprecated.Warning "Standard.Table.Table.Table" "remove_columns_by_type" "Deprecated: use `remove_columns` with a `By_Type` instead.") result - ## ALIAS drop_missing_rows, dropna, remove_blank_rows, remove_empty_rows, remove_missing_rows, filter_empty_rows, drop_empty_rows + ## ALIAS drop_empty_rows, drop_missing_rows, dropna, filter_empty_rows, remove_blank_rows, remove_empty_rows, remove_missing_rows GROUP Standard.Base.Selections ICON select_column @@ -731,7 +731,7 @@ type Table new_columns = self.columns_helper.select_blank_columns_helper when treat_nans_as_blank Table.new new_columns - ## ALIAS drop_missing_columns, drop_na, select_blank_columns, select_blank_fields, select_missing_columns, select_na, filter_blank_columns + ## ALIAS drop_missing_columns, drop_na, filter_blank_columns, select_blank_columns, select_blank_fields, select_missing_columns, select_na GROUP Standard.Base.Selections ICON select_column @@ -1038,7 +1038,7 @@ type Table Warning.with_suspended new_names names-> Table.new (self.columns.map c-> c.rename (names.at c.name)) - ## ALIAS rename, header + ## ALIAS header, rename GROUP Standard.Base.Metadata ICON table_edit Returns a new table with the columns renamed based on entries in the @@ -1078,7 +1078,7 @@ type Table problem_builder.attach_problems_before on_problems <| self.drop (..First 1) . 
rename_columns new_names_cleaned on_problems=on_problems - ## ALIAS group by, summarize, count, count distinct, sum, average, mean, median, percentile, mode, standard deviation, variance, minimum, maximum, first, last, shortest, longest + ## ALIAS average, count, count distinct, first, group by, last, longest, maximum, mean, median, minimum, mode, percentile, shortest, standard deviation, sum, summarize, variance GROUP Standard.Base.Calculations ICON transform4 @@ -1856,7 +1856,7 @@ type Table Warning.attach (Deprecated.Warning "Standard.Table.Table.Table" "auto_value_types" "Deprecated: `auto_value_types` has been replaced by `auto_cast`.") <| self.auto_cast columns shrink_types error_on_missing_columns on_problems - ## ALIAS tokenize, parse + ## ALIAS parse, tokenize GROUP Standard.Base.Conversions ICON split Creates a new table by splitting the chosen column of text, by the @@ -1887,7 +1887,7 @@ type Table split_to_columns self column delimiter="," (column_count : Columns_To_Add = ..All_Columns) on_problems:Problem_Behavior=..Report_Warning = Split_Tokenize.split_to_columns self column delimiter column_count=column_count on_problems - ## ALIAS tokenize, parse + ## ALIAS parse, tokenize GROUP Standard.Base.Conversions ICON split Creates a new table by splitting the chosen column of text, by the @@ -1907,7 +1907,7 @@ type Table split_to_rows self column delimiter="," = Split_Tokenize.split_to_rows self column delimiter - ## ALIAS split, parse, regex + ## ALIAS parse, regex, split GROUP Standard.Base.Conversions ICON split @@ -1940,7 +1940,7 @@ type Table tokenize_to_columns self column pattern="." 
case_sensitivity:Case_Sensitivity=..Sensitive (column_count : Columns_To_Add = ..All_Columns) on_problems:Problem_Behavior=..Report_Warning = Split_Tokenize.tokenize_to_columns self column pattern case_sensitivity column_count on_problems - ## ALIAS split, parse, regex + ## ALIAS parse, regex, split GROUP Standard.Base.Conversions ICON split @@ -1969,7 +1969,7 @@ type Table tokenize_to_rows self column pattern="." case_sensitivity:Case_Sensitivity=..Sensitive at_least_one_row:Boolean=False = Split_Tokenize.tokenize_to_rows self column pattern case_sensitivity at_least_one_row - ## ALIAS split, tokenize, regex + ## ALIAS regex, split, tokenize GROUP Standard.Base.Conversions ICON split Converts a Text column into new columns using a regular expression @@ -2325,7 +2325,7 @@ type Table add_row_number self (name:Text="Row") (from:Integer=0) (step:Integer=1) (group_by:(Vector | Text | Integer | Regex)=[]) (order_by:(Vector | Text)=[]) (on_problems:Problem_Behavior=..Report_Warning) = Incomparable_Values.handle_errors <| Add_Row_Number.add_row_number self name from step group_by order_by on_problems - ## PRIVATE add group column, group id, bucket, tile + ## ALIAS add group column, bucket, group id, tile GROUP Standard.Base.Values ICON column_add Adds a new column to the table enumerating groups of rows, assigning each @@ -2637,7 +2637,7 @@ type Table if self.row_count == 0 then Error.throw (Index_Out_Of_Bounds.Error 0 0) else Row.Value self (self.row_count-1) - ## ALIAS merge, xlookup, vlookup, hlookup, lookup + ## ALIAS hlookup, lookup, merge, vlookup, xlookup GROUP Standard.Base.Calculations ICON join Joins two tables according to the specified join conditions. 
@@ -2820,7 +2820,7 @@ type Table self.java_table.crossJoin right.java_table right_prefix java_aggregator Table.Value new_java_table - ## ALIAS join, xlookup, vlookup, hlookup, lookup + ## ALIAS hlookup, join, lookup, vlookup, xlookup GROUP Standard.Base.Calculations ICON join Merges this table with a lookup table @@ -3430,7 +3430,7 @@ type Table mask = OrderMask.reverse self.row_count Table.Value <| self.java_table.applyMask mask - ## ALIAS export, save, output, to_file + ## ALIAS export, output, save, to_file GROUP Standard.Base.Output ICON data_output This function writes a table from memory into a file. @@ -3744,7 +3744,7 @@ type Table transformer col = col.text_cleanse remove Table_Helpers.replace_columns_with_transformed_columns self from transformer - ## ALIAS cumulative, count, sum, total, minimum, maximum, sum, mean, product, variance, standard deviation + ## ALIAS count, cumulative, maximum, mean, minimum, product, standard deviation, sum, sum, total, variance GROUP Standard.Base.Values ICON column_add Adds a new column to the table with a running calculation. From 071a14a894ffbcc1ed53f3c5c7a450ed661cbf52 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Thu, 19 Dec 2024 19:25:36 +0100 Subject: [PATCH 07/15] Ensure isAllTypes is compilation constant (#11913) Fixes #11901 up by making sure `isAllTypes()` is a compilation constant. 
--- .../node/typecheck/AbstractTypeCheckNode.java | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/typecheck/AbstractTypeCheckNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/typecheck/AbstractTypeCheckNode.java index 1dba321b3649..1246a3f89b90 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/typecheck/AbstractTypeCheckNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/typecheck/AbstractTypeCheckNode.java @@ -1,7 +1,9 @@ package org.enso.interpreter.node.typecheck; +import com.oracle.truffle.api.CompilerAsserts; import com.oracle.truffle.api.CompilerDirectives; import com.oracle.truffle.api.frame.VirtualFrame; +import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.nodes.Node; import java.util.List; import org.enso.interpreter.node.ExpressionNode; @@ -29,11 +31,16 @@ abstract Object executeCheckOrConversion( abstract String expectedTypeMessage(); + @ExplodeLoop final boolean isAllTypes() { Node p = this; + CompilerAsserts.compilationConstant(p); for (; ; ) { if (p instanceof TypeCheckValueNode vn) { - return vn.isAllTypes(); + CompilerAsserts.compilationConstant(vn); + var allTypes = vn.isAllTypes(); + CompilerAsserts.compilationConstant(allTypes); + return allTypes; } p = p.getParent(); } From 6f211799da80db8b63c3e5080a98dec704c9a693 Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Fri, 20 Dec 2024 00:26:57 +0300 Subject: [PATCH 08/15] Update required checks (#11919) * update: required checks * update: rename GUI Packaging to IDE --- .github/workflows/engine-pull-request.yml | 2 +- .github/workflows/gui-pull-request.yml | 2 +- ...ptional.yml => ide-packaging-optional.yml} | 4 ++-- .../{gui-packaging.yml => ide-packaging.yml} | 4 ++-- ...-pull-request.yml => ide-pull-request.yml} | 20 +++++++++---------- .github/workflows/wasm-pull-request.yml | 2 +- build/build/paths.yaml | 4 ++-- 
build/build/src/ci_gen.rs | 16 +++++++-------- 8 files changed, 27 insertions(+), 27 deletions(-) rename .github/workflows/{gui-packaging-optional.yml => ide-packaging-optional.yml} (99%) rename .github/workflows/{gui-packaging.yml => ide-packaging.yml} (99%) rename .github/workflows/{gui-packaging-pull-request.yml => ide-pull-request.yml} (73%) diff --git a/.github/workflows/engine-pull-request.yml b/.github/workflows/engine-pull-request.yml index 47b2d7adf5f2..7231da64327d 100644 --- a/.github/workflows/engine-pull-request.yml +++ b/.github/workflows/engine-pull-request.yml @@ -36,7 +36,7 @@ jobs: secrets: inherit required-checks: - name: Required Checks + name: Engine Required Checks runs-on: ubuntu-latest needs: [engine-checks] if: always() diff --git a/.github/workflows/gui-pull-request.yml b/.github/workflows/gui-pull-request.yml index f360b23be5e5..56040e07291f 100644 --- a/.github/workflows/gui-pull-request.yml +++ b/.github/workflows/gui-pull-request.yml @@ -79,7 +79,7 @@ jobs: secrets: inherit required-checks: - name: Required Checks + name: GUI Required Checks runs-on: ubuntu-latest needs: [prettier, gui-checks, storybook] if: always() diff --git a/.github/workflows/gui-packaging-optional.yml b/.github/workflows/ide-packaging-optional.yml similarity index 99% rename from .github/workflows/gui-packaging-optional.yml rename to .github/workflows/ide-packaging-optional.yml index 4f5ab0bfd79a..300955745908 100644 --- a/.github/workflows/gui-packaging-optional.yml +++ b/.github/workflows/ide-packaging-optional.yml @@ -1,7 +1,7 @@ # This file is auto-generated. Do not edit it manually! # Edit the enso_build::ci_gen module instead and run `cargo run --package enso-build-ci-gen`. 
-name: GUI Packaging (Optional) +name: IDE Packaging (Optional) on: workflow_dispatch: inputs: @@ -12,7 +12,7 @@ on: default: false workflow_call: {} concurrency: - group: ${{ github.workflow }}-${{ github.ref }}-gui-packaging-optional + group: ${{ github.workflow }}-${{ github.ref }}-ide-packaging-optional cancel-in-progress: ${{ github.ref != 'refs/heads/develop' }} jobs: enso-build-ci-gen-job-build-backend-macos-amd64: diff --git a/.github/workflows/gui-packaging.yml b/.github/workflows/ide-packaging.yml similarity index 99% rename from .github/workflows/gui-packaging.yml rename to .github/workflows/ide-packaging.yml index 52fcced5cb4e..b6a94a5150d8 100644 --- a/.github/workflows/gui-packaging.yml +++ b/.github/workflows/ide-packaging.yml @@ -1,7 +1,7 @@ # This file is auto-generated. Do not edit it manually! # Edit the enso_build::ci_gen module instead and run `cargo run --package enso-build-ci-gen`. -name: GUI Packaging +name: IDE Packaging on: workflow_dispatch: inputs: @@ -12,7 +12,7 @@ on: default: false workflow_call: {} concurrency: - group: ${{ github.workflow }}-${{ github.ref }}-gui-packaging + group: ${{ github.workflow }}-${{ github.ref }}-ide-packaging cancel-in-progress: ${{ github.ref != 'refs/heads/develop' }} jobs: enso-build-ci-gen-job-build-backend-linux-amd64: diff --git a/.github/workflows/gui-packaging-pull-request.yml b/.github/workflows/ide-pull-request.yml similarity index 73% rename from .github/workflows/gui-packaging-pull-request.yml rename to .github/workflows/ide-pull-request.yml index f38628a231fe..f21d23266998 100644 --- a/.github/workflows/gui-packaging-pull-request.yml +++ b/.github/workflows/ide-pull-request.yml @@ -1,6 +1,6 @@ # This file is not auto-generated. Feel free to edit it. 
-name: ✨ GUI Packaging +name: ✨ IDE on: push: @@ -9,7 +9,7 @@ on: pull_request: concurrency: - group: ${{ github.workflow }}-${{ github.ref }}-gui-packaging-pull-request + group: ${{ github.workflow }}-${{ github.ref }}-ide-pull-request cancel-in-progress: ${{ github.ref != 'refs/heads/develop' }} jobs: @@ -23,31 +23,31 @@ jobs: uses: ./.github/workflows/engine-changed-files.yml secrets: inherit - gui-packaging: + ide-packaging: name: 📦 Package - uses: ./.github/workflows/gui-packaging.yml + uses: ./.github/workflows/ide-packaging.yml needs: [gui-changed-files, engine-changed-files] if: needs.gui-changed-files.outputs.any_changed == 'true' || needs.engine-changed-files.outputs.any_changed == 'true' || github.ref == 'refs/heads/develop' secrets: inherit - gui-packaging-optional: + ide-packaging-optional: name: 📦 Package (Optional) - uses: ./.github/workflows/gui-packaging-optional.yml + uses: ./.github/workflows/ide-packaging-optional.yml needs: [gui-changed-files, engine-changed-files] if: needs.gui-changed-files.outputs.any_changed == 'true' || needs.engine-changed-files.outputs.any_changed == 'true' || github.ref == 'refs/heads/develop' secrets: inherit required-checks: - name: Required Checks + name: IDE Required Checks runs-on: ubuntu-latest - needs: [gui-packaging] + needs: [ide-packaging] if: always() steps: - name: Checks Summary run: | - echo "GUI Packaging: ${{ needs.gui-packaging.result }}" + echo "IDE: ${{ needs.ide-packaging.result }}" - if [[ "${{ needs.gui-packaging.result }}" == "failure" ]]; then + if [[ "${{ needs.ide-packaging.result }}" == "failure" ]]; then exit 1 fi diff --git a/.github/workflows/wasm-pull-request.yml b/.github/workflows/wasm-pull-request.yml index 18a9c6d48a1d..2f0a22b8443b 100644 --- a/.github/workflows/wasm-pull-request.yml +++ b/.github/workflows/wasm-pull-request.yml @@ -26,7 +26,7 @@ jobs: secrets: inherit required-checks: - name: Required Checks + name: WASM Required Checks runs-on: ubuntu-latest needs: [wasm-checks] if: 
always() diff --git a/build/build/paths.yaml b/build/build/paths.yaml index 288c22dd6efa..f5539e706ce8 100644 --- a/build/build/paths.yaml +++ b/build/build/paths.yaml @@ -11,8 +11,8 @@ engine-checks-optional.yml: engine-checks.yml: extra-nightly-tests.yml: - gui-packaging-optional.yml: - gui-packaging.yml: + ide-packaging-optional.yml: + ide-packaging.yml: nightly.yml: promote.yml: release.yml: diff --git a/build/build/src/ci_gen.rs b/build/build/src/ci_gen.rs index 6163c84baf54..f6f831b9e503 100644 --- a/build/build/src/ci_gen.rs +++ b/build/build/src/ci_gen.rs @@ -687,15 +687,15 @@ pub fn typical_check_triggers() -> Event { } } -pub fn gui_packaging() -> Result { +pub fn ide_packaging() -> Result { let on = Event { workflow_dispatch: Some(manual_workflow_dispatch()), workflow_call: Some(default()), ..default() }; let mut workflow = Workflow { - name: "GUI Packaging".into(), - concurrency: Some(concurrency("gui-packaging")), + name: "IDE Packaging".into(), + concurrency: Some(concurrency("ide-packaging")), on, ..default() }; @@ -710,15 +710,15 @@ pub fn gui_packaging() -> Result { Ok(workflow) } -pub fn gui_packaging_optional() -> Result { +pub fn ide_packaging_optional() -> Result { let on = Event { workflow_dispatch: Some(manual_workflow_dispatch()), workflow_call: Some(default()), ..default() }; let mut workflow = Workflow { - name: "GUI Packaging (Optional)".into(), - concurrency: Some(concurrency("gui-packaging-optional")), + name: "IDE Packaging (Optional)".into(), + concurrency: Some(concurrency("ide-packaging-optional")), on, ..default() }; @@ -906,8 +906,8 @@ pub fn generate( (repo_root.engine_checks_optional_yml.to_path_buf(), engine_checks_optional()?), (repo_root.engine_checks_nightly_yml.to_path_buf(), engine_checks_nightly()?), (repo_root.extra_nightly_tests_yml.to_path_buf(), extra_nightly_tests()?), - (repo_root.gui_packaging_yml.to_path_buf(), gui_packaging()?), - (repo_root.gui_packaging_optional_yml.to_path_buf(), gui_packaging_optional()?), + 
(repo_root.ide_packaging_yml.to_path_buf(), ide_packaging()?), + (repo_root.ide_packaging_optional_yml.to_path_buf(), ide_packaging_optional()?), (repo_root.wasm_checks_yml.to_path_buf(), wasm_checks()?), (repo_root.engine_benchmark_yml.to_path_buf(), engine_benchmark()?), (repo_root.std_libs_benchmark_yml.to_path_buf(), std_libs_benchmark()?), From ddc2f3f6caffa0e0723da5ec08116f7896aff347 Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Fri, 20 Dec 2024 15:26:21 +0300 Subject: [PATCH 09/15] Update gui-changed-files workflow (#11931) followup to #11723 Add packaging workflows to GUI sources. --- .github/workflows/gui-changed-files.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/gui-changed-files.yml b/.github/workflows/gui-changed-files.yml index d16036ae0093..7c88edd9ffb3 100644 --- a/.github/workflows/gui-changed-files.yml +++ b/.github/workflows/gui-changed-files.yml @@ -36,9 +36,8 @@ jobs: .prettierrc.js .prettierignore vitest.workspace.ts - .github/workflows/gui-changed-files.yml - .github/workflows/gui-checks.yml - .github/workflows/gui-pull-request.yml + .github/workflows/gui* + .github/workflows/ide* .github/workflows/storybook.yml files_ignore: | app/ide-desktop/** From e5a1c5a6fa42f583521f2f322c0e1197e5491cf6 Mon Sep 17 00:00:00 2001 From: AdRiley Date: Fri, 20 Dec 2024 13:48:09 +0000 Subject: [PATCH 10/15] Fix snowflake sort tests (#11905) * Fix snowflake tests * Rename flag to make code clearer * Add new sort dialect flags * Fix path for Snowflake --- .../Database/Redshift/Internal/Redshift_Dialect.enso | 4 +++- .../Standard/Database/0.0.0-dev/src/Dialect_Flag.enso | 10 ++++++++-- .../src/Internal/Postgres/Postgres_Dialect.enso | 4 +++- .../0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso | 4 +++- .../0.0.0-dev/src/Internal/SQLServer_Dialect.enso | 4 +++- .../0.0.0-dev/src/Internal/Snowflake_Dialect.enso | 6 ++++-- .../src/Common_Table_Operations/Order_By_Spec.enso | 6 +++--- 
test/Table_Tests/src/Database/Upload_Spec.enso | 2 +- test/Table_Tests/src/In_Memory/Common_Spec.enso | 2 +- 9 files changed, 29 insertions(+), 13 deletions(-) diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso index 2a753d901a8d..4d7c649c1f9e 100644 --- a/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso @@ -198,8 +198,10 @@ type Redshift_Dialect Dialect_Flag.Supports_Infinity -> True Dialect_Flag.Case_Sensitive_Text_Comparison -> True Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False - Dialect_Flag.Case_Insensitive_Ordering -> False + Dialect_Flag.Supports_Case_Insensitive_Ordering -> False Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> True + Dialect_Flag.Order_By_Unicode_Normalization_When_Case_Insensitive -> True + Dialect_Flag.Case_Insensitive_Ordering_By_Default -> True Dialect_Flag.Allows_Mixed_Type_Comparisons -> False Dialect_Flag.Supports_Unicode_Normalization -> False Dialect_Flag.NaN_Non_Comparable -> True diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Dialect_Flag.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Dialect_Flag.enso index 89f0e29ea453..6bd38ba440eb 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Dialect_Flag.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Dialect_Flag.enso @@ -34,10 +34,16 @@ type Dialect_Flag Supports_Sort_Digits_As_Numbers ## PRIAVTE Specifies if the backend supports case insensitive ordering. - Case_Insensitive_Ordering + Supports_Case_Insensitive_Ordering ## PRIVATE - Specifies if the backend supports unicode normalization in its default ordering. + Specifies if the backend uses case insensitive sort in its default ordering. 
+ Case_Insensitive_Ordering_By_Default + ## PRIVATE + Specifies if the backend uses unicode normalization in its default ordering. Order_By_Unicode_Normalization_By_Default + ## PRIVATE + Specifies if the backend uses unicode normalization when doing case insentive ordering. + Order_By_Unicode_Normalization_When_Case_Insensitive ## PRIVATE Specifies if mixed operations comparing mixed types are allowed by a given backend. Some backends will allow diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso index 8ebd32a200e0..533712091822 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso @@ -270,8 +270,10 @@ type Postgres_Dialect Dialect_Flag.Supports_Infinity -> True Dialect_Flag.Case_Sensitive_Text_Comparison -> True Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False - Dialect_Flag.Case_Insensitive_Ordering -> True + Dialect_Flag.Supports_Case_Insensitive_Ordering -> True Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> True + Dialect_Flag.Order_By_Unicode_Normalization_When_Case_Insensitive -> True + Dialect_Flag.Case_Insensitive_Ordering_By_Default -> True Dialect_Flag.Allows_Mixed_Type_Comparisons -> False Dialect_Flag.Supports_Unicode_Normalization -> False Dialect_Flag.NaN_Non_Comparable -> False diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso index 9e0bcca0a1fb..efbc22421dc4 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso @@ -281,8 +281,10 @@ type SQLite_Dialect Dialect_Flag.Supports_Infinity -> 
True Dialect_Flag.Case_Sensitive_Text_Comparison -> True Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False - Dialect_Flag.Case_Insensitive_Ordering -> False + Dialect_Flag.Supports_Case_Insensitive_Ordering -> True Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> False + Dialect_Flag.Order_By_Unicode_Normalization_When_Case_Insensitive -> False + Dialect_Flag.Case_Insensitive_Ordering_By_Default -> False Dialect_Flag.Allows_Mixed_Type_Comparisons -> True Dialect_Flag.Supports_Unicode_Normalization -> False Dialect_Flag.NaN_Non_Comparable -> True diff --git a/distribution/lib/Standard/Microsoft/0.0.0-dev/src/Internal/SQLServer_Dialect.enso b/distribution/lib/Standard/Microsoft/0.0.0-dev/src/Internal/SQLServer_Dialect.enso index 781f2ce6e2bd..a681cc9fdeb1 100644 --- a/distribution/lib/Standard/Microsoft/0.0.0-dev/src/Internal/SQLServer_Dialect.enso +++ b/distribution/lib/Standard/Microsoft/0.0.0-dev/src/Internal/SQLServer_Dialect.enso @@ -259,8 +259,10 @@ type SQLServer_Dialect Dialect_Flag.Supports_Infinity -> False Dialect_Flag.Case_Sensitive_Text_Comparison -> False Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False - Dialect_Flag.Case_Insensitive_Ordering -> True + Dialect_Flag.Supports_Case_Insensitive_Ordering -> True Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> True + Dialect_Flag.Order_By_Unicode_Normalization_When_Case_Insensitive -> True + Dialect_Flag.Case_Insensitive_Ordering_By_Default -> True Dialect_Flag.Allows_Mixed_Type_Comparisons -> False Dialect_Flag.Supports_Unicode_Normalization -> False Dialect_Flag.NaN_Non_Comparable -> True diff --git a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso index 3338d5262dfc..778a3609dcac 100644 --- a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso +++ b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso @@ 
-259,8 +259,10 @@ type Snowflake_Dialect Dialect_Flag.Supports_Infinity -> True Dialect_Flag.Case_Sensitive_Text_Comparison -> True Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False - Dialect_Flag.Case_Insensitive_Ordering -> True - Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> True + Dialect_Flag.Supports_Case_Insensitive_Ordering -> True + Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> False + Dialect_Flag.Order_By_Unicode_Normalization_When_Case_Insensitive -> True + Dialect_Flag.Case_Insensitive_Ordering_By_Default -> False Dialect_Flag.Allows_Mixed_Type_Comparisons -> False Dialect_Flag.Supports_Unicode_Normalization -> False Dialect_Flag.NaN_Non_Comparable -> False diff --git a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso index 8598078cfea8..b1e660caab8e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso @@ -267,9 +267,9 @@ add_order_specs suite_builder setup = t2.at "delta" . to_vector . should_equal ["a03", "a1", "a10", "a2"] t2.at "alpha" . to_vector . should_equal [0, 2, 3, 1] - if setup.flagged ..Case_Insensitive_Ordering then group_builder.specify "should support case insensitive ordering" <| + if setup.flagged ..Supports_Case_Insensitive_Ordering then group_builder.specify "should support case insensitive ordering" <| t1 = data.table.sort [..Name "eta"] text_ordering=(..Case_Insensitive) - expected = case setup.flagged ..Order_By_Unicode_Normalization_By_Default of + expected = case setup.flagged ..Order_By_Unicode_Normalization_When_Case_Insensitive of False -> ["Aleph", "alpha", "Beta", "bądź"] True -> ["Aleph", "alpha", "bądź", "Beta"] t1.at "eta" . to_vector . should_equal expected @@ -298,7 +298,7 @@ add_order_specs suite_builder setup = t3.at "psi" . to_vector . 
should_equal [Nothing, "c01", "c10", "C2"] t4 = data.table.sort [..Name "psi"] - case setup.flagged ..Case_Insensitive_Ordering of + case setup.flagged ..Case_Insensitive_Ordering_By_Default of True -> t4.at "psi" . to_vector . should_equal [Nothing, "c01", "c10", "C2"] False -> t4.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"] diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index dce15a1d1fbb..e0d0f15a8fd2 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -242,7 +242,7 @@ add_specs suite_builder setup make_new_connection persistent_connector=True = group_builder.specify "should not issue a DELETE statement for the original table name in dry run mode, even if the table does not exist" <| original_table_name = Name_Generator.random_name "no-delete-test" - log_file = enso_project.data / "transient" / "sql.log" + log_file = (Project_Description.new enso_dev.Table_Tests).data / "transient" / "sql.log" log_file.delete_if_exists Test_Environment.unsafe_with_environment_override "ENSO_SQL_LOG_PATH" log_file.absolute.path <| Context.Output.with_disabled <| diff --git a/test/Table_Tests/src/In_Memory/Common_Spec.enso b/test/Table_Tests/src/In_Memory/Common_Spec.enso index a3b1eae0711a..9d30533f6314 100644 --- a/test/Table_Tests/src/In_Memory/Common_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Common_Spec.enso @@ -41,7 +41,7 @@ in_memory_setup = True flagged_fn flag:Dialect_Flag = case flag of - Dialect_Flag.Case_Insensitive_Ordering -> False + Dialect_Flag.Case_Insensitive_Ordering_By_Default -> False Dialect_Flag.Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar -> False _ -> True From d87484b9b2eb2395499a19b59be41866c223b781 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Fri, 20 Dec 2024 15:42:56 +0100 Subject: [PATCH 11/15] Do not report exceptions on long running Excel reads (#11916) * Do not report 
exceptions on long running Excel reads This change introduces two modifications: - `ClosedByInterruptException` is wrapped in `InterruptedException` instead of `RuntimeException` - when instrumentation encounters `InterruptedException` it bails early Having `ClosedByInterruptException` wrapped in `RuntimeException` meant that it is being reported as a regular `HostException` in the engine and to the user. Instead it should be treated specially since we know that it is caused by cancelling a long-running job. Since it is a statically checked exception it has to be declared and the information has to be propagated through various lambda constructs (thanks Java!). The above change alone meant that an error is not reported for `Data.read` nodes but any values dependent on it would still report `No_Such_Method` error when the exception is reported as a value. Hence the early bail out mechanism. * Send `PendingInterrupted` on interrupt The information could be used in GUI to indicate pending execution that will take a tad longer.
* Prettify * Test `PendingInterrupted` payload * Add `wasInterrupted` flag to `Pending` Reduce `PendingInterrupted` to a flag in `Pending` * fmt --- .../protocol-language-server.md | 4 +- .../runtime/ContextEventsListener.scala | 4 +- .../runtime/ContextRegistryProtocol.scala | 26 +++++++- .../org/enso/polyglot/runtime/Runtime.scala | 9 ++- .../job/ProgramExecutionSupport.scala | 59 +++++++++++++++++++ .../instrument/RuntimeAsyncCommandsTest.scala | 7 +-- .../test/instrument/TestMessages.scala | 29 +++++++++ .../control/ThreadInterruptedException.java | 8 ++- .../enso/table/excel/ExcelConnectionPool.java | 10 ++-- .../java/org/enso/table/excel/ExcelRange.java | 5 +- .../java/org/enso/table/excel/ExcelSheet.java | 6 +- .../table/excel/ReadOnlyExcelConnection.java | 6 +- .../excel/xssfreader/XSSFReaderSheet.java | 13 ++-- .../excel/xssfreader/XSSFReaderWorkbook.java | 20 ++++--- .../java/org/enso/table/read/ExcelReader.java | 28 +++++---- .../table/util/ConsumerWithException.java | 45 ++++++++++++++ .../table/util/FunctionWithException.java | 51 ++++++++++++++++ .../org/enso/table/write/ExcelWriter.java | 25 +++++--- 18 files changed, 299 insertions(+), 56 deletions(-) create mode 100644 std-bits/table/src/main/java/org/enso/table/util/ConsumerWithException.java create mode 100644 std-bits/table/src/main/java/org/enso/table/util/FunctionWithException.java diff --git a/docs/language-server/protocol-language-server.md b/docs/language-server/protocol-language-server.md index 7006d840adf4..e47567953a2a 100644 --- a/docs/language-server/protocol-language-server.md +++ b/docs/language-server/protocol-language-server.md @@ -392,7 +392,7 @@ interface ExpressionUpdate { An information about the computed value. ```typescript -type ExpressionUpdatePayload = Value | DatafalowError | Panic | Pending; +type ExpressionUpdatePayload = Value | DataflowError | Panic | Pending; /** Indicates that the expression was computed to a value. 
*/ interface Value { @@ -424,6 +424,8 @@ interface Pending { /** Optional amount of already done work as a number between `0.0` to `1.0`. */ progress?: number; + /** Indicates whether the computation of the expression has been interrupted and will be retried. */ + wasInterrupted: boolean; } /** Information about warnings associated with the value. */ diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextEventsListener.scala b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextEventsListener.scala index 154a4328a8e5..e6596b450334 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextEventsListener.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextEventsListener.scala @@ -230,8 +230,8 @@ final class ContextEventsListener( functionSchema.map(toProtocolFunctionSchema) ) - case Api.ExpressionUpdate.Payload.Pending(m, p) => - ContextRegistryProtocol.ExpressionUpdate.Payload.Pending(m, p) + case Api.ExpressionUpdate.Payload.Pending(m, p, i) => + ContextRegistryProtocol.ExpressionUpdate.Payload.Pending(m, p, i) case Api.ExpressionUpdate.Payload.DataflowError(trace) => ContextRegistryProtocol.ExpressionUpdate.Payload.DataflowError(trace) diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextRegistryProtocol.scala b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextRegistryProtocol.scala index 29e083f50699..aef9df5d3167 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextRegistryProtocol.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextRegistryProtocol.scala @@ -231,8 +231,17 @@ object ContextRegistryProtocol { ) } - case class Pending(message: Option[String], progress: Option[Double]) - extends Payload + /** Indicates that an expression is pending a computation + */ + case class Pending( + message: 
Option[String], + progress: Option[Double], + wasInterrupted: Boolean + ) extends Payload + + /** Indicates that an expression's computation has been interrupted and shall be retried. + */ + case object PendingInterrupted extends Payload /** Indicates that the expression was computed to an error. * @@ -258,6 +267,8 @@ object ContextRegistryProtocol { val Pending = "Pending" + val PendingInterrupted = "PendingInterrupted" + val DataflowError = "DataflowError" val Panic = "Panic" @@ -291,6 +302,14 @@ object ContextRegistryProtocol { .deepMerge( Json.obj(CodecField.Type -> PayloadType.Pending.asJson) ) + case m: Payload.PendingInterrupted.type => + Encoder[Payload.PendingInterrupted.type] + .apply(m) + .deepMerge( + Json.obj( + CodecField.Type -> PayloadType.PendingInterrupted.asJson + ) + ) } implicit val decoder: Decoder[Payload] = @@ -307,6 +326,9 @@ object ContextRegistryProtocol { case PayloadType.Pending => Decoder[Payload.Pending].tryDecode(cursor) + + case PayloadType.PendingInterrupted => + Decoder[Payload.PendingInterrupted.type].tryDecode(cursor) } } } diff --git a/engine/polyglot-api/src/main/scala/org/enso/polyglot/runtime/Runtime.scala b/engine/polyglot-api/src/main/scala/org/enso/polyglot/runtime/Runtime.scala index 9d9ab4ccab90..cec87900947b 100644 --- a/engine/polyglot-api/src/main/scala/org/enso/polyglot/runtime/Runtime.scala +++ b/engine/polyglot-api/src/main/scala/org/enso/polyglot/runtime/Runtime.scala @@ -158,11 +158,14 @@ object Runtime { ) } - /** TBD + /** Indicates that an expression is pending a computation */ @named("expressionUpdatePayloadPending") - case class Pending(message: Option[String], progress: Option[Double]) - extends Payload; + case class Pending( + message: Option[String], + progress: Option[Double], + wasInterrupted: Boolean = false + ) extends Payload /** Indicates that the expression was computed to an error. 
* diff --git a/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/job/ProgramExecutionSupport.scala b/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/job/ProgramExecutionSupport.scala index 283ed9c2893d..53d89c8eaa7d 100644 --- a/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/job/ProgramExecutionSupport.scala +++ b/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/job/ProgramExecutionSupport.scala @@ -95,6 +95,21 @@ object ProgramExecutionSupport { val onComputedValueCallback: Consumer[ExpressionValue] = { value => if (callStack.isEmpty) { logger.log(Level.FINEST, s"ON_COMPUTED ${value.getExpressionId}") + + if (VisualizationResult.isInterruptedException(value.getValue)) { + value.getValue match { + case e: AbstractTruffleException => + sendInterruptedExpressionUpdate( + contextId, + executionFrame.syncState, + value + ) + // Bail out early. Any references to this value that do not expect + // Interrupted error will likely return `No_Such_Method` otherwise. 
+ throw new ThreadInterruptedException(e); + case _ => + } + } sendExpressionUpdate(contextId, executionFrame.syncState, value) sendVisualizationUpdates( contextId, @@ -377,6 +392,50 @@ object ProgramExecutionSupport { Api.ExecutionResult.Failure(ex.getMessage, None) } + private def sendInterruptedExpressionUpdate( + contextId: ContextId, + syncState: UpdatesSynchronizationState, + value: ExpressionValue + )(implicit ctx: RuntimeContext): Unit = { + val expressionId = value.getExpressionId + val methodCall = toMethodCall(value) + if ( + !syncState.isExpressionSync(expressionId) || + (methodCall.isDefined && !syncState.isMethodPointerSync( + expressionId + )) + ) { + val payload = + Api.ExpressionUpdate.Payload.Pending(None, None, wasInterrupted = true) + ctx.endpoint.sendToClient( + Api.Response( + Api.ExpressionUpdates( + contextId, + Set( + Api.ExpressionUpdate( + value.getExpressionId, + Option(value.getTypes).map(_.toVector), + methodCall, + value.getProfilingInfo.map { case e: ExecutionTime => + Api.ProfilingInfo.ExecutionTime(e.getNanoTimeElapsed) + }.toVector, + value.wasCached(), + value.isTypeChanged || value.isFunctionCallChanged, + payload + ) + ) + ) + ) + ) + + syncState.setExpressionSync(expressionId) + ctx.state.expressionExecutionState.setExpressionExecuted(expressionId) + if (methodCall.isDefined) { + syncState.setMethodPointerSync(expressionId) + } + } + } + private def sendExpressionUpdate( contextId: ContextId, syncState: UpdatesSynchronizationState, diff --git a/engine/runtime-integration-tests/src/test/scala/org/enso/interpreter/test/instrument/RuntimeAsyncCommandsTest.scala b/engine/runtime-integration-tests/src/test/scala/org/enso/interpreter/test/instrument/RuntimeAsyncCommandsTest.scala index 59d323a64a31..a86699d67d83 100644 --- a/engine/runtime-integration-tests/src/test/scala/org/enso/interpreter/test/instrument/RuntimeAsyncCommandsTest.scala +++ 
b/engine/runtime-integration-tests/src/test/scala/org/enso/interpreter/test/instrument/RuntimeAsyncCommandsTest.scala @@ -500,16 +500,15 @@ class RuntimeAsyncCommandsTest responses should contain theSameElementsAs Seq( Api.Response(requestId, Api.RecomputeContextResponse(contextId)), - TestMessages.update( + TestMessages.pendingInterrupted( contextId, - vId, - ConstantsGen.INTEGER, methodCall = Some( MethodCall( MethodPointer("Enso_Test.Test.Main", "Enso_Test.Test.Main", "loop"), Vector(1) ) - ) + ), + vId ), context.executionComplete(contextId) ) diff --git a/engine/runtime-integration-tests/src/test/scala/org/enso/interpreter/test/instrument/TestMessages.scala b/engine/runtime-integration-tests/src/test/scala/org/enso/interpreter/test/instrument/TestMessages.scala index e0dd02287428..b997f500651c 100644 --- a/engine/runtime-integration-tests/src/test/scala/org/enso/interpreter/test/instrument/TestMessages.scala +++ b/engine/runtime-integration-tests/src/test/scala/org/enso/interpreter/test/instrument/TestMessages.scala @@ -479,4 +479,33 @@ object TestMessages { ) ) + /** Create a pending interrupted response.
+ * + * @param contextId an identifier of the context + * @param expressionIds a list of pending expressions + * @return the expression update response + */ + def pendingInterrupted( + contextId: UUID, + methodCall: Option[Api.MethodCall], + expressionIds: UUID* + ): Api.Response = + Api.Response( + Api.ExpressionUpdates( + contextId, + expressionIds.toSet.map { expressionId => + Api.ExpressionUpdate( + expressionId, + None, + methodCall, + Vector(Api.ProfilingInfo.ExecutionTime(0)), + false, + true, + Api.ExpressionUpdate.Payload + .Pending(None, None, wasInterrupted = true) + ) + } + ) + ) + } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/control/ThreadInterruptedException.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/control/ThreadInterruptedException.java index 2693b6ee1411..cd004b800607 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/control/ThreadInterruptedException.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/control/ThreadInterruptedException.java @@ -1,4 +1,10 @@ package org.enso.interpreter.runtime.control; /** Thrown when guest code discovers a thread interrupt. 
*/ -public class ThreadInterruptedException extends RuntimeException {} +public class ThreadInterruptedException extends RuntimeException { + public ThreadInterruptedException() {} + + public ThreadInterruptedException(Throwable e) { + super(e); + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/excel/ExcelConnectionPool.java b/std-bits/table/src/main/java/org/enso/table/excel/ExcelConnectionPool.java index af8a43263ac3..5221576f113a 100644 --- a/std-bits/table/src/main/java/org/enso/table/excel/ExcelConnectionPool.java +++ b/std-bits/table/src/main/java/org/enso/table/excel/ExcelConnectionPool.java @@ -22,6 +22,7 @@ import org.apache.poi.xssf.usermodel.XSSFWorkbook; import org.enso.base.cache.ReloadDetector; import org.enso.table.excel.xssfreader.XSSFReaderWorkbook; +import org.enso.table.util.FunctionWithException; public class ExcelConnectionPool { public static final ExcelConnectionPool INSTANCE = new ExcelConnectionPool(); @@ -29,7 +30,7 @@ public class ExcelConnectionPool { private ExcelConnectionPool() {} public ReadOnlyExcelConnection openReadOnlyConnection(File file, ExcelFileFormat format) - throws IOException { + throws IOException, InterruptedException { synchronized (this) { if (isCurrentlyWriting) { throw new IllegalStateException( @@ -134,7 +135,7 @@ public R writeWorkbook(File file, Function writeAction) throws */ public R lockForWriting( File file, ExcelFileFormat format, File[] accompanyingFiles, Function action) - throws IOException { + throws IOException, InterruptedException { synchronized (this) { if (isCurrentlyWriting) { throw new IllegalStateException( @@ -242,7 +243,8 @@ static class ConnectionRecord { private ExcelWorkbook workbook; private IOException initializationException = null; - T withWorkbook(Function action) throws IOException { + T withWorkbook(FunctionWithException action) + throws IOException, InterruptedException { synchronized (this) { return action.apply(accessCurrentWorkbook()); } @@ -258,7 +260,7 @@ public 
void close() throws IOException { } } - void reopen(boolean throwOnFailure) throws IOException { + void reopen(boolean throwOnFailure) throws IOException, InterruptedException { synchronized (this) { if (workbook != null) { throw new IllegalStateException("The workbook is already open."); diff --git a/std-bits/table/src/main/java/org/enso/table/excel/ExcelRange.java b/std-bits/table/src/main/java/org/enso/table/excel/ExcelRange.java index 552f3385fd27..4d6526829d1d 100644 --- a/std-bits/table/src/main/java/org/enso/table/excel/ExcelRange.java +++ b/std-bits/table/src/main/java/org/enso/table/excel/ExcelRange.java @@ -181,7 +181,8 @@ public static ExcelRange forRows(String sheetName, int topRow, int bottomRow) { * @param sheet ExcelSheet containing the range refers to. * @return Expanded range covering the connected table of cells. */ - public static ExcelRange expandSingleCell(ExcelRange excelRange, ExcelSheet sheet) { + public static ExcelRange expandSingleCell(ExcelRange excelRange, ExcelSheet sheet) + throws InterruptedException { ExcelRow currentRow = sheet.get(excelRange.getTopRow()); if (currentRow == null || currentRow.isEmpty(excelRange.getLeftColumn())) { return new ExcelRange( @@ -337,7 +338,7 @@ public int getRowCount() { return isWholeColumn() ? Integer.MAX_VALUE : bottomRow - topRow + 1; } - public int getLastNonEmptyRow(ExcelSheet sheet) { + public int getLastNonEmptyRow(ExcelSheet sheet) throws InterruptedException { int lastRow = Math.min(sheet.getLastRow(), isWholeColumn() ? 
sheet.getLastRow() : bottomRow) + 1; diff --git a/std-bits/table/src/main/java/org/enso/table/excel/ExcelSheet.java b/std-bits/table/src/main/java/org/enso/table/excel/ExcelSheet.java index 4d2dd42a2a32..4dbe433d5d62 100644 --- a/std-bits/table/src/main/java/org/enso/table/excel/ExcelSheet.java +++ b/std-bits/table/src/main/java/org/enso/table/excel/ExcelSheet.java @@ -12,10 +12,10 @@ public interface ExcelSheet { String getName(); /** Gets the initial row index within the sheet (1-based). */ - int getFirstRow(); + int getFirstRow() throws InterruptedException; /** Gets the final row index within the sheet (1-based). */ - int getLastRow(); + int getLastRow() throws InterruptedException; /** * Gets the row at the given index within the sheet (1-based) @@ -23,7 +23,7 @@ public interface ExcelSheet { * @param row the row index (1-based)/ * @return the row object or null if the row index is out of range or doesn't exist. */ - ExcelRow get(int row); + ExcelRow get(int row) throws InterruptedException; /** Gets the underlying Apache POI Sheet object - may be null. Provided for Writer use only. 
*/ Sheet getSheet(); diff --git a/std-bits/table/src/main/java/org/enso/table/excel/ReadOnlyExcelConnection.java b/std-bits/table/src/main/java/org/enso/table/excel/ReadOnlyExcelConnection.java index 3cbac859648a..894771c96670 100644 --- a/std-bits/table/src/main/java/org/enso/table/excel/ReadOnlyExcelConnection.java +++ b/std-bits/table/src/main/java/org/enso/table/excel/ReadOnlyExcelConnection.java @@ -1,7 +1,7 @@ package org.enso.table.excel; import java.io.IOException; -import java.util.function.Function; +import org.enso.table.util.FunctionWithException; public class ReadOnlyExcelConnection implements AutoCloseable { @@ -27,7 +27,9 @@ public synchronized void close() throws IOException { record = null; } - public synchronized T withWorkbook(Function f) throws IOException { + public synchronized T withWorkbook( + FunctionWithException f) + throws IOException, InterruptedException { if (record == null) { throw new IllegalStateException("ReadOnlyExcelConnection is being used after it was closed."); } diff --git a/std-bits/table/src/main/java/org/enso/table/excel/xssfreader/XSSFReaderSheet.java b/std-bits/table/src/main/java/org/enso/table/excel/xssfreader/XSSFReaderSheet.java index cdb79cbdbd5b..2fc288ab0716 100644 --- a/std-bits/table/src/main/java/org/enso/table/excel/xssfreader/XSSFReaderSheet.java +++ b/std-bits/table/src/main/java/org/enso/table/excel/xssfreader/XSSFReaderSheet.java @@ -1,6 +1,7 @@ package org.enso.table.excel.xssfreader; import java.io.IOException; +import java.nio.channels.ClosedByInterruptException; import java.util.HashMap; import java.util.Map; import java.util.SortedMap; @@ -33,7 +34,7 @@ public XSSFReaderSheet(int sheetIdx, String sheetName, String relId, XSSFReaderW this.parent = parent; } - private synchronized void ensureReadSheetData() { + private synchronized void ensureReadSheetData() throws InterruptedException { if (hasReadSheetData) { return; } @@ -70,6 +71,8 @@ protected void onCell(int rowNumber, short columnNumber, String 
ref, CellValue v try { var sheet = reader.getSheet(relId); xmlReader.parse(new InputSource(sheet)); + } catch (ClosedByInterruptException e) { + throw new InterruptedException(e.getMessage()); } catch (SAXException | InvalidFormatException | IOException e) { throw new RuntimeException(e); } @@ -94,25 +97,25 @@ public String getName() { return sheetName; } - public String getDimensions() { + public String getDimensions() throws InterruptedException { ensureReadSheetData(); return dimensions; } @Override - public int getFirstRow() { + public int getFirstRow() throws InterruptedException { ensureReadSheetData(); return firstRow; } @Override - public int getLastRow() { + public int getLastRow() throws InterruptedException { ensureReadSheetData(); return lastRow; } @Override - public ExcelRow get(int row) { + public ExcelRow get(int row) throws InterruptedException { ensureReadSheetData(); if (!rowData.containsKey(row)) { diff --git a/std-bits/table/src/main/java/org/enso/table/excel/xssfreader/XSSFReaderWorkbook.java b/std-bits/table/src/main/java/org/enso/table/excel/xssfreader/XSSFReaderWorkbook.java index 6502057ff416..46f1d441914f 100644 --- a/std-bits/table/src/main/java/org/enso/table/excel/xssfreader/XSSFReaderWorkbook.java +++ b/std-bits/table/src/main/java/org/enso/table/excel/xssfreader/XSSFReaderWorkbook.java @@ -1,6 +1,7 @@ package org.enso.table.excel.xssfreader; import java.io.IOException; +import java.nio.channels.ClosedByInterruptException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -8,7 +9,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.function.Consumer; import javax.xml.XMLConstants; import javax.xml.namespace.NamespaceContext; import javax.xml.xpath.XPathConstants; @@ -26,6 +26,7 @@ import org.apache.poi.xssf.usermodel.XSSFRelation; import org.enso.table.excel.ExcelSheet; import org.enso.table.excel.ExcelWorkbook; +import 
org.enso.table.util.ConsumerWithException; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; @@ -90,7 +91,7 @@ public Iterator getPrefixes(String namespaceURI) { private SharedStrings sharedStrings; private XSSFReaderFormats styles; - public XSSFReaderWorkbook(String path) throws IOException { + public XSSFReaderWorkbook(String path) throws IOException, InterruptedException { this.path = path; // Read the workbook data @@ -101,7 +102,8 @@ public String getPath() { return path; } - void withReader(Consumer action) throws IOException { + void withReader(ConsumerWithException action) + throws IOException, InterruptedException { try (var pkg = OPCPackage.open(path, PackageAccess.READ)) { var reader = new XSSFReader(pkg); action.accept(reader); @@ -115,7 +117,7 @@ private record SheetInfo(int index, int sheetId, String name, String relID, bool private record NamedRange(String name, String formula) {} - private void readWorkbookData() throws IOException { + private void readWorkbookData() throws IOException, InterruptedException { withReader( reader -> { try { @@ -124,6 +126,8 @@ private void readWorkbookData() throws IOException { read1904DateSetting(workbookDoc); readSheetInfo(workbookDoc); readNamedRanges(workbookDoc); + } catch (ClosedByInterruptException e) { + throw new InterruptedException(e.getMessage()); } catch (SAXException | IOException | InvalidFormatException @@ -171,7 +175,7 @@ private void read1904DateSetting(Document workbookDoc) throws XPathExpressionExc } } - private synchronized void ensureReadShared() { + private synchronized void ensureReadShared() throws InterruptedException { if (hasReadShared) { return; } @@ -207,6 +211,8 @@ public int getUniqueCount() { styles = new XSSFReaderFormats(stylesTable); hasReadShared = true; + } catch (ClosedByInterruptException e) { + throw new InterruptedException(e.getMessage()); } catch (InvalidFormatException | IOException e) { throw new RuntimeException(e); } @@ -258,12 +264,12 
@@ public String getNameFormula(String name) { return namedRange == null ? null : namedRange.formula; } - public SharedStrings getSharedStrings() { + public SharedStrings getSharedStrings() throws InterruptedException { ensureReadShared(); return sharedStrings; } - public XSSFReaderFormats getStyles() { + public XSSFReaderFormats getStyles() throws InterruptedException { ensureReadShared(); return styles; } diff --git a/std-bits/table/src/main/java/org/enso/table/read/ExcelReader.java b/std-bits/table/src/main/java/org/enso/table/read/ExcelReader.java index 6f6b289e8998..f4587fc6cb1e 100644 --- a/std-bits/table/src/main/java/org/enso/table/read/ExcelReader.java +++ b/std-bits/table/src/main/java/org/enso/table/read/ExcelReader.java @@ -4,7 +4,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.poi.ss.util.CellReference; @@ -24,6 +23,7 @@ import org.enso.table.excel.ExcelWorkbook; import org.enso.table.excel.ReadOnlyExcelConnection; import org.enso.table.problems.ProblemAggregator; +import org.enso.table.util.FunctionWithException; import org.graalvm.polyglot.Context; /** A table reader for MS Excel files. */ @@ -36,7 +36,8 @@ public class ExcelReader { * @return a String[] containing the sheet names. * @throws IOException when the action fails */ - public static String[] readSheetNames(File file, ExcelFileFormat format) throws IOException { + public static String[] readSheetNames(File file, ExcelFileFormat format) + throws IOException, InterruptedException { return withWorkbook(file, format, ExcelReader::readSheetNames); } @@ -65,7 +66,8 @@ public static String[] readSheetNames(ExcelWorkbook workbook) { * @return a String[] containing the range names. 
* @throws IOException when the action fails */ - public static String[] readRangeNames(File file, ExcelFileFormat format) throws IOException { + public static String[] readRangeNames(File file, ExcelFileFormat format) + throws IOException, InterruptedException { return withWorkbook(file, format, ExcelWorkbook::getRangeNames); } @@ -89,7 +91,7 @@ public static Table readSheetByName( Integer row_limit, ExcelFileFormat format, ProblemAggregator problemAggregator) - throws IOException, InvalidLocationException { + throws IOException, InvalidLocationException, InterruptedException { return withWorkbook( file, format, @@ -130,7 +132,7 @@ public static Table readSheetByIndex( Integer row_limit, ExcelFileFormat format, ProblemAggregator problemAggregator) - throws IOException, InvalidLocationException { + throws IOException, InvalidLocationException, InterruptedException { return withWorkbook( file, format, @@ -175,7 +177,7 @@ public static Table readRangeByName( Integer row_limit, ExcelFileFormat format, ProblemAggregator problemAggregator) - throws IOException, InvalidLocationException { + throws IOException, InvalidLocationException, InterruptedException { return withWorkbook( file, format, @@ -202,7 +204,7 @@ public static Table readRangeByName( int skip_rows, Integer row_limit, ProblemAggregator problemAggregator) - throws InvalidLocationException { + throws InvalidLocationException, InterruptedException { int sheetIndex = workbook.getSheetIndex(rangeNameOrAddress); if (sheetIndex != -1) { return readTable( @@ -247,7 +249,7 @@ public static Table readRange( Integer row_limit, ExcelFileFormat format, ProblemAggregator problemAggregator) - throws IOException, InvalidLocationException { + throws IOException, InvalidLocationException, InterruptedException { return withWorkbook( file, format, @@ -256,7 +258,10 @@ public static Table readRange( } private static T withWorkbook( - File file, ExcelFileFormat format, Function action) throws IOException { + File file, + 
ExcelFileFormat format, + FunctionWithException action) + throws IOException, InterruptedException { try (ReadOnlyExcelConnection connection = ExcelConnectionPool.INSTANCE.openReadOnlyConnection(file, format)) { return connection.withWorkbook(action); @@ -270,7 +275,7 @@ public static Table readRange( int skip_rows, Integer row_limit, ProblemAggregator problemAggregator) - throws InvalidLocationException { + throws InvalidLocationException, InterruptedException { int sheetIndex = workbook.getSheetIndex(excelRange.getSheetName()); if (sheetIndex == -1) { throw new InvalidLocationException( @@ -294,7 +299,8 @@ private static Table readTable( ExcelHeaders.HeaderBehavior headers, int skipRows, int rowCount, - ProblemAggregator problemAggregator) { + ProblemAggregator problemAggregator) + throws InterruptedException { ExcelSheet sheet = workbook.getSheetAt(sheetIndex); diff --git a/std-bits/table/src/main/java/org/enso/table/util/ConsumerWithException.java b/std-bits/table/src/main/java/org/enso/table/util/ConsumerWithException.java new file mode 100644 index 000000000000..9f9343854bbd --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/util/ConsumerWithException.java @@ -0,0 +1,45 @@ +package org.enso.table.util; + +import java.util.Objects; + +/** + * Same as {@link java.util.function.Consumer} except that a one can declare a checked exception, E. + * Represents an operation that accepts a single input argument and returns no result. Unlike most + * other functional interfaces, {@code Consumer} is expected to operate via side-effects. + * + *

This is a functional interface whose functional method is + * {@link #accept(Object)}. + * + * @param the type of the input to the operation + * @param the type of the checked exception + */ +@FunctionalInterface +public interface ConsumerWithException { + + /** + * Performs this operation on the given argument. + * + * @param t the input argument + */ + void accept(T t) throws E; + + /** + * Returns a composed {@code Consumer} that performs, in sequence, this operation followed by the + * {@code after} operation. If performing either operation throws an exception, it is relayed to + * the caller of the composed operation. If performing this operation throws an exception, the + * {@code after} operation will not be performed. + * + * @param after the operation to perform after this operation + * @return a composed {@code Consumer} that performs in sequence this operation followed by the + * {@code after} operation + * @throws NullPointerException if {@code after} is null + */ + default ConsumerWithException andThen(java.util.function.Consumer after) + throws E { + Objects.requireNonNull(after); + return (T t) -> { + accept(t); + after.accept(t); + }; + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/util/FunctionWithException.java b/std-bits/table/src/main/java/org/enso/table/util/FunctionWithException.java new file mode 100644 index 000000000000..5d41f4686cb2 --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/util/FunctionWithException.java @@ -0,0 +1,51 @@ +package org.enso.table.util; + +import java.util.Objects; +import java.util.function.Function; + +/** + * Same as {@link Function} except that a one can declare a checked exception, E. Represents a + * function that accepts one argument and produces a result. + * + *

This is a functional interface whose functional method is + * {@link #apply(Object)}. + * + * @param the type of the input to the function + * @param the type of the result of the function + * @param the type of the checked exception + */ +@FunctionalInterface +public interface FunctionWithException { + + /** + * Applies this function to the given argument. + * + * @param t the function argument + * @return the function result + */ + R apply(T t) throws E; + + default FunctionWithException compose( + FunctionWithException before) { + Objects.requireNonNull(before); + return (V v) -> apply(before.apply(v)); + } + + /** + * Returns a composed function that first applies this function to its input, and then applies the + * {@code after} function to the result. If evaluation of either function throws an exception, it + * is relayed to the caller of the composed function. + * + * @param the type of output of the {@code after} function, and of the composed function + * @param after the function to apply after this function is applied + * @return a composed function that first applies this function and then applies the {@code after} + * function + * @throws NullPointerException if after is null + * @see #compose(Function) + */ + default FunctionWithException andThen( + FunctionWithException after) { + Objects.requireNonNull(after); + return (T t) -> after.apply(apply(t)); + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java b/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java index 2ced1f7a65bd..48dbc780c444 100644 --- a/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java +++ b/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java @@ -53,7 +53,8 @@ public static void writeTableToSheet( ExistingDataException, IllegalStateException, ColumnNameMismatchException, - ColumnCountMismatchException { + ColumnCountMismatchException, + InterruptedException { if (sheetIndex == 0 || sheetIndex > 
workbook.getNumberOfSheets()) { int i = 1; while (workbook.getSheet("Sheet" + i) != null) { @@ -116,7 +117,8 @@ public static void writeTableToSheet( ExistingDataException, IllegalStateException, ColumnNameMismatchException, - ColumnCountMismatchException { + ColumnCountMismatchException, + InterruptedException { int sheetIndex = workbook.getNumberOfSheets() == 0 ? -1 : workbook.getSheetIndex(sheetName); if (sheetIndex == -1) { writeTableToSheet( @@ -169,7 +171,8 @@ public static void writeTableToRange( RangeExceededException, ExistingDataException, ColumnNameMismatchException, - ColumnCountMismatchException { + ColumnCountMismatchException, + InterruptedException { Name name = workbook.getName(rangeNameOrAddress); ExcelRange excelRange; try { @@ -194,7 +197,8 @@ public static void writeTableToRange( RangeExceededException, ExistingDataException, ColumnNameMismatchException, - ColumnCountMismatchException { + ColumnCountMismatchException, + InterruptedException { int sheetIndex = workbook.getSheetIndex(range.getSheetName()); if (sheetIndex == -1) { throw new InvalidLocationException( @@ -263,7 +267,8 @@ private static void appendRangeWithTable( throws RangeExceededException, ExistingDataException, ColumnNameMismatchException, - ColumnCountMismatchException { + ColumnCountMismatchException, + InterruptedException { Table mappedTable = switch (existingDataMode) { case APPEND_BY_INDEX -> ColumnMapper.mapColumnsByPosition( @@ -333,7 +338,7 @@ private static void updateRangeWithTable( Long rowLimit, ExcelHeaders.HeaderBehavior headers, ExcelSheet sheet) - throws RangeExceededException, ExistingDataException { + throws RangeExceededException, ExistingDataException, InterruptedException { boolean writeHeaders = headers == ExcelHeaders.HeaderBehavior.USE_FIRST_ROW_AS_HEADERS; int requiredRows = Math.min(table.rowCount(), rowLimit == null ? 
Integer.MAX_VALUE : rowLimit.intValue()) @@ -383,7 +388,8 @@ private static void updateRangeWithTable( * @param sheet Sheet containing the range. * @return True if range is empty and clear is False, otherwise returns False. */ - private static boolean rangeIsNotEmpty(Workbook workbook, ExcelRange range, ExcelSheet sheet) { + private static boolean rangeIsNotEmpty(Workbook workbook, ExcelRange range, ExcelSheet sheet) + throws InterruptedException { ExcelRange fullRange = range.getAbsoluteRange(workbook); for (int row = fullRange.getTopRow(); row <= fullRange.getBottomRow(); row++) { ExcelRow excelRow = sheet.get(row); @@ -401,7 +407,8 @@ private static boolean rangeIsNotEmpty(Workbook workbook, ExcelRange range, Exce * @param range The range to clear. * @param sheet Sheet containing the range. */ - private static void clearRange(Workbook workbook, ExcelRange range, ExcelSheet sheet) { + private static void clearRange(Workbook workbook, ExcelRange range, ExcelSheet sheet) + throws InterruptedException { ExcelRange fullRange = range.getAbsoluteRange(workbook); for (int row = fullRange.getTopRow(); row <= fullRange.getBottomRow(); row++) { ExcelRow excelRow = sheet.get(row); @@ -547,7 +554,7 @@ private static void writeValueToCell( * @return EXCEL_COLUMN_NAMES if the range has headers, otherwise USE_FIRST_ROW_AS_HEADERS. */ private static ExcelHeaders.HeaderBehavior shouldWriteHeaders( - ExcelSheet excelSheet, int topRow, int startCol, int endCol) { + ExcelSheet excelSheet, int topRow, int startCol, int endCol) throws InterruptedException { ExcelRow row = excelSheet.get(topRow); // If the first row is missing or empty, should write headers. From e8f781afbfe58537c730caa2c4c3e7a79f36b7b7 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Sat, 21 Dec 2024 08:07:54 +0100 Subject: [PATCH 12/15] Symetric, transitive and reflexive equality for intersection types (#11897) Fixes #11845 by comparing all the types an `EnsoMultiValue` _has been cast to_. 
--- CHANGELOG.md | 2 + docs/types/intersection-types.md | 24 +++- .../meta/TypeOfNodeMultiValueTest.java | 3 + .../org/enso/interpreter/test/AnyToTest.java | 120 ++++++++++++++++++ .../test/EnsoMultiValueInteropTest.java | 6 + .../test/EqualsMultiValueTest.java | 29 +++-- .../interpreter/test/ValuesGenerator.java | 40 ++++++ .../interpreter/test/hash/HashCodeTest.java | 1 + .../node/callable/InvokeConversionNode.java | 43 +++++-- .../node/callable/InvokeMethodNode.java | 11 +- .../builtin/meta/EqualsSimpleNode.java | 59 ++++++++- .../expression/builtin/meta/HashCodeNode.java | 31 ++++- .../src/Semantic/Conversion_Spec.enso | 28 ++-- .../Semantic/Multi_Value_Convert_Spec.enso | 67 ++++++---- 14 files changed, 390 insertions(+), 74 deletions(-) create mode 100644 engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/AnyToTest.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d4db090dd76..4dd78add11bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,10 +17,12 @@ - A constructor or type definition with a single inline argument definition was previously allowed to use spaces in the argument definition without parentheses. 
[This is now a syntax error.][11856] +- Symetric, transitive and reflexive [equality for intersection types][11897] [11777]: https://github.com/enso-org/enso/pull/11777 [11600]: https://github.com/enso-org/enso/pull/11600 [11856]: https://github.com/enso-org/enso/pull/11856 +[11897]: https://github.com/enso-org/enso/pull/11897 # Next Release diff --git a/docs/types/intersection-types.md b/docs/types/intersection-types.md index dd053d845709..603db3e40d17 100644 --- a/docs/types/intersection-types.md +++ b/docs/types/intersection-types.md @@ -68,7 +68,7 @@ Just as demonstrated at https://github.com/enso-org/enso/commit/3d8a0e1b90b20cfdfe5da8d2d3950f644a4b45b8#diff-c6ef852899778b52ce6a11ebf9564d102c273021b212a4848b7678e120776287R23 --> -## Narrowing Type Check +### Narrowing Type Check When an _intersection type_ value is being downcast to _some of the types it already represents_, these types become its _visible_ types. Any additional @@ -160,9 +160,9 @@ Table.join self right:Table -> Table = ... Such a `Table&Column` value can be returned from the above `Table.join` function and while having only `Table` behavior by default, still being able to be -explicitly casted by the visual environment to `Column`. +explicitly cast by the visual environment to `Column`. -## Converting Type Check +### Converting Type Check When an _intersection type_ is being checked against a type it doesn't represent, any of its component types can be used for @@ -180,3 +180,21 @@ case it looses its `Float` type and `ct:Float` would fail. In short: when a [conversion](../syntax/conversions.md) is needed to satisfy a type check a new value is created to satisfy just the types requested in the check. + +## Equality & Hash Code + +A value of an intersection type is equal with other value, if all values _it has +been cast to_ are equal to the other value. E.g. a value of `Complex&Float` is +equal to some other value only if its `Complex` part and `Float` part are equal +to the other value. 
The _hidden_ types of a value (e.g. those that it _can be +cast to_, if any) aren't considered in the equality check. + +The order of types isn't important for equality. E.g. `Complex&Float` value can +be equal to `Float&Complex` if the individual components (values _it has been +cast to_) match. As implied by (custom) +[equality rules](../syntax/functions.md#custom-equality) the `hash` of a value +of _intersection type_ must thus be a sum of `hash` values of all the values it +_has been cast to_. As a special case any value wrapped into an _intersection +type_, but _cast down_ to the original type is `==` and has the same `hash` as +the original value. E.g. `4.2 : Complex&Float : Float` is `==` and has the same +`hash` as `4.2` (in spite it _can be cast to_ `Complex`). diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/meta/TypeOfNodeMultiValueTest.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/meta/TypeOfNodeMultiValueTest.java index 835c24f62f24..b82ec6b2acf9 100644 --- a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/meta/TypeOfNodeMultiValueTest.java +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/meta/TypeOfNodeMultiValueTest.java @@ -88,6 +88,9 @@ private static void registerValue( if (!polyValue.isNull()) { assertTrue("Type of " + polyValue + " is " + t, t.isMetaObject()); var rawValue = ContextUtils.unwrapValue(ctx(), polyValue); + if (rawValue instanceof EnsoMultiValue) { + return; + } var rawType = ContextUtils.unwrapValue(ctx(), t); if (rawType instanceof Type type) { var singleMultiValue = EnsoMultiValue.create(new Type[] {type}, 1, new Object[] {rawValue}); diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/AnyToTest.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/AnyToTest.java new file 
mode 100644 index 000000000000..20311946ad6e --- /dev/null +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/AnyToTest.java @@ -0,0 +1,120 @@ +package org.enso.interpreter.test; + +import static org.junit.Assert.assertTrue; + +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; +import org.enso.interpreter.runtime.data.EnsoMultiValue; +import org.enso.interpreter.runtime.data.Type; +import org.enso.interpreter.runtime.data.text.Text; +import org.enso.test.utils.ContextUtils; +import org.graalvm.polyglot.Context; +import org.graalvm.polyglot.Source; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +public class AnyToTest { + private static Context ctx; + + private static final ByteArrayOutputStream out = new ByteArrayOutputStream(); + + @BeforeClass + public static void initCtx() { + ctx = ContextUtils.createDefaultContext(out); + } + + @AfterClass + public static void disposeCtx() { + ctx.close(); + ctx = null; + } + + @Before + public void resetOutput() { + out.reset(); + } + + private String getStdOut() { + return out.toString(StandardCharsets.UTF_8); + } + + @Test + public void multiValueToInteger() throws Exception { + var ensoCtx = ContextUtils.leakContext(ctx); + var types = + new Type[] {ensoCtx.getBuiltins().number().getInteger(), ensoCtx.getBuiltins().text()}; + var code = + """ + from Standard.Base import all + + private eq a b = a == b + + conv style v = case style of + 0 -> v.to Integer + 1 -> v:Integer + 99 -> eq + + """; + var conv = + ContextUtils.evalModule(ctx, Source.newBuilder("enso", code, "conv.enso").build(), "conv"); + var both = EnsoMultiValue.create(types, types.length, new Object[] {2L, Text.create("Two")}); + var eq = + ContextUtils.executeInContext( + ctx, + () -> { + var bothValue = ctx.asValue(both); + var asIntegerTo = conv.execute(0, bothValue); + var asIntegerCast = conv.execute(1, 
bothValue); + var equals = conv.execute(99, null); + return equals.execute(asIntegerTo, asIntegerCast); + }); + assertTrue("Any.to and : give the same result", eq.asBoolean()); + } + + @Test + @Ignore + public void multiValueToText() throws Exception { + multiValueToText(2); + } + + @Test + @Ignore + public void multiValueToTextHidden() throws Exception { + multiValueToText(1); + } + + private void multiValueToText(int dispatchLength) throws Exception { + var ensoCtx = ContextUtils.leakContext(ctx); + var types = + new Type[] {ensoCtx.getBuiltins().number().getInteger(), ensoCtx.getBuiltins().text()}; + var code = + """ + from Standard.Base import all + + private eq a b = a == b + + conv style:Integer v = case style of + 2 -> v.to Text + 3 -> v:Text + 99 -> eq + + """; + var conv = + ContextUtils.evalModule(ctx, Source.newBuilder("enso", code, "conv.enso").build(), "conv"); + var both = EnsoMultiValue.create(types, dispatchLength, new Object[] {2L, Text.create("Two")}); + var eq = + ContextUtils.executeInContext( + ctx, + () -> { + var bothValue = ctx.asValue(both); + var asIntegerCast = conv.execute(3, bothValue); + var asIntegerTo = conv.execute(2, bothValue); + var equals = conv.execute(99, null); + return equals.execute(asIntegerTo, asIntegerCast); + }); + assertTrue("Any.to and : give the same result", eq.asBoolean()); + } +} diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/EnsoMultiValueInteropTest.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/EnsoMultiValueInteropTest.java index dad3a6100481..530122ceb651 100644 --- a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/EnsoMultiValueInteropTest.java +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/EnsoMultiValueInteropTest.java @@ -57,7 +57,13 @@ private static void registerValue( var rawT2 = ContextUtils.unwrapValue(ctx(), t2); if (rawT1 instanceof Type typ1 && rawT2 instanceof Type 
typ2) { var r1 = ContextUtils.unwrapValue(ctx, v1); + if (r1 instanceof EnsoMultiValue) { + return; + } var r2 = ContextUtils.unwrapValue(ctx, v2); + if (r2 instanceof EnsoMultiValue) { + return; + } var both = EnsoMultiValue.create(new Type[] {typ1, typ2}, 2, new Object[] {r1, r2}); data.add(new Object[] {both}); } diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/EqualsMultiValueTest.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/EqualsMultiValueTest.java index 57d60662bb43..90b001ebd16f 100644 --- a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/EqualsMultiValueTest.java +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/EqualsMultiValueTest.java @@ -111,15 +111,24 @@ public void testEqualityIntegerAndMultiValueWithBoth() { var intType = builtins.number().getInteger(); var textText = builtins.text(); var hi = Text.create("Hi"); - var fourExtraText = + var textFour = EnsoMultiValue.create(new Type[] {textText, intType}, 2, new Object[] {hi, 4L}); - - assertTrue("4 == 4t", equalityCheck(4L, fourExtraText)); - assertFalse("5 != 4t", equalityCheck(5L, fourExtraText)); - assertTrue("4t == 4", equalityCheck(fourExtraText, 4L)); - assertFalse("4t != 5", equalityCheck(fourExtraText, 5L)); - assertTrue("4t == 'Hi'", equalityCheck(fourExtraText, hi)); - assertTrue("'Hi' == 4t", equalityCheck(hi, fourExtraText)); + var textFive = + EnsoMultiValue.create(new Type[] {textText, intType}, 2, new Object[] {hi, 5L}); + var fourText = + EnsoMultiValue.create(new Type[] {intType, textText}, 2, new Object[] {4L, hi}); + + assertFalse("4 != t", equalityCheck(4L, hi)); + assertFalse("4 != 4t", equalityCheck(4L, textFour)); + assertFalse("5 != 4t", equalityCheck(5L, textFour)); + assertFalse("5t != 4t", equalityCheck(textFive, textFour)); + assertFalse("4t != 4", equalityCheck(textFour, 4L)); + assertFalse("4t != 5", equalityCheck(textFour, 5L)); + assertFalse("4t 
!= 'Hi'", equalityCheck(textFour, hi)); + assertFalse("'Hi' != 4t", equalityCheck(hi, textFour)); + + assertTrue("t4 == 4t", equalityCheck(textFour, fourText)); + assertTrue("4t == t4", equalityCheck(fourText, textFour)); return null; }); @@ -137,9 +146,9 @@ public void testEqualityIntegerAndMultiValueWithIntText() { EnsoMultiValue.create( new Type[] {intType, textText}, 2, new Object[] {4L, Text.create("Hi")}); - assertTrue("4 == 4t", equalityCheck(4L, fourExtraText)); + assertFalse("4 != 4t", equalityCheck(4L, fourExtraText)); assertFalse("5 != 4t", equalityCheck(5L, fourExtraText)); - assertTrue("4t == 4", equalityCheck(fourExtraText, 4L)); + assertFalse("4t != 4", equalityCheck(fourExtraText, 4L)); assertFalse("4t != 5", equalityCheck(fourExtraText, 5L)); return null; diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ValuesGenerator.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ValuesGenerator.java index 4b23c3300299..a163a184655e 100644 --- a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ValuesGenerator.java +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ValuesGenerator.java @@ -25,6 +25,11 @@ import java.util.TimeZone; import org.enso.common.MethodNames; import org.enso.common.MethodNames.Module; +import org.enso.interpreter.node.expression.foreign.HostValueToEnsoNode; +import org.enso.interpreter.runtime.data.EnsoMultiValue; +import org.enso.interpreter.runtime.data.EnsoObject; +import org.enso.interpreter.runtime.data.Type; +import org.enso.test.utils.ContextUtils; import org.graalvm.polyglot.Context; import org.graalvm.polyglot.PolyglotException; import org.graalvm.polyglot.Value; @@ -865,6 +870,41 @@ public List problemBehaviors() { return collect; } + public List numbersMultiText() { + var leak = ContextUtils.leakContext(ctx); + var numberTextTypes = + new Type[] { + leak.getBuiltins().number().getInteger(), 
leak.getBuiltins().text(), + }; + var textNumberTypes = + new Type[] { + leak.getBuiltins().text(), leak.getBuiltins().number().getInteger(), + }; + var collect = new ArrayList(); + var toEnso = HostValueToEnsoNode.getUncached(); + for (var n : numbers()) { + for (var t : textual()) { + var rawN = toEnso.execute(ContextUtils.unwrapValue(ctx, n)); + var rawT = ContextUtils.unwrapValue(ctx, t); + if (!(rawT instanceof EnsoObject)) { + continue; + } + addMultiToCollect(collect, numberTextTypes, 2, rawN, rawT); + addMultiToCollect(collect, numberTextTypes, 1, rawN, rawT); + addMultiToCollect(collect, textNumberTypes, 2, rawT, rawN); + addMultiToCollect(collect, textNumberTypes, 1, rawT, rawN); + } + } + return collect; + } + + private void addMultiToCollect( + List collect, Type[] types, int dispatchTypes, Object... values) { + var raw = EnsoMultiValue.create(types, dispatchTypes, values); + var wrap = ctx.asValue(raw); + collect.add(wrap); + } + public List noWrap() { var collect = new ArrayList(); if (languages.contains(Language.ENSO)) { diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/hash/HashCodeTest.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/hash/HashCodeTest.java index 558c3f85deb2..31f3d28d2bbb 100644 --- a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/hash/HashCodeTest.java +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/hash/HashCodeTest.java @@ -73,6 +73,7 @@ private static Object[] fetchAllUnwrappedValues() { values.addAll(valGenerator.numbers()); values.addAll(valGenerator.booleans()); values.addAll(valGenerator.textual()); + values.addAll(valGenerator.numbersMultiText()); values.addAll(valGenerator.arrayLike()); values.addAll(valGenerator.vectors()); values.addAll(valGenerator.maps()); diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java 
b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java index 2d0e4777fbad..4675a3d88882 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java @@ -100,7 +100,10 @@ private Type extractType(Object self) { return extractType(this, self); } - static boolean hasType(TypeOfNode typeOfNode, Object value) { + static boolean hasTypeNoMulti(TypeOfNode typeOfNode, Object value) { + if (value instanceof EnsoMultiValue) { + return false; + } return typeOfNode.hasType(value); } @@ -109,7 +112,11 @@ static boolean isDataflowError(Object value) { } @Specialization( - guards = {"hasType(dispatch, that)", "!isDataflowError(self)", "!isDataflowError(that)"}) + guards = { + "hasTypeNoMulti(dispatch, that)", + "!isDataflowError(self)", + "!isDataflowError(that)" + }) Object doConvertFrom( VirtualFrame frame, State state, @@ -181,15 +188,23 @@ Object doMultiValue( Object self, EnsoMultiValue that, Object[] arguments, + @Shared("typeOfNode") @Cached TypeOfNode dispatch, @Cached EnsoMultiValue.CastToNode castTo) { var type = extractType(self); - var result = castTo.findTypeOrNull(type, that, true, true); - if (result == null) { - throw new PanicException( - EnsoContext.get(this).getBuiltins().error().makeNoSuchConversion(type, self, conversion), - this); + var hasBeenCastTo = dispatch.findAllTypesOrNull(that, false); + if (hasBeenCastTo != null) { + for (var t : hasBeenCastTo) { + var val = castTo.findTypeOrNull(t, that, false, false); + assert val != null; + var result = execute(frame, state, conversion, self, val, arguments); + if (result != null) { + return result; + } + } } - return result; + throw new PanicException( + EnsoContext.get(this).getBuiltins().error().makeNoSuchConversion(type, self, conversion), + this); } @Specialization @@ -265,7 +280,7 @@ Object doConvertText( @Specialization( guards = { - 
"!hasType(typeOfNode, that)", + "!hasTypeNoMulti(typeOfNode, that)", "!interop.isTime(that)", "interop.isDate(that)", }) @@ -287,7 +302,7 @@ Object doConvertDate( @Specialization( guards = { - "!hasType(typeOfNode, that)", + "!hasTypeNoMulti(typeOfNode, that)", "interop.isTime(that)", "!interop.isDate(that)", }) @@ -309,7 +324,7 @@ Object doConvertTime( @Specialization( guards = { - "!hasType(typeOfNode, that)", + "!hasTypeNoMulti(typeOfNode, that)", "interop.isTime(that)", "interop.isDate(that)", }) @@ -331,7 +346,7 @@ Object doConvertDateTime( @Specialization( guards = { - "!hasType(typeOfNode, that)", + "!hasTypeNoMulti(typeOfNode, that)", "interop.isDuration(that)", }) Object doConvertDuration( @@ -352,7 +367,7 @@ Object doConvertDuration( @Specialization( guards = { - "!hasType(typeOfNode, thatMap)", + "!hasTypeNoMulti(typeOfNode, thatMap)", "interop.hasHashEntries(thatMap)", }) Object doConvertMap( @@ -374,7 +389,7 @@ Object doConvertMap( return invokeFunctionNode.execute(function, frame, state, arguments); } - @Specialization(guards = {"!hasType(methods, that)", "!interop.isString(that)"}) + @Specialization(guards = {"!hasTypeNoMulti(methods, that)", "!interop.isString(that)"}) Object doFallback( VirtualFrame frame, State state, diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java index 1d8c977c6e2c..179bef3b3e24 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java @@ -299,10 +299,13 @@ Object doMultiValue( @Cached EnsoMultiValue.CastToNode castTo) { var fnAndType = self.resolveSymbol(methodResolverNode, symbol); if (fnAndType != null) { - var unwrapSelf = castTo.findTypeOrNull(fnAndType.getRight(), self, false, false); - if (unwrapSelf != null) { - assert arguments[0] == self; - 
arguments[0] = unwrapSelf; + var ctx = EnsoContext.get(this); + if (ctx.getBuiltins().any() != fnAndType.getRight()) { + var unwrapSelf = castTo.findTypeOrNull(fnAndType.getRight(), self, false, false); + if (unwrapSelf != null) { + assert arguments[0] == self; + arguments[0] = unwrapSelf; + } } return invokeFunctionNode.execute(fnAndType.getLeft(), frame, state, arguments); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/EqualsSimpleNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/EqualsSimpleNode.java index aaf800c5d961..2846a92baedc 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/EqualsSimpleNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/EqualsSimpleNode.java @@ -155,7 +155,7 @@ EqualsAndInfo equalsDoubleText(double self, Text other) { return EqualsAndInfo.FALSE; } - @Specialization + @Specialization(guards = "!isMulti(other)") EqualsAndInfo equalsDoubleInterop( double self, Object other, @@ -323,7 +323,54 @@ EqualsAndInfo equalsAtoms( } } + static boolean isMulti(Object obj) { + return obj instanceof EnsoMultiValue; + } + @Specialization + EqualsAndInfo equalsMultiValueMultiValue( + VirtualFrame frame, + EnsoMultiValue self, + EnsoMultiValue other, + @Shared("multiCast") @Cached EnsoMultiValue.CastToNode castNode, + @Shared("multiType") @Cached TypeOfNode typesNode, + @Shared("multiEquals") @Cached EqualsSimpleNode delegate) { + if (self == other) { + return EqualsAndInfo.TRUE; + } + + var typesSelf = typesNode.findAllTypesOrNull(self, false); + var typesOther = typesNode.findAllTypesOrNull(other, false); + assert typesSelf != null; + assert typesOther != null; + for (var t : typesSelf) { + var selfValue = castNode.findTypeOrNull(t, self, false, false); + assert selfValue != null; + var otherValue = castNode.findTypeOrNull(t, other, false, false); + if (otherValue == null) { + return 
EqualsAndInfo.FALSE; + } + var res = delegate.execute(frame, selfValue, otherValue); + if (!res.isTrue()) { + return res; + } + } + for (var t : typesOther) { + var selfValue = castNode.findTypeOrNull(t, self, false, false); + if (selfValue == null) { + return EqualsAndInfo.FALSE; + } + var otherValue = castNode.findTypeOrNull(t, other, false, false); + assert otherValue != null; + var res = delegate.execute(frame, selfValue, otherValue); + if (!res.isTrue()) { + return res; + } + } + return EqualsAndInfo.TRUE; + } + + @Specialization(guards = "!isMulti(other)") EqualsAndInfo equalsMultiValue( VirtualFrame frame, EnsoMultiValue self, @@ -339,14 +386,14 @@ EqualsAndInfo equalsMultiValue( continue; } var res = delegate.execute(frame, value, other); - if (res.isTrue()) { + if (!res.isTrue()) { return res; } } - return EqualsAndInfo.FALSE; + return EqualsAndInfo.TRUE; } - @Specialization + @Specialization(guards = "!isMulti(self)") EqualsAndInfo equalsMultiValueReversed( VirtualFrame frame, Object self, @@ -440,6 +487,10 @@ static boolean isPrimitiveValue(Object object) { return object instanceof Boolean || object instanceof Long || object instanceof Double; } + static boolean isEnsoObject(Object v) { + return v instanceof EnsoObject; + } + static boolean isNotMulti(Object v) { return !(v instanceof EnsoMultiValue); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/HashCodeNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/HashCodeNode.java index 1127cf61beef..1af1a8026f45 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/HashCodeNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/HashCodeNode.java @@ -40,11 +40,13 @@ import org.enso.interpreter.runtime.callable.argument.CallArgumentInfo; import org.enso.interpreter.runtime.callable.function.Function; import org.enso.interpreter.runtime.data.EnsoFile; 
+import org.enso.interpreter.runtime.data.EnsoMultiValue; import org.enso.interpreter.runtime.data.Type; import org.enso.interpreter.runtime.data.atom.Atom; import org.enso.interpreter.runtime.data.atom.AtomConstructor; import org.enso.interpreter.runtime.data.atom.StructsLibrary; import org.enso.interpreter.runtime.data.text.Text; +import org.enso.interpreter.runtime.library.dispatch.TypeOfNode; import org.enso.interpreter.runtime.library.dispatch.TypesLibrary; import org.enso.interpreter.runtime.number.EnsoBigInteger; import org.enso.interpreter.runtime.scope.ModuleScope; @@ -106,7 +108,7 @@ long hashCodeForBigInteger( return hashCodeForDouble(bigInteger.getValue().doubleValue()); } - @Specialization(guards = {"interop.fitsInBigInteger(v)"}) + @Specialization(guards = {"interop.fitsInBigInteger(v)", "!isMulti(v)"}) @TruffleBoundary long hashCodeForBigInteger( Object v, @Shared("interop") @CachedLibrary(limit = "10") InteropLibrary interop) { @@ -451,6 +453,29 @@ long hashCodeForText( } } + @Specialization + long hashCodeForMultiValue( + EnsoMultiValue value, + @Cached TypeOfNode typesNode, + @Cached EnsoMultiValue.CastToNode castNode, + @Shared("hashCodeNode") @Cached HashCodeNode hashCodeNode) { + // multi value with single "has been cast to value" + // needs the same hash as the "has been cast to value" + // hence the sum has to start from 0L + var hash = 0L; + var types = typesNode.findAllTypesOrNull(value, false); + assert types != null; + for (var t : types) { + var v = castNode.findTypeOrNull(t, value, false, false); + assert v != null; + var vHash = hashCodeNode.execute(v); + // ordering of types in multivalue doesn't matter + // need commutative operation here + hash = hash + vHash; + } + return hash; + } + @TruffleBoundary @Specialization( guards = {"interop.isString(selfStr)"}, @@ -636,4 +661,8 @@ boolean isJavaObject(Object object) { boolean isJavaFunction(Object object) { return EnsoContext.get(this).isJavaPolyglotFunction(object); } + + static 
boolean isMulti(Object obj) { + return obj instanceof EnsoMultiValue; + } } diff --git a/test/Base_Tests/src/Semantic/Conversion_Spec.enso b/test/Base_Tests/src/Semantic/Conversion_Spec.enso index 4228168ccaa4..75116ff2f00e 100644 --- a/test/Base_Tests/src/Semantic/Conversion_Spec.enso +++ b/test/Base_Tests/src/Semantic/Conversion_Spec.enso @@ -320,10 +320,10 @@ add_specs suite_builder = x==x . should_be_true (x:Integer)==42 . should_be_true (x:Fool)==42 . should_be_false - x==42 . should_be_true + x==42 . should_be_false 42==(x.to Integer) . should_be_true 42==(x.to Fool) . should_be_false - 42==x . should_be_true + 42==x . should_be_false 100+(x:Integer) . should_equal 142 (x:Integer)+100 . should_equal 142 x+100 . should_equal 142 @@ -341,10 +341,10 @@ add_specs suite_builder = x==x . should_be_true (x:Float)==42.3 . should_be_true (x:Fool)==42.3 . should_be_false - x==42.3 . should_be_true + x==42.3 . should_be_false 42.3==(x.to Float) . should_be_true 42.3==(x.to Fool) . should_be_false - 42.3==x . should_be_true + 42.3==x . should_be_false 100+(x:Float) . should_equal 142.3 (x:Float)+100 . should_equal 142.3 x+100 . should_equal 142.3 @@ -378,10 +378,10 @@ add_specs suite_builder = x==x . should_be_true (x:Text)=="Hello" . should_be_true (x:Fool)=="Hello" . should_be_false - x=="Hello" . should_be_true + x=="Hello" . should_be_false "Hello"==(x:Text) . should_be_true "Hello"==(x:Fool) . should_be_false - "Hello"==x . should_be_true + "Hello"==x . should_be_false x.to_text . should_equal "Hello" (x:Fool).to_text . should_equal "{FOOL Hello}" (x:Text).to_text . should_equal "Hello" @@ -397,10 +397,10 @@ add_specs suite_builder = x==x . should_be_true (x:Time_Of_Day)==now . should_be_true (x:Fool)==now . should_be_false - x==now . should_be_true + x==now . should_be_false now==(x:Time_Of_Day) . should_be_true now==(x:Fool) . should_be_false - now==x . should_be_true + now==x . should_be_false x.to_text . 
should_equal now.to_text do_time now @@ -413,10 +413,10 @@ add_specs suite_builder = x==x . should_be_true (x:Date)==now . should_be_true (x:Fool)==now . should_be_false - x==now . should_be_true + x==now . should_be_false now==(x:Date) . should_be_true now==(x:Fool) . should_be_false - now==x . should_be_true + now==x . should_be_false x.to_text . should_equal "{FOOL "+now.to_text+"}" do_date now @@ -429,10 +429,10 @@ add_specs suite_builder = x==x . should_be_true (x:Date_Time)==now . should_be_true (x:Fool)==now . should_be_false - x==now . should_be_true + x==now . should_be_false now==(x:Date_Time) . should_be_true now==(x:Fool) . should_be_false - now==x . should_be_true + now==x . should_be_false x.to_text . should_equal now.to_text do_time now @@ -445,10 +445,10 @@ add_specs suite_builder = x==x . should_be_true (x:Duration)==now . should_be_true (x:Fool)==now . should_be_false - x==now . should_be_true + x==now . should_be_false now==(x:Duration) . should_be_true now==(x:Fool) . should_be_false - now==x . should_be_true + now==x . should_be_false x.to_text . should_equal "{FOOL "+now.to_text+"}" do_duration now diff --git a/test/Base_Tests/src/Semantic/Multi_Value_Convert_Spec.enso b/test/Base_Tests/src/Semantic/Multi_Value_Convert_Spec.enso index 81fcdd680410..ae94e0dd5d12 100644 --- a/test/Base_Tests/src/Semantic/Multi_Value_Convert_Spec.enso +++ b/test/Base_Tests/src/Semantic/Multi_Value_Convert_Spec.enso @@ -85,47 +85,66 @@ add_specs suite_builder = c1 . 
should_equal "c" suite_builder.group "Equals and hash" group_builder-> - group_builder.specify "Dictionary with value and multi value" <| - pi = 3.14 - a = pi : A - b = pi : B - c = pi : C - abc = pi : A&B&C - downcast_a = abc : A - downcast_ab = abc : A&B - downcast_ba = abc : B&A - downcast_b = abc : B - downcast_c = abc : C - + pi = 3.14 + a = pi : A + b = pi : B + c = pi : C + abc = pi : A&B&C + downcast_a = abc : A + downcast_ab = abc : A&B + downcast_ba = abc : B&A + downcast_b = abc : B + downcast_c = abc : C + + group_builder.specify "Ordering and multi value" <| Ordering.compare a b . catch Any e-> e.should_equal (Standard.Base.Errors.Common.Incomparable_Values.Error a b) Ordering.compare a downcast_a . should_equal Ordering.Equal - Ordering.compare a downcast_ab . should_equal Ordering.Equal - Ordering.compare a abc . should_equal Ordering.Equal - Ordering.compare a downcast_ba . should_equal Ordering.Equal - Ordering.compare downcast_ba b . should_equal Ordering.Equal - # if a == downcast_ba && downcast_ba == b then # due to transitivity - # Ordering.compare a b . should_equal Ordering.Equal + Ordering.hash a . should_equal (Ordering.hash downcast_a) + + Ordering.compare a downcast_ab . catch Any e-> + e.should_equal (Standard.Base.Errors.Common.Incomparable_Values.Error a downcast_ab) + Ordering.compare a abc . catch Any e-> + e.should_equal (Standard.Base.Errors.Common.Incomparable_Values.Error a abc) + + Ordering.compare a downcast_ba . catch Any e-> + e.should_equal (Standard.Base.Errors.Common.Incomparable_Values.Error a downcast_ba) + Ordering.compare downcast_ba b . catch Any e-> + e.should_equal (Standard.Base.Errors.Common.Incomparable_Values.Error downcast_ba b) + Ordering.compare a b . catch Any e-> + e.should_equal (Standard.Base.Errors.Common.Incomparable_Values.Error a b) + group_builder.specify "Dictionary with value and multi value" <| dict = Dictionary.empty . insert a "A" . insert b "B" . insert c "C" + . insert downcast_ab "AB_" + . 
insert downcast_ba "BA_" . insert downcast_a "A_" . insert downcast_b "B_" . insert downcast_c "C_" . insert abc "Multi" - # dict . get a . should_equal "A" # sometimes it is A and sometimes it is Multi - # dict . get b . should_equal "B" - # dict . get c . should_equal "C" # sometimes it is C and sometimes it is Multi - dict . get downcast_a . should_equal "Multi" - dict . get downcast_b . should_equal "Multi" - dict . get downcast_c . should_equal "Multi" + # downcast single value is equal to the value + dict . get downcast_a . should_equal "A_" + dict . get downcast_b . should_equal "B_" + dict . get downcast_c . should_equal "C_" + + # hence "A" ,"B", "C" were replaced + dict . get a . should_equal "A_" + dict . get b . should_equal "B_" + dict . get c . should_equal "C_" + + # multi value must be equal to all its values dict . get abc . should_equal "Multi" + # order of types in multi value isn't important + dict . get downcast_ab . should_equal "BA_" + dict . get downcast_ba . should_equal "BA_" + main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder From 31772e35652a63f6d5557989f2ab6f69b083ffc1 Mon Sep 17 00:00:00 2001 From: AdRiley Date: Sat, 21 Dec 2024 15:40:28 +0000 Subject: [PATCH 13/15] SQLServer Aggregate Support (#11811) * 40 red * 18 Red * 31 Red * 20 red * 18 Red * 15 red * 9 Red * 7 * Comment out broken test for now * Green * Cleanup * Changelog * Update check_aggregate_support * Cleanup * Reenable test * Fix tests * Doc comment --- CHANGELOG.md | 2 + .../Redshift/Internal/Redshift_Dialect.enso | 6 + .../src/Internal/Aggregate_Helper.enso | 2 +- .../src/Internal/Base_Generator.enso | 7 +- .../Internal/Postgres/Postgres_Dialect.enso | 6 + .../src/Internal/SQLite/SQLite_Dialect.enso | 6 + .../src/Internal/SQLServer_Dialect.enso | 132 +++++++++--------- .../src/Internal/Snowflake_Dialect.enso | 6 + test/Microsoft_Tests/src/SQLServer_Spec.enso | 4 +- .../Aggregate_Spec.enso | 8 +- 10 files changed, 101 insertions(+), 78 
deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4dd78add11bb..5b5e023d18db 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -136,6 +136,7 @@ - [Enhance Managed_Resource to allow implementation of in-memory caches][11577] - [Added `add_group_number` to the in-memory database.[11818] - [The reload button clears the HTTP cache.][11673] +- [SQL Server Support for Aggregate][11811] [11235]: https://github.com/enso-org/enso/pull/11235 [11255]: https://github.com/enso-org/enso/pull/11255 @@ -146,6 +147,7 @@ [11577]: https://github.com/enso-org/enso/pull/11577 [11818]: https://github.com/enso-org/enso/pull/11818 [11673]: https://github.com/enso-org/enso/pull/11673 +[11811]: https://github.com/enso-org/enso/pull/11811 #### Enso Language & Runtime diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso index 4d7c649c1f9e..36752731167d 100644 --- a/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso @@ -157,6 +157,12 @@ type Redshift_Dialect _ = [op_kind, args] expression + ## PRIVATE + Add an extra cast to adjust the output type of aggregate operations. + Some DBs do CAST(SUM(x) AS FLOAT) others do SUM(CAST(x AS FLOAT)). 
+ cast_aggregate_columns self op_kind:Text columns:(Vector Internal_Column) = + self.cast_op_type op_kind columns (SQL_Expression.Operation op_kind (columns.map c->c.expression)) + ## PRIVATE prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement prepare_fetch_types_query self expression context = diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso index b13515f13277..812ed2422802 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso @@ -35,7 +35,7 @@ from project.Errors import Aggregagtion_Requires_Order make_aggregate_column : DB_Table -> Aggregate_Column -> Text -> Dialect -> (Text -> Vector -> SQL_Expression -> SQL_Type_Reference) -> Problem_Builder -> Internal_Column make_aggregate_column table aggregate as dialect infer_return_type problem_builder -> Internal_Column = simple_aggregate op_kind columns = - expression = dialect.cast_op_type op_kind columns (SQL_Expression.Operation op_kind (columns.map c->c.expression)) + expression = dialect.cast_aggregate_columns op_kind columns sql_type_ref = infer_return_type op_kind columns expression Internal_Column.Value as sql_type_ref expression diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso index 8a4b233782d6..ecdc797ebc85 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso @@ -179,6 +179,7 @@ type SQL_Generator generate_select_query_sql : Dialect -> Vector (Pair Text SQL_Expression) -> Context -> SQL_Builder generate_select_query_sql self dialect columns ctx = gen_exprs exprs = exprs.map (expr-> 
dialect.generate_expression self expr for_select=False) + gen_group_exprs exprs = exprs.map (expr-> dialect.generate_expression self expr for_select=True) gen_column pair = (dialect.generate_expression self expr=pair.second for_select=True) ++ alias dialect pair.first generated_columns = case columns of @@ -187,7 +188,7 @@ type SQL_Generator from_part = self.generate_from_part dialect ctx.from_spec where_part = (SQL_Builder.join " AND " (gen_exprs ctx.where_filters)) . prefix_if_present " WHERE " - group_part = (SQL_Builder.join ", " (gen_exprs ctx.groups)) . prefix_if_present " GROUP BY " + group_part = (SQL_Builder.join ", " (gen_group_exprs ctx.groups)) . prefix_if_present " GROUP BY " orders = ctx.orders.map (self.generate_order dialect) order_part = (SQL_Builder.join ", " orders) . prefix_if_present " ORDER BY " @@ -663,14 +664,14 @@ preprocess_query (query : Query) -> Query = column expression; it should be provided only if `has_quote` is `True` and must not be empty then. If the quote character occurs in the expression, it is escaped by doubling each occurrence. 
-make_concat make_raw_concat_expr make_contains_expr has_quote args = +make_concat make_raw_concat_expr make_contains_expr has_quote args append_char="||" = expected_args = if has_quote then 5 else 4 if args.length != expected_args then Error.throw (Illegal_State.Error "Unexpected number of arguments for the concat operation.") else expr = args.at 0 separator = args.at 1 prefix = args.at 2 suffix = args.at 3 - append = " || " + append = " " + append_char + " " possibly_quoted = case has_quote of True -> quote = args.at 4 diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso index 533712091822..b99fa8bc3c49 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso @@ -231,6 +231,12 @@ type Postgres_Dialect if cast_to.is_nothing then expression else SQL_Expression.Operation "CAST" [expression, SQL_Expression.Literal cast_to] + ## PRIVATE + Add an extra cast to adjust the output type of aggregate operations. + Some DBs do CAST(SUM(x) AS FLOAT) others do SUM(CAST(x AS FLOAT)). 
+ cast_aggregate_columns self op_kind:Text columns:(Vector Internal_Column) = + self.cast_op_type op_kind columns (SQL_Expression.Operation op_kind (columns.map c->c.expression)) + ## PRIVATE prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement prepare_fetch_types_query self expression context = diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso index efbc22421dc4..69e4d50e201d 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso @@ -215,6 +215,12 @@ type SQLite_Dialect _ = [op_kind, args] expression + ## PRIVATE + Add an extra cast to adjust the output type of aggregate operations. + Some DBs do CAST(SUM(x) AS FLOAT) others do SUM(CAST(x AS FLOAT)). + cast_aggregate_columns self op_kind:Text columns:(Vector Internal_Column) = + self.cast_op_type op_kind columns (SQL_Expression.Operation op_kind (columns.map c->c.expression)) + ## PRIVATE prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement prepare_fetch_types_query self expression context = diff --git a/distribution/lib/Standard/Microsoft/0.0.0-dev/src/Internal/SQLServer_Dialect.enso b/distribution/lib/Standard/Microsoft/0.0.0-dev/src/Internal/SQLServer_Dialect.enso index a681cc9fdeb1..59585a7649b5 100644 --- a/distribution/lib/Standard/Microsoft/0.0.0-dev/src/Internal/SQLServer_Dialect.enso +++ b/distribution/lib/Standard/Microsoft/0.0.0-dev/src/Internal/SQLServer_Dialect.enso @@ -212,8 +212,27 @@ type SQLServer_Dialect is used only to override the type in cases where the default one that the database uses is not what we want. 
cast_op_type self (op_kind:Text) (args:(Vector Internal_Column)) (expression:SQL_Expression) = - _ = [op_kind, args] - expression + is_int ic = + typeid = ic.sql_type_reference.get.typeid + typeid == Java_Types.SMALLINT || typeid == Java_Types.INTEGER || typeid == Java_Types.BIGINT + + cast_to = case op_kind of + "AVG" -> + if is_int (args.at 0) then "FLOAT" else Nothing + "STDDEV_POP" -> + if is_int (args.at 0) then "FLOAT" else Nothing + "STDDEV_SAMP" -> + if is_int (args.at 0) then "FLOAT" else Nothing + _ -> Nothing + + if cast_to.is_nothing then expression else + SQL_Expression.Operation "CAST" [expression, SQL_Expression.Literal cast_to] + + ## PRIVATE + Add an extra cast to adjust the output type of aggregate operations. + Some DBs do CAST(SUM(x) AS FLOAT) others do SUM(CAST(x AS FLOAT)). + cast_aggregate_columns self op_kind:Text columns:(Vector Internal_Column) = + SQL_Expression.Operation op_kind (columns.map c->(self.cast_op_type op_kind columns (Internals_Access.column_expression c))) ## PRIVATE prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement @@ -224,10 +243,32 @@ type SQLServer_Dialect generate_collate self collation_name:Text -> Text = Base_Generator.default_generate_collate collation_name quote_char="" ## PRIVATE - check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation - check_aggregate_support self aggregate = - _ = aggregate - True + check_aggregate_support self aggregate:Aggregate_Column -> Boolean ! 
Unsupported_Database_Operation = + unsupported name = + Error.throw (Unsupported_Database_Operation.Error name) + case aggregate of + Group_By _ _ -> True + Count _ -> True + Count_Distinct columns _ _ -> + if columns.length == 1 then True else + unsupported "Count_Distinct on multiple columns" + Count_Not_Nothing _ _ -> True + Count_Nothing _ _ -> True + Count_Not_Empty _ _ -> True + Count_Empty _ _ -> True + Percentile _ _ _ -> unsupported "Percentile" + Mode _ _ -> unsupported "Mode" + First _ _ _ _ -> unsupported "First" + Last _ _ _ _ -> unsupported "Last" + Maximum _ _ -> True + Minimum _ _ -> True + Shortest _ _ -> unsupported "Shortest" + Longest _ _ -> unsupported "Longest" + Standard_Deviation _ _ _ -> True + Concatenate _ _ _ _ _ _ -> True + Sum _ _ -> True + Average _ _ -> True + Median _ _ -> unsupported "Median" ## PRIVATE Checks if an operation is supported by the dialect. @@ -243,6 +284,7 @@ type SQLServer_Dialect Feature.Filter -> True Feature.Join -> True Feature.Union -> True + Feature.Aggregate -> True _ -> False ## PRIVATE @@ -401,6 +443,7 @@ private _generate_expression dialect base_gen expr expression_kind:Expression_Ki pair final_expr null_checks_result query : Query -> pair (base_gen.generate_sub_query dialect query) [] + descriptor : Order_Descriptor -> pair (base_gen.generate_order dialect descriptor) [] ## PRIVATE type Expression_Kind @@ -437,7 +480,7 @@ private _op_return_kind op -> Expression_Kind = if return_bool_ops.contains op then Expression_Kind.Boolean_Condition else Expression_Kind.Value private _op_needs_to_materialize_null_checks op -> Boolean = - ["FILL_NULL", "COALESCE"].contains op + ["FILL_NULL", "COALESCE", "COUNT_IS_NULL", "COUNT_EMPTY", "COUNT_NOT_EMPTY", "COUNT", "SUM", "AVG", "LONGEST", "SHORTEST", "COUNT_DISTINCT", "COUNT_DISTINCT_INCLUDE_NULL", "STDDEV_POP", "STDDEV_SAMP", "CONCAT", "CONCAT_QUOTE_IF_NEEDED", "MIN", "MAX"].contains op ## PRIVATE make_dialect_operations = @@ -447,13 +490,13 @@ make_dialect_operations 
= arith_extensions = [floating_point_div, mod_op, decimal_div, decimal_mod, ["ROW_MIN", Base_Generator.make_function "LEAST"], ["ROW_MAX", Base_Generator.make_function "GREATEST"]] bool = [bool_or] - stddev_pop = ["STDDEV_POP", Base_Generator.make_function "stddev_pop"] - stddev_samp = ["STDDEV_SAMP", Base_Generator.make_function "stddev_samp"] - stats = [agg_median, agg_mode, agg_percentile, stddev_pop, stddev_samp] + stddev_pop = ["STDDEV_POP", Base_Generator.make_function "STDEVP"] + stddev_samp = ["STDDEV_SAMP", Base_Generator.make_function "STDEV"] + stats = [stddev_pop, stddev_samp] date_ops = [["year", Base_Generator.make_function "year"], make_datepart "quarter", ["month", Base_Generator.make_function "month"], make_datepart "week" "iso_week", ["day", Base_Generator.make_function "day"], make_datepart "hour", make_datepart "minute", make_datepart "day_of_year" "dayofyear", make_day_of_week, make_datepart "second", make_datepart "millisecond", make_extract_microsecond, ["date_add", make_date_add], ["date_diff", make_date_diff], ["date_trunc_to_day", make_date_trunc_to_day]] special_overrides = [is_empty, ["IIF", _make_iif]] other = [["RUNTIME_ERROR", make_runtime_error_op]] - my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + date_ops + special_overrides + other + my_mappings = text + counts + arith_extensions + bool + stats + date_ops + special_overrides + other base = Base_Generator.base_dialect_operations . 
extend_with my_mappings Base_Generator.Dialect_Operations.Value (base.operations_dict.remove "IS_IN") @@ -469,68 +512,29 @@ private _make_iif arguments:Vector -> SQL_Builder = ## PRIVATE agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg-> - SQL_Builder.code "SUM(CASE WHEN " ++ arg.paren ++ " IS NULL THEN 1 ELSE 0 END)" + SQL_Builder.code "COALESCE(SUM(CASE WHEN " ++ arg.paren ++ " IS NULL THEN 1 ELSE 0 END), 0)" ## PRIVATE agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg-> - SQL_Builder.code "SUM(CASE WHEN (" ++ arg.paren ++ " IS NULL) OR (" ++ arg.paren ++ " = '') THEN 1 ELSE 0 END)" + SQL_Builder.code "COALESCE(SUM(CASE WHEN (" ++ arg.paren ++ " IS NULL) OR (" ++ arg.paren ++ " = '') THEN 1 ELSE 0 END), 0)" ## PRIVATE agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg-> - SQL_Builder.code "SUM(CASE WHEN (" ++ arg.paren ++ " IS NOT NULL) AND (" ++ arg.paren ++ " != '') THEN 1 ELSE 0 END)" - - -## PRIVATE -agg_median = Base_Generator.lift_unary_op "MEDIAN" arg-> - median = SQL_Builder.code "MEDIAN(" ++ arg ++ ")" - has_nan = SQL_Builder.code "BOOLOR_AGG(" ++ arg ++ " = 'NaN'::Double)" - SQL_Builder.code "CASE WHEN " ++ has_nan ++ " THEN 'NaN'::Double ELSE " ++ median ++ " END" - -## PRIVATE -agg_mode = Base_Generator.lift_unary_op "MODE" arg-> - SQL_Builder.code "MODE(" ++ arg ++ ")" - -## PRIVATE -agg_percentile = Base_Generator.lift_binary_op "PERCENTILE" p-> expr-> - percentile = SQL_Builder.code "percentile_cont(" ++ p ++ ") WITHIN GROUP (ORDER BY " ++ expr ++ ")" - has_nan = SQL_Builder.code "BOOLOR_AGG(" ++ expr ++ " = 'NaN'::Double)" - SQL_Builder.code "CASE WHEN " ++ has_nan ++ " THEN 'NaN' ELSE " ++ percentile ++ " END" - -## PRIVATE - These are written in a not most-efficient way, but a way that makes them - compatible with other group-by aggregations out-of-the-box. In the future, we - may want to consider some alternative solutions. 
-first_last_aggregators = - first = make_first_aggregator reverse=False ignore_null=False - first_not_null = make_first_aggregator reverse=False ignore_null=True - last = make_first_aggregator reverse=True ignore_null=False - last_not_null = make_first_aggregator reverse=True ignore_null=True - [["FIRST", first], ["FIRST_NOT_NULL", first_not_null], ["LAST", last], ["LAST_NOT_NULL", last_not_null]] - -## PRIVATE -make_first_aggregator reverse ignore_null args = - if args.length < 2 then Error.throw (Illegal_State.Error "Insufficient number of arguments for the operation.") else - result_expr = args.first - order_bys = args.drop 1 - - method_name = if reverse then "LAST_VALUE" else "FIRST_VALUE" - filter_clause = if ignore_null then ") IGNORE NULLS OVER" else ") OVER" - order_clause = SQL_Builder.code " ORDER BY " ++ SQL_Builder.join "," order_bys - SQL_Builder.code (method_name + "(") ++ result_expr ++ filter_clause ++ order_clause + SQL_Builder.code "COALESCE(SUM(CASE WHEN (" ++ arg.paren ++ " IS NOT NULL) AND (" ++ arg.paren ++ " != '') THEN 1 ELSE 0 END), 0)" ## PRIVATE agg_shortest = Base_Generator.lift_unary_op "SHORTEST" arg-> - SQL_Builder.code "FIRST_VALUE(" ++ arg ++ ") IGNORE NULLS OVER (ORDER BY LENGTH(" ++ arg ++ "))" + SQL_Builder.code "FIRST_VALUE(" ++ arg ++ ") IGNORE NULLS OVER (ORDER BY LEN(" ++ arg ++ "))" ## PRIVATE agg_longest = Base_Generator.lift_unary_op "LONGEST" arg-> - SQL_Builder.code "FIRST_VALUE(" ++ arg ++ ") IGNORE NULLS OVER (ORDER BY LENGTH(" ++ arg ++ ") DESC)" + SQL_Builder.code "FIRST_VALUE(" ++ arg ++ ") IGNORE NULLS OVER (ORDER BY LEN(" ++ arg ++ ") DESC)" ## PRIVATE concat_ops = make_raw_concat_expr expr separator = SQL_Builder.code "string_agg(" ++ expr ++ ", " ++ separator ++ ")" - concat = Base_Generator.make_concat make_raw_concat_expr make_contains_expr + concat = Base_Generator.make_concat make_raw_concat_expr make_contains_expr append_char="+" [["CONCAT", concat (has_quote=False)], ["CONCAT_QUOTE_IF_NEEDED", concat 
(has_quote=True)]] ## PRIVATE @@ -554,14 +558,7 @@ agg_count_distinct args = if args.is_empty then (Error.throw (Illegal_Argument.E True -> ## A single null value will be skipped. SQL_Builder.code "COUNT(DISTINCT " ++ args.first ++ ")" - False -> - ## A tuple of nulls is not a null, so it will not be skipped - but - we want to ignore all-null columns. So we manually filter them - out. - count = SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ "))" - are_nulls = args.map arg-> arg.paren ++ " IS NULL" - all_nulls_filter = SQL_Builder.code " FILTER (WHERE NOT (" ++ SQL_Builder.join " AND " are_nulls ++ "))" - (count ++ all_nulls_filter).paren + False -> Error.throw (Illegal_Argument.Error "COUNT_DISTINCT supports only single arguments in SQLServer.") ## PRIVATE agg_count_distinct_include_null args = case args.length == 1 of @@ -595,12 +592,11 @@ ends_with = Base_Generator.lift_binary_op "ENDS_WITH" str-> sub-> res.paren ## PRIVATE -contains = Base_Generator.lift_binary_op "CONTAINS" str-> sub-> - res = SQL_Builder.code "CHARINDEX(" ++ sub ++ ", " ++ str ++ ") > 0" - res.paren +make_contains_expr expr substring = + SQL_Builder.code "CHARINDEX(" ++ substring ++ ", " ++ expr ++ ") > 0" ## PRIVATE -make_contains_expr expr substring = contains [expr, substring] +contains = Base_Generator.lift_binary_op "CONTAINS" make_contains_expr ## PRIVATE make_case_sensitive = Base_Generator.lift_unary_op "MAKE_CASE_SENSITIVE" arg-> diff --git a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso index 778a3609dcac..e5eaf0f7b3dc 100644 --- a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso +++ b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso @@ -219,6 +219,12 @@ type Snowflake_Dialect _ = [op_kind, args] expression + ## PRIVATE + Add an extra cast to adjust the output type of aggregate 
operations. + Some DBs do CAST(SUM(x) AS FLOAT) others do SUM(CAST(x AS FLOAT)). + cast_aggregate_columns self op_kind:Text columns:(Vector Internal_Column) = + self.cast_op_type op_kind columns (SQL_Expression.Operation op_kind (columns.map c->c.expression)) + ## PRIVATE prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement prepare_fetch_types_query self expression context = diff --git a/test/Microsoft_Tests/src/SQLServer_Spec.enso b/test/Microsoft_Tests/src/SQLServer_Spec.enso index c0c37beb98c8..d0f38ac6e069 100644 --- a/test/Microsoft_Tests/src/SQLServer_Spec.enso +++ b/test/Microsoft_Tests/src/SQLServer_Spec.enso @@ -200,8 +200,8 @@ add_sqlserver_specs suite_builder create_connection_fn = materialize = .read common_selection = Common_Table_Operations.Main.Test_Selection.Config supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True - aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False - agg_in_memory_table = (enso_project.data / "data.csv") . read + aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config advanced_stats=False text_shortest_longest=False first_last=False first_last_row_order=False aggregation_problems=False multi_distinct=False first_last_multi_order=False first_last_ignore_nothing=False text_concat=False + agg_in_memory_table = ((Project_Description.new enso_dev.Table_Tests).data / "data.csv") . 
read agg_table_fn = _-> agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso index 9dca091a8e6e..6bdd30f483a6 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso @@ -1262,7 +1262,7 @@ add_aggregate_specs suite_builder setup = loc = Meta.get_source_location 2 Test.fail "Expected a Nothing or NaN but got: "+value.to_text+" (at "+loc+")." - suite_builder.group prefix+"Table.aggregate should correctly handle infinities" group_builder-> + if setup.flagged ..Supports_Infinity then suite_builder.group prefix+"Table.aggregate should correctly handle infinities" group_builder-> pos_inf = 1/0 neg_inf = -1/0 @@ -1341,7 +1341,7 @@ add_aggregate_specs suite_builder setup = expect_null_or_nan <| m1.columns.first.at 0 expect_null_or_nan <| m1.columns.second.at 0 - suite_builder.group prefix+"Table.aggregate should correctly handle NaN" pending=(resolve_pending test_selection.nan) group_builder-> + if setup.flagged ..Supports_Separate_NaN then suite_builder.group prefix+"Table.aggregate should correctly handle NaN" pending=(resolve_pending test_selection.nan) group_builder-> nan = 0.log 0 group_builder.specify "on Average" <| t1 = table_builder [["X", [Nothing, nan, 0, 1, 2]]] @@ -1790,7 +1790,7 @@ add_aggregate_specs suite_builder setup = table = table_builder [["A", [3,2,1]], ["X", [1,2,3]]] order = [Sort_Column.Name "A"] expect_sum_and_unsupported_errors 2 <| - table.aggregate columns=[Sum "X", First ignore_nothing=False "X" (order_by=order), Last ignore_nothing=False "X" (order_by=order)] + table.aggregate columns=[Sum "X", First "X" ignore_nothing=False order_by=order, Last "X" ignore_nothing=False order_by=order] on_problems=..Report_Warning if 
test_selection.first_last_ignore_nothing.not then group_builder.specify "with First and Last with ignore_nothing=True" <| @@ -1817,7 +1817,7 @@ add_aggregate_specs suite_builder setup = expect_sum_and_unsupported_errors 2 <| table.aggregate columns=[Sum "X", Shortest "Y", Longest "Y"] - if test_selection.text_concat.not && (setup.prefix.contains "Snowflake" . not) then + if test_selection.text_concat.not && (setup.prefix.contains "Snowflake" . not && setup.prefix.contains "SQLServer" . not) then group_builder.specify "with Concatenate" <| table = table_builder [["X", [1,2,3]], ["Y", ["a", "bb", "ccc"]]] expect_sum_and_unsupported_errors 1 <| From dcc75957af0c5ff92f84fa861ce95cbcc0a12c53 Mon Sep 17 00:00:00 2001 From: Sergei Garin Date: Sun, 22 Dec 2024 16:43:03 +0300 Subject: [PATCH 14/15] Docs panel: add support for loading images from the Cloud (#11884) --- app/common/src/text/english.json | 5 ++ .../integration-test/dashboard/actions/api.ts | 29 ++++--- .../dashboard/assetPanel.spec.ts | 23 +++++- .../dashboard/mock/example.png | Bin 0 -> 9033 bytes .../MarkdownViewer/MarkdownViewer.tsx | 76 +++++++----------- .../MarkdownViewer/defaultRenderer.ts | 46 +++++++++++ .../src/dashboard/services/RemoteBackend.ts | 7 +- app/ide-desktop/client/src/projectManager.ts | 2 +- tools/performance/engine-benchmarks/README.md | 25 ++++-- 9 files changed, 140 insertions(+), 73 deletions(-) create mode 100644 app/gui/integration-test/dashboard/mock/example.png create mode 100644 app/gui/src/dashboard/components/MarkdownViewer/defaultRenderer.ts diff --git a/app/common/src/text/english.json b/app/common/src/text/english.json index a89c0135bc24..77d36c0eed46 100644 --- a/app/common/src/text/english.json +++ b/app/common/src/text/english.json @@ -1,6 +1,10 @@ { "submit": "Submit", "retry": "Retry", + + "arbitraryFetchError": "An error occurred while fetching data", + "arbitraryFetchImageError": "An error occurred while fetching an image", + "createFolderError": "Could not create new 
folder", "createProjectError": "Could not create new project", "createDatalinkError": "Could not create new Datalink", @@ -174,6 +178,7 @@ "getCustomerPortalUrlBackendError": "Could not get customer portal URL", "duplicateLabelError": "This label already exists.", "emptyStringError": "This value must not be empty.", + "resolveProjectAssetPathBackendError": "Could not get asset", "directoryAssetType": "folder", "directoryDoesNotExistError": "Unable to find directory. Does it exist?", diff --git a/app/gui/integration-test/dashboard/actions/api.ts b/app/gui/integration-test/dashboard/actions/api.ts index 9944c8bbc14f..09f7c15cbb5f 100644 --- a/app/gui/integration-test/dashboard/actions/api.ts +++ b/app/gui/integration-test/dashboard/actions/api.ts @@ -15,6 +15,7 @@ import * as actions from '.' import { readFileSync } from 'node:fs' import { dirname, join } from 'node:path' import { fileURLToPath } from 'node:url' +import invariant from 'tiny-invariant' const __dirname = dirname(fileURLToPath(import.meta.url)) @@ -1129,17 +1130,23 @@ async function mockApiInternal({ page, setupAPI }: MockParams) { }) }) - await get(remoteBackendPaths.getProjectAssetPath(GLOB_PROJECT_ID, '*'), (route, request) => { - const maybeId = request.url().match(/[/]projects[/]([^?/]+)/)?.[1] - if (!maybeId) return - const projectId = backend.ProjectId(maybeId) - called('getProjectAsset', { projectId }) - return route.fulfill({ - // This is a mock SVG image. Just a square with a black background. - body: '/mock/svg.svg', - contentType: 'text/plain', - }) - }) + await get( + remoteBackendPaths.getProjectAssetPath(GLOB_PROJECT_ID, '*'), + async (route, request) => { + const maybeId = request.url().match(/[/]projects[/]([^?/]+)/)?.[1] + + invariant(maybeId, 'Unable to parse the ID provided') + + const projectId = backend.ProjectId(maybeId) + + called('getProjectAsset', { projectId }) + + return route.fulfill({ + // This is a mock SVG image. Just a square with a black background. 
+ path: join(__dirname, '../mock/example.png'), + }) + }, + ) await page.route('mock/svg.svg', (route) => { return route.fulfill({ body: MOCK_SVG, contentType: 'image/svg+xml' }) diff --git a/app/gui/integration-test/dashboard/assetPanel.spec.ts b/app/gui/integration-test/dashboard/assetPanel.spec.ts index 9282cf573724..37e8ffd05102 100644 --- a/app/gui/integration-test/dashboard/assetPanel.spec.ts +++ b/app/gui/integration-test/dashboard/assetPanel.spec.ts @@ -5,7 +5,7 @@ import { EmailAddress, UserId } from '#/services/Backend' import { PermissionAction } from '#/utilities/permissions' -import { mockAllAndLogin } from './actions' +import { mockAllAndLogin, TEXT } from './actions' /** Find an asset panel. */ function locateAssetPanel(page: Page) { @@ -87,4 +87,25 @@ test('Asset Panel documentation view', ({ page }) => await expect(assetPanel.getByTestId('asset-panel-tab-panel-docs')).toBeVisible() await expect(assetPanel.getByTestId('asset-docs-content')).toBeVisible() await expect(assetPanel.getByTestId('asset-docs-content')).toHaveText(/Project Goal/) + await expect(assetPanel.getByText(TEXT.arbitraryFetchImageError)).not.toBeVisible() })) + +test('Assets Panel docs images', ({ page }) => { + return mockAllAndLogin({ + page, + setupAPI: (api) => { + api.addProject({}) + }, + }) + .do(() => {}) + .driveTable.clickRow(0) + .toggleDocsAssetPanel() + .withAssetPanel(async (assetPanel) => { + await expect(assetPanel.getByTestId('asset-docs-content')).toBeVisible() + + for (const image of await assetPanel.getByRole('img').all()) { + await expect(image).toBeVisible() + await expect(image).toHaveJSProperty('complete', true) + } + }) +}) diff --git a/app/gui/integration-test/dashboard/mock/example.png b/app/gui/integration-test/dashboard/mock/example.png new file mode 100644 index 0000000000000000000000000000000000000000..b4d6d8b3cb9ce9011614edf353a4ede9919f057e GIT binary patch literal 9033 zcmbt)XH-)`+iff;N>wS+1(YsTL8*cu5}I@bLJrx1OlN@d9J7pfn2%+ 
zzFU!A1fN@C?Y|+AI}jB`dEIwu_-QW_b>i98e}heLO2lcPyu7N6FO4GalUs(7{xam~ zNUo^(J|`6`@(A6=bjV^^3l7Os?K^^E7_HJAD%&*3jA7(3f!JE$fY5 zNE_d_aDI4h(@I?BCA+FeH50 z@cd`S5C|Ck;8l=!}7` z?oTUi!K62@&0oK=du2Bq*te)nlZThQ2npN+YaIw9)Dlltcgu{cwyy1)wV?TUz06+n z(x~25V{yPMi@oh)Kh zmMXO0!^Q5QOJhZ5rciVr#+>2;WKJv;LN>>itG$AGGj4E7cX-^{XNO{8rYy(~RvO9I z$5~qoh#TLbZt+XqUv>Zdw9;FECk|92h4n2R$WbqmJ>Ry zCP)N_@Kc746k4>84QR}?HhZp2RoTXI`O0Sc>v6@=($jOxskD0I5BsrIkDXjpRaI%* zUcy?XpPp{(M9_J>LsLVZ(BAV=IY_{ck3NA1Im|xTcS6KXkx8*c6D@QO&_qXk(ox8Qo*LP z-dBaZp@Kj%89;aiQM<^A*p8(UD@cm zSNyR7yS?z++mw)ha7&?=)r!FFL)F8>)%W`R{f-Z=r4JAN@Es@(AW>%=Tqt6@hDc6! zd+Tr}$}hz)m8ya69jF$TaKf*5yt2sVg9HlEozhb^YjDNkKmYcT+7nEFWo^eO4UawJ zkrPL1j09=ZIJ>zeOYC=h;3tQ|XlZ}6D@WVN`spW$T>=qAxy|;WTvbIKTjx`u$Pu$y zJ+Xdac5jyG+ zBtF+ji)54;t^g@ioPo#nr|J}F=*ga~VoFeR5Pm*ByWV(S0=_GX$p#MRJrnp*ko&J_ zclp=Ct;>cC3=AKyP}~et4EnM*UXhxdOwJ^}Zb^R~^1$uJ2aP`8LxIy7KW}Quq`q_! zzi{`Od`6){n|;R&){ULJh83b?g&Qb)Aue`fMVB>ZHd5^=%7UMMGTCI*Hc zPq^sBJZCrJzD-tfWMrgjlAwrA^41u=kd%~<&{Kc^Hiat^5)y(77J8nZ`mNqPmiezz-025DTnUf=M?ut|ymTpWp8{t2{J3DQaUhM2ERiq8^s9MS`PiXeSYmT^B_kt4zQ6?Kj{JN<|J}QHn=`Fz zCUu9(JTx0Ktw_7WdIzk+)2DKNe!7I7L{&!VcmIHyra*N^zfi0B|SJ?t9T=s3vg|HqK1l9`nj4K_)|?ogQxrX#q#K_IgAxk zYTKwMFaJG;Q@QIii!r{0(d!l|yO)>OuV26RH>OpeKNl7k_i%Mx8!Iy&%#g9Pw4`E@ zRJ1X!wo<`sl5PC1;(Sc35ZAqQ>7$uHG0!fey1E)<&oENPd*?L(*p-ojOj*BPG#w-3 zWTUgC!l$@6owsj=+3!k7crNxPxP^#VcDZessJj-u;Awfc_3G8DKOe7BiHeFcirX*G z&CM+>E#dL75tBx{sVk7?=U1{ye(Y~JO-$;YE=~5V9rW-#)ZpL|Lb)Gp&zV_tp3qt^VMS&d$0wf2jcQ-tU|}g6t{Y*M@$Qhu z9BeJ9tE;a}m&1_$xf1fg5|(sybb8S#4{V1Ich?ZS6B84d#MF!oLCx8E2PQEYF0On# zXDN?$b6nWx&!(;3?sZN@CbMmRi&tg+kCPE?HiH?(+$f)YM{K=AiHs%)Dpwa58bXOD z6J(hS{1P=f*2g zr>iR~Mbx3cHl~qF8yf?@daYQ<6C1!xraPHS+bwFE_ynbR=py#lWh&3+=IuV_CcS)F zGFFa!%WE|6nL_-?mjG4ds0yz;4eg2JZm5`uWDt{m7gCHuCJC9DHaN0~ErNJ8C^yOW zWMN?`&`4)wW?ujEM}eD;p1$H`O^Dae4)?O~Av`xXR9mE7)7aV3Az;#2xkt6U*p~!Mo{i%APf!C2 zxGadHzJ&lj?6vDL!2mAc5v)vVm(jO6jXeI@u57l{V!;LN2sq2tqUkKgUV?)}^f9a{ zj?)xX&y+psjG*U;Sq4aPa$@ 
zENoa~O~Eoo8vH{gj!T3S@RvSdLtqu_!=07-`ugh_9zj9<(^yVUPK4GcY1~_PcQ~c+ zB+}FAerANi3bgGS91gFs?thhUXl#7->eUJuQvA`Q-;>-pWU+#R4kiXuuAuT?>z(9?tMVFGOl zLJV%O=1hp=*3?ecX{V>58^+p;D{Go*_D_ySWoFLitEc|OmY0`vYosktHHl5vB*w?z zjq6#?{M&G5tQ14dL3170V#T9T*6*;RqvMkzhmVhseTl5?HuIfg38_4i5eTS;2)Kh!m^o~Hah>Y(d+LQr{Qp0 z6(kHOVepZ#SAK%c-UsvHbh1AekDVj?c#%a<&xx?4AIF7t#_ivorv3Dv(n zn5!lg4s}v^^@^j_0S@nsEGK2>;^G290lo6%x=gIRygXWanGBTTJ2Ln0vj>^FYssJ$ z5KfIvO~iA5uu-S>Q<9NktKno~%^bzES0Iv<_YJdvz^i5bfeD-1A<5-{xP~*F=Qo55JUx1GqBUJ z|HZ7Od2XJvvN_n>qkWInDa5SNh44QvQc}niQ@fG;D24DJ3;y87oz2CcF`S+oQ(i;@ zoakt3I#Ue?`DTn;UzhgYSq8``VgLJ!mw*@@!=txZ2?+@bF9CFLK8wuCY_h~vq$~XH zAXkr!iaI$t0h92~Mnyz`n)}w(wPewruk{-G{kFfqzuuCWu$b8P?ylZ)lTH_?G)w)d z%F(lvRhFSG$T+VmN=iz`G%ayn-mn=~Nw>mS=j`H!3xS`3Tx>20OUMfjCZ1yKXd zmy)4nWYo2^eg8gFLv2HJgyy^5)!k0jhq7XMcX3ljo(q3EBlSh3u()T>RtHIxNEyI% z-YDeGw-h7o*Y@H^Q%$b#-@iw*@+&-tLVN#oMi`VC?Jh*i3BsD=Sx35jcS_yY#xNNp z6BCG;0W_NCdhm7pJ-vkZcwv5XC#Tu3N)c&k%#!e-EP1y;CDMs{2V*?m@ccP7G~t3! zFB}fn9zv1By&N7HNlrnbuBvJ&e)$Rs3C8)<6F0x}*EqMCr-rL|=G@`4mxo{>&qB(K z>T^BgGILw}w?n9Z%)dR{U(4Cc%~6wPtxzJ9B03o>1q&bAk#A!@JF>`56!W@wENty< z&IWAb`m>*g@CNS*1tcUsAjn-NncEZX4EY{zQM72FRb_gfo=3J zH!m-5z+iK3Ma9zMis?Rw>^&i&id)m;?==)sc*v`4fEq==A7QN3zaZ*0^whi2c-d4h+}!Z{p0J`T_$m|;DjJ*SFsT#xxt!R`%+UO)`(65!HsQy`n6z^DyfQ!I`Xz( zFl%65K{uP6VAjUT$FQ)laXXN-dfgnP>_TR3$GB9d5DF%7_f=gSRa0xLUo@-CGR%9m zXudsE>?k^4BOR2jDQ?Y-Pn~}Nbb>+;cu|^v!dD)gRq8`6Azi4N8c24%g>S`Rnl5J8 z-_1?7n>S-qQuyd1MuvwK6clJORCF<*YCcM{yV5y1F|musHrO@)fPO|3pOitOU~Eh^_4M?B$`7hH@b~6$o+{vsh&8^OnEyP#{6^LYy1)O5?@>Sgw^Zy|f4JAi zU<4~GtPMY&)Nk$6?3wY{a`G_B*u_=vc7X!v{M$#Ztv-SWE8Wj1m~JZY@$q>mult-H zlcay|8=1!B9V#8_iu&xWPl}ZS_6k;a=j9L+8|m*a%gqg<;T3R~3dDO^#d5Hbdqjw;439u?dfq;4^*!bYv6g+4V2 z2si`G9@NRLchV0X&m*1=NPHTl-Ocgw;2Uf4iHRFiEHNpG8l>~vMA?s5j;kMZ)HhxC z3qCwNyl~1%FPMxiwbfAoL}8-Vy*oJK*N$cq0x?mU{nXCO9AEQiZtfd-AdoWt2lJsI zFq#@0FO!gna9?5)Hixvz1f2TfuJgI*UAn=C=zT=bOtR#sMeQq3_)3Wys6=wP^aJ$r@{B+HgEu@arg>Fy~R59tX=Ao}`8 zZEg_`6s!=Gp*1m;vB?sCY27TWCnF=HpvZI5J@+6C^i2aF)!RRICn)-LwUA3V 
zbq=J-2m$ir`_M#8XXH%Kv=RRF_i~IsOPh& z)64a%1~Xxc;pd_YDLWv$N0iKyE$&xqV%i7lz2*)!K?cj7z>E7wM|Xg-HmtQ&ZDt&jMz@2+;Lmu}46ziP;QT;S4qb ziGHbDr}^U2a!N~!Cw~B$jOVGQh@}Y5!vT9)oq4t!NuPOUAobo*V4&acNRpcyl~oCp zC11?H-2*NPMnfZU$FbV5sw$BavGd;X=k)od0WNQ8dRQH~ZB z6U&qf5TuI$8qx91n>8FxAHQtR6{Z-Qcz#&}CeZA@Mun@O4^ptu){c#diHVDg%eNjk zskZ6?IJSrt7Z(S8sMAT zk==}A9R@vCpe;cD+Kf@nO4#K#l9QfHYkf)tL{P}Ix!Pj8eo4sm`ECBy+l-6?6hyCo z-<(7+4|P>JVt!9d{8EL2CTEcU3kYPD1vE+n`Ud=WHy>}aau<9$YeLr)6&0O*nxEal zPuGSe#Kf>ick7TA-@O@@)zZ?~c=z5t_kh3iQl1Nos?5V^v|52C1>=I8HeO!rU#ApE z>@4|@e}Nu3B5p5@jg3u~-GmfUT%h|VX9c$jJ(%KGgcYZ2lKG&5C-OD{Q0m#&QR#)XhGW>thOdd{gnwEFp_ zu#kti2bQx#gkOZn27t4IQ+ckq*1kbGkK@rX7G!U4Z?Sb2Pk8sj$`;W?FzH3SB1d^C zjh?QqZd3^=yS+HJul>YKEWhr}Q82mmhv>gRO@f|M!RA-c#KdeN6d^-`z&x0~pXlSq ze^H>rch63pQ$;I2`~CNhZ&mLXx)>Q4{0|^Q=dfi~`#Q!K3ACkhMfn`bWPADh>6^5QrHc*gEt^m?He5{7c&jK#PT{t6f~Tq0sPq+C^;{ZJ_B{ zldo=HTGo$NqoAMwaVLBO9DAH|>Hi$P9C(U@@&EI{#-hpwLczl))SN_ncF@mRrC5q@ z_O0UUpnX-Oz2@+d(U1n-A>m>>+?J!xt>dTTU~n~S25jI3x2I`GIg-(PbzY^4CY#px z+9QDh>ZI%%@6%<+wQPJdWYp3yg8GU2;#UHR4bG{p>V_G|0pSJB!;>`z{wrmpORH~? 
zf{@*t;5f-WUN4cjew!xFOC3vl;owuV)Xt|A->xqT{YoX_|2SN?-P zomQsXvtl>TD6hV_1a3}nK(+1;#&1uSlYA?9d)mW>b#lUZ+=r~Px!t%}n6F@B(J;z| z5->ZTSy6!eeumJ%@WQe!k^{1P&8QNh>&i2K=WB^~RR1f9p*LB0qfu@moEW;PY=@+S z?2>~8Flhl_N76r5)og?-s62?ave0~JWZz+spb7`96^_xHOaSOLUJC5<6@%p zG6B1@5~`>ZAtkM#FCOY9<>xk(NeDmcP=lphj#kFlAAHhI#12b*>vsTG~$X{Q~p z8n?HUk>*}+@5i@XdtvY!`dcX?>>+z@QqKON#hE8zeyg%%4YyB>MwW$2Q`^su?yfJ0 zX+))i_$Nixq*A+xt~3&&Od*iVupmfrwRoME=|44W-BA8l!q?^6J_7y^;gx)zxr^0M z-pVf;C5n^jg2u1!-Xg!l6W|+bl4N`&SG(B=t}N_>%=?55O)b}3X7@s_Dhix({zWg0 zv@*9EKlF%29pX-yqTOM+qAQD=739tg)e^EFs5#<#rb?xZQjN@@t>(5?h>RrJ(N52S zq6ze&08?cewKi|j6a#YM-Eh@Q8Jk-QU7Ku2Hoab&QbNYA$f~1@9JeSB>)AZs{rDx= zKfQ9vZ<0vLM?!>?Fw-r$Z@n?2H>2yUepo~C>9*g&cXrAM&cyK42d@=Kn~p>U*^MVG zjLq)8(P45hxR8}`egK=IA)f9g{SDf$eob090}-ne%NbAFB;e@bEY7YNox@#7L)xjJ z@H{5#%Rr(*iZW>re;;R13%`#tsn^lz{m|X^ih$X6gn{(B7dDdGA+8nma>PxcVWBFrZKIqsp_bPXr zn&0!5dw-J_Q3AdZKeaAKNIB4CcleRPeFyAI_u=mrtZ?BiNn-Zyt@9vzbzBK4O1wFUAsqkE3 zto5S&_Z0RwHoIx90}`dVU+-=#!XyV1cKDz9=J?8g4<04!^6n~NONah!Gtf1qk#w8h zpZ0$wEI3@EG_Gw?WXQKyyKU9wD@ocQ6IeX-0a!nV>E^vR}1EuYPS`GN4#(&$May*1ix*pp)%m< z(=9C5$Vo-%={V!%e0C26kWrVO!gDPzt#32Gu*xT_IOM6Vf4(B0myj=t-TEAJ3_2$O zj`EuABz624dS!Jrz+wDiR>pU=4G16B<4s>=&g>f`U8WRkDtByn{IQ(cdMUDL(>3&1 z)-XE9fW5p>JQ~r4XxY}Fz+J(Se7H#5%f6;8jc-X2vgMjxtZwq9MCls#qGz8}$g3!Y zdBgp@#EA|We`rhbMvzglWsb`)CTIrd{0C0zU=f3`yj<<502GD>7IQ`srA@^Gb0A>X z*F^0Dj!h5m#VF=dgQgBO?AKw#DcvvHK*n=}f3EHo&1CtQ{4_XA(8##{hKdq{*9sf* z4oEF%m2kn`B}@7p`0~aOryYUeX-0NE|HT();K=cXPE(=pcLXuj$-X0rv`v~uN&U>b zD_ov`x9M=Qoa{*}I9$!r*l76#^L)57+8+0=n5~VRna<`@-_T2$0JnzSArBa2RRnMn z#uvJ#ZbJO078=?`mAc`Zqsr|TRf{@$-@CtAxK0|?t-T)U(Vf-Bx?d>V|L`(oif{F#qsBUlPH5 literal 0 HcmV?d00001 diff --git a/app/gui/src/dashboard/components/MarkdownViewer/MarkdownViewer.tsx b/app/gui/src/dashboard/components/MarkdownViewer/MarkdownViewer.tsx index 96970d66f50e..1531ff5e6974 100644 --- a/app/gui/src/dashboard/components/MarkdownViewer/MarkdownViewer.tsx +++ b/app/gui/src/dashboard/components/MarkdownViewer/MarkdownViewer.tsx @@ -1,10 +1,12 @@ -/** 
@file A WYSIWYG editor using Lexical.js. */ +/** @file A Markdown viewer component. */ +import { useLogger } from '#/providers/LoggerProvider' +import { useText } from '#/providers/TextProvider' import { useSuspenseQuery } from '@tanstack/react-query' import type { RendererObject } from 'marked' import { marked } from 'marked' -import { useMemo } from 'react' -import { BUTTON_STYLES, TEXT_STYLE, type TestIdProps } from '../AriaComponents' +import { type TestIdProps } from '../AriaComponents' +import { DEFAULT_RENDERER } from './defaultRenderer' /** Props for a {@link MarkdownViewer}. */ export interface MarkdownViewerProps extends TestIdProps { @@ -14,67 +16,43 @@ export interface MarkdownViewerProps extends TestIdProps { readonly renderer?: RendererObject } -const defaultRenderer: RendererObject = { - /** The renderer for headings. */ - heading({ depth, tokens }) { - return `${this.parser.parseInline(tokens)}` - }, - /** The renderer for paragraphs. */ - paragraph({ tokens }) { - return `

${this.parser.parseInline(tokens)}

` - }, - /** The renderer for list items. */ - listitem({ tokens }) { - return `
  • ${this.parser.parseInline(tokens)}
  • ` - }, - /** The renderer for lists. */ - list({ items }) { - return `
      ${items.map((item) => this.listitem(item)).join('\n')}
    ` - }, - /** The renderer for links. */ - link({ href, tokens }) { - return `${this.parser.parseInline(tokens)}` - }, - /** The renderer for images. */ - image({ href, title }) { - return `${title}` - }, - /** The renderer for code. */ - code({ text }) { - return ` -
    ${text}
    -
    ` - }, - /** The renderer for blockquotes. */ - blockquote({ tokens }) { - return `
    ${this.parser.parse(tokens)}
    ` - }, -} - /** * Markdown viewer component. * Parses markdown passed in as a `text` prop into HTML and displays it. */ export function MarkdownViewer(props: MarkdownViewerProps) { - const { text, imgUrlResolver, renderer = defaultRenderer, testId } = props + const { text, imgUrlResolver, renderer = {}, testId } = props - const markedInstance = useMemo( - () => marked.use({ renderer: Object.assign({}, defaultRenderer, renderer), async: true }), - [renderer], - ) + const { getText } = useText() + const logger = useLogger() + + const markedInstance = marked.use({ renderer: Object.assign({}, DEFAULT_RENDERER, renderer) }) const { data: markdownToHtml } = useSuspenseQuery({ - queryKey: ['markdownToHtml', { text }], - queryFn: () => - markedInstance.parse(text, { + queryKey: ['markdownToHtml', { text, imgUrlResolver, markedInstance }] as const, + queryFn: ({ queryKey: [, args] }) => + args.markedInstance.parse(args.text, { async: true, walkTokens: async (token) => { if (token.type === 'image' && 'href' in token && typeof token.href === 'string') { - token.href = await imgUrlResolver(token.href) + const href = token.href + + token.raw = href + token.href = await args + .imgUrlResolver(href) + .then((url) => { + return url + }) + .catch((error) => { + logger.error(error) + return null + }) + token.text = getText('arbitraryFetchImageError') } }, }), }) + return (
    = { + /** The renderer for headings. */ + heading({ depth, tokens }) { + const variant = depth === 1 ? 'h1' : 'subtitle' + return `${this.parser.parseInline(tokens)}` + }, + /** The renderer for paragraphs. */ + paragraph({ tokens }) { + return `

    ${this.parser.parseInline(tokens)}

    ` + }, + /** The renderer for list items. */ + listitem({ tokens }) { + return `
  • ${this.parser.parseInline(tokens)}
  • ` + }, + /** The renderer for lists. */ + list({ items }) { + return `
      ${items.map((item) => this.listitem(item)).join('\n')}
    ` + }, + /** The renderer for links. */ + link({ href, tokens }) { + return `${this.parser.parseInline(tokens)}` + }, + /** The renderer for images. */ + image({ href, title, raw }) { + const alt = title ?? '' + + return ` + ${alt} + ` + }, + /** The renderer for code. */ + code({ text }) { + return ` +
    ${text}
    +
    ` + }, + /** The renderer for blockquotes. */ + blockquote({ tokens }) { + return `
    ${this.parser.parse(tokens)}
    ` + }, +} diff --git a/app/gui/src/dashboard/services/RemoteBackend.ts b/app/gui/src/dashboard/services/RemoteBackend.ts index f14f56ff43e5..817732cb1785 100644 --- a/app/gui/src/dashboard/services/RemoteBackend.ts +++ b/app/gui/src/dashboard/services/RemoteBackend.ts @@ -1268,14 +1268,15 @@ export default class RemoteBackend extends Backend { projectId: backend.ProjectId, relativePath: string, ): Promise { - const response = await this.get( + const response = await this.get( remoteBackendPaths.getProjectAssetPath(projectId, relativePath), ) if (!responseIsSuccessful(response)) { - return Promise.reject(new Error('Not implemented.')) + return await this.throw(response, 'resolveProjectAssetPathBackendError') } else { - return await response.text() + const blob = await response.blob() + return URL.createObjectURL(blob) } } diff --git a/app/ide-desktop/client/src/projectManager.ts b/app/ide-desktop/client/src/projectManager.ts index 6b83e09e8b8b..00801d1cdb02 100644 --- a/app/ide-desktop/client/src/projectManager.ts +++ b/app/ide-desktop/client/src/projectManager.ts @@ -103,7 +103,7 @@ export async function version(args: config.Args) { } /** - * Handle requests to the `enso-project` protocol. + * Handle requests to the `enso://` protocol. * * The protocol is used to fetch project assets from the backend. * If a given path is not inside a project, the request is rejected with a 403 error. diff --git a/tools/performance/engine-benchmarks/README.md b/tools/performance/engine-benchmarks/README.md index a839647dbb45..0363635f7da8 100644 --- a/tools/performance/engine-benchmarks/README.md +++ b/tools/performance/engine-benchmarks/README.md @@ -28,17 +28,26 @@ One can also download only a CSV file representing all the selected benchmark results with `bench_download.py --create-csv`. 
## Contribute + Run local tests with: + ```bash python -m unittest --verbose bench_tool/test*.py ``` ## Relation to GH Actions -The `bench_download.py` script is used in [Benchmarks Upload](https://github.com/enso-org/enso/actions/workflows/bench-upload.yml) -GH Action to download the benchmarks generated by the [Benchmark Engine](https://github.com/enso-org/enso/actions/workflows/engine-benchmark.yml) -and [Benchmark Standard Libraries](https://github.com/enso-org/enso/actions/workflows/std-libs-benchmark.yml) GH Actions. -The `Benchmarks Upload` action is triggered by the `engine-benchmark.yml` and `std-libs-benchmark.yml` actions. - -The results from the benchmarks are gathered from the GH artifacts associated with corresponding workflow runs, and -save as JSON files inside https://github.com/enso-org/engine-benchmark-results repo inside its -[cache](https://github.com/enso-org/engine-benchmark-results/tree/main/cache) directory. + +The `bench_download.py` script is used in +[Benchmarks Upload](https://github.com/enso-org/enso/actions/workflows/bench-upload.yml) +GH Action to download the benchmarks generated by the +[Benchmark Engine](https://github.com/enso-org/enso/actions/workflows/engine-benchmark.yml) +and +[Benchmark Standard Libraries](https://github.com/enso-org/enso/actions/workflows/std-libs-benchmark.yml) +GH Actions. The `Benchmarks Upload` action is triggered by the +`engine-benchmark.yml` and `std-libs-benchmark.yml` actions. + +The results from the benchmarks are gathered from the GH artifacts associated +with corresponding workflow runs, and save as JSON files inside +https://github.com/enso-org/engine-benchmark-results repo inside its +[cache](https://github.com/enso-org/engine-benchmark-results/tree/main/cache) +directory. 
From 393267977142fafb27d68426cd87c38d5a5840ed Mon Sep 17 00:00:00 2001 From: Sergei Garin Date: Sun, 22 Dec 2024 16:54:52 +0300 Subject: [PATCH 15/15] Fix Offline Mode (#11887) Closes: https://github.com/enso-org/cloud-v2/issues/1630 This PR disables the refreshing the session if user is offline --- app/common/package.json | 1 + app/common/src/queryClient.ts | 5 +- app/common/src/text/english.json | 9 +- app/common/src/utilities/errors.ts | 52 ++++++ app/gui/src/dashboard/App.tsx | 23 +++ .../src/dashboard/assets/offline_filled.svg | 5 + .../src/dashboard/assets/offline_outline.svg | 3 + .../Form/components/FormError.tsx | 14 +- .../dashboard/components/ErrorBoundary.tsx | 68 +++++--- .../components/OfflineNotificationManager.tsx | 33 ++-- app/gui/src/dashboard/components/Suspense.tsx | 60 +------ app/gui/src/dashboard/hooks/backendHooks.tsx | 2 - app/gui/src/dashboard/hooks/offlineHooks.ts | 2 +- app/gui/src/dashboard/layouts/Drive.tsx | 79 ++++++--- .../layouts/Settings/SetupTwoFaForm.tsx | 77 +++++---- app/gui/src/dashboard/layouts/UserBar.tsx | 26 ++- .../components/PlanSelectorDialog.tsx | 158 +++++++++--------- .../payments/components/StripeProvider.tsx | 45 +++-- .../dashboard/providers/SessionProvider.tsx | 12 +- .../__test__/SessionProvider.test.tsx | 6 +- app/gui/src/dashboard/utilities/HttpClient.ts | 20 ++- app/gui/src/dashboard/utilities/error.ts | 17 +- 22 files changed, 443 insertions(+), 274 deletions(-) create mode 100644 app/common/src/utilities/errors.ts create mode 100644 app/gui/src/dashboard/assets/offline_filled.svg create mode 100644 app/gui/src/dashboard/assets/offline_outline.svg diff --git a/app/common/package.json b/app/common/package.json index a3bda304841a..b193595dc5b9 100644 --- a/app/common/package.json +++ b/app/common/package.json @@ -15,6 +15,7 @@ "./src/backendQuery": "./src/backendQuery.ts", "./src/queryClient": "./src/queryClient.ts", "./src/utilities/data/array": "./src/utilities/data/array.ts", + "./src/utilities/errors": 
"./src/utilities/errors.ts", "./src/utilities/data/dateTime": "./src/utilities/data/dateTime.ts", "./src/utilities/data/newtype": "./src/utilities/data/newtype.ts", "./src/utilities/data/object": "./src/utilities/data/object.ts", diff --git a/app/common/src/queryClient.ts b/app/common/src/queryClient.ts index ab69795436d0..9c8829c8f736 100644 --- a/app/common/src/queryClient.ts +++ b/app/common/src/queryClient.ts @@ -83,7 +83,7 @@ export function createQueryClient( storage: persisterStorage, // Prefer online first and don't rely on the local cache if user is online // fallback to the local cache only if the user is offline - maxAge: queryCore.onlineManager.isOnline() ? -1 : DEFAULT_QUERY_PERSIST_TIME_MS, + maxAge: DEFAULT_QUERY_PERSIST_TIME_MS, buster: DEFAULT_BUSTER, filters: { predicate: query => query.meta?.persist !== false }, prefix: 'enso:query-persist:', @@ -130,6 +130,9 @@ export function createQueryClient( defaultOptions: { queries: { ...(persister != null ? { persister } : {}), + // Default set to 'always' to don't pause ongoing queries + // and make them fail. + networkMode: 'always', refetchOnReconnect: 'always', staleTime: DEFAULT_QUERY_STALE_TIME_MS, retry: (failureCount, error: unknown) => { diff --git a/app/common/src/text/english.json b/app/common/src/text/english.json index 77d36c0eed46..b2e332c1836b 100644 --- a/app/common/src/text/english.json +++ b/app/common/src/text/english.json @@ -21,6 +21,9 @@ "deleteAssetError": "Could not delete '$0'", "restoreAssetError": "Could not restore '$0'", + "refetchQueriesPending": "Getting latest updates...", + "refetchQueriesError": "Could not get latest updates. 
Some information may be outdated", + "localBackendDatalinkError": "Cannot create Datalinks on the local drive", "localBackendSecretError": "Cannot create secrets on the local drive", "offlineUploadFilesError": "Cannot upload files when offline", @@ -482,9 +485,9 @@ "hidePassword": "Hide password", "showPassword": "Show password", "copiedToClipboard": "Copied to clipboard", - "noResultsFound": "No results found.", - "youAreOffline": "You are offline.", - "cannotCreateAssetsHere": "You do not have the permissions to create assets here.", + "noResultsFound": "No results found", + "youAreOffline": "You are offline", + "cannotCreateAssetsHere": "You do not have the permissions to create assets here", "enableVersionChecker": "Enable Version Checker", "enableVersionCheckerDescription": "Show a dialog if the current version of the desktop app does not match the latest version.", "disableAnimations": "Disable animations", diff --git a/app/common/src/utilities/errors.ts b/app/common/src/utilities/errors.ts new file mode 100644 index 000000000000..a359fd3e1a7a --- /dev/null +++ b/app/common/src/utilities/errors.ts @@ -0,0 +1,52 @@ +/** + * An error that occurs when a network request fails. + * + * This error is used to indicate that a network request failed due to a network error, + * such as a timeout or a connection error. + */ +export class NetworkError extends Error { + /** + * Create a new {@link NetworkError} with the specified message. + * @param message - The message to display when the error is thrown. + */ + constructor(message: string, options?: ErrorOptions) { + super(message, options) + this.name = 'NetworkError' + } +} + +/** + * An error that occurs when the user is offline. + * + * This error is used to indicate that the user is offline, such as when they are + * not connected to the internet or when they are on an airplane. + */ +export class OfflineError extends Error { + /** + * Create a new {@link OfflineError} with the specified message. 
+ * @param message - The message to display when the error is thrown. + */ + constructor(message: string = 'User is offline', options?: ErrorOptions) { + super(message, options) + this.name = 'OfflineError' + } +} + +/** + * An error with a display message. + * + * This message can be shown to a user. + */ +export class ErrorWithDisplayMessage extends Error { + readonly displayMessage: string + /** + * Create a new {@link ErrorWithDisplayMessage} with the specified message and display message. + * @param message - The message to display when the error is thrown. + * @param options - The options to pass to the error. + */ + constructor(message: string, options: ErrorOptions & { displayMessage: string }) { + super(message, options) + this.name = 'ErrorWithDisplayMessage' + this.displayMessage = options.displayMessage + } +} diff --git a/app/gui/src/dashboard/App.tsx b/app/gui/src/dashboard/App.tsx index fe8d68ee04e0..1b7123965687 100644 --- a/app/gui/src/dashboard/App.tsx +++ b/app/gui/src/dashboard/App.tsx @@ -98,6 +98,8 @@ import { STATIC_QUERY_OPTIONS } from '#/utilities/reactQuery' import { useInitAuthService } from '#/authentication/service' import { InvitedToOrganizationModal } from '#/modals/InvitedToOrganizationModal' +import { useMutation } from '@tanstack/react-query' +import { useOffline } from './hooks/offlineHooks' // ============================ // === Global configuration === @@ -215,6 +217,9 @@ export default function App(props: AppProps) { }, }) + const { isOffline } = useOffline() + const { getText } = textProvider.useText() + const queryClient = props.queryClient // Force all queries to be stale @@ -236,6 +241,24 @@ export default function App(props: AppProps) { refetchInterval: 2 * 60 * 1000, }) + const { mutate: executeBackgroundUpdate } = useMutation({ + mutationKey: ['refetch-queries', { isOffline }], + scope: { id: 'refetch-queries' }, + mutationFn: () => queryClient.refetchQueries({ type: 'all' }), + networkMode: 'online', + onError: () => { 
+ toastify.toast.error(getText('refetchQueriesError'), { + position: 'bottom-right', + }) + }, + }) + + React.useEffect(() => { + if (!isOffline) { + executeBackgroundUpdate() + } + }, [executeBackgroundUpdate, isOffline]) + // Both `BackendProvider` and `InputBindingsProvider` depend on `LocalStorageProvider`. // Note that the `Router` must be the parent of the `AuthProvider`, because the `AuthProvider` // will redirect the user between the login/register pages and the dashboard. diff --git a/app/gui/src/dashboard/assets/offline_filled.svg b/app/gui/src/dashboard/assets/offline_filled.svg new file mode 100644 index 000000000000..a85d8b336489 --- /dev/null +++ b/app/gui/src/dashboard/assets/offline_filled.svg @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/app/gui/src/dashboard/assets/offline_outline.svg b/app/gui/src/dashboard/assets/offline_outline.svg new file mode 100644 index 000000000000..da5fe69221db --- /dev/null +++ b/app/gui/src/dashboard/assets/offline_outline.svg @@ -0,0 +1,3 @@ + + + diff --git a/app/gui/src/dashboard/components/AriaComponents/Form/components/FormError.tsx b/app/gui/src/dashboard/components/AriaComponents/Form/components/FormError.tsx index c1f1f32c1910..99e822d36afb 100644 --- a/app/gui/src/dashboard/components/AriaComponents/Form/components/FormError.tsx +++ b/app/gui/src/dashboard/components/AriaComponents/Form/components/FormError.tsx @@ -3,9 +3,7 @@ * * Form error component. 
*/ - -import * as React from 'react' - +import Offline from '#/assets/offline_filled.svg' import * as textProvider from '#/providers/TextProvider' import * as reactAriaComponents from '#/components/AriaComponents' @@ -22,7 +20,7 @@ export interface FormErrorProps extends Omit + {offlineMessage} diff --git a/app/gui/src/dashboard/components/ErrorBoundary.tsx b/app/gui/src/dashboard/components/ErrorBoundary.tsx index ee5dc324d4dc..71b71194f997 100644 --- a/app/gui/src/dashboard/components/ErrorBoundary.tsx +++ b/app/gui/src/dashboard/components/ErrorBoundary.tsx @@ -1,4 +1,5 @@ /** @file Catches errors in child components. */ +import Offline from '#/assets/offline_filled.svg' import * as React from 'react' import * as sentry from '@sentry/react' @@ -7,8 +8,6 @@ import * as errorBoundary from 'react-error-boundary' import * as detect from 'enso-common/src/detect' -import * as offlineHooks from '#/hooks/offlineHooks' - import * as textProvider from '#/providers/TextProvider' import * as ariaComponents from '#/components/AriaComponents' @@ -16,6 +15,8 @@ import * as result from '#/components/Result' import { useEventCallback } from '#/hooks/eventCallbackHooks' import * as errorUtils from '#/utilities/error' +import { OfflineError } from '#/utilities/HttpClient' +import SvgMask from './SvgMask' // ===================== // === ErrorBoundary === @@ -38,7 +39,9 @@ export interface ErrorBoundaryProps > > { /** Called before the fallback is shown. 
*/ - readonly onBeforeFallbackShown?: (args: OnBeforeFallbackShownArgs) => void + readonly onBeforeFallbackShown?: ( + args: OnBeforeFallbackShownArgs, + ) => React.ReactNode | null | undefined readonly title?: string readonly subtitle?: string } @@ -53,7 +56,7 @@ export function ErrorBoundary(props: ErrorBoundaryProps) { FallbackComponent = ErrorDisplay, onError = () => {}, onReset = () => {}, - onBeforeFallbackShown = () => {}, + onBeforeFallbackShown = () => null, title, subtitle, ...rest @@ -63,15 +66,19 @@ export function ErrorBoundary(props: ErrorBoundaryProps) { {({ reset }) => ( ( - - )} + FallbackComponent={(fallbackProps) => { + const displayMessage = errorUtils.extractDisplayMessage(fallbackProps.error) + + return ( + + ) + }} onError={(error, info) => { sentry.captureException(error, { extra: { info } }) onError(error, info) @@ -90,39 +97,52 @@ export function ErrorBoundary(props: ErrorBoundaryProps) { /** Props for a {@link ErrorDisplay}. */ export interface ErrorDisplayProps extends errorBoundary.FallbackProps { readonly status?: result.ResultProps['status'] - readonly onBeforeFallbackShown?: (args: OnBeforeFallbackShownArgs) => void + readonly onBeforeFallbackShown?: (args: OnBeforeFallbackShownArgs) => React.ReactNode | undefined readonly resetQueries?: () => void - readonly title?: string | undefined - readonly subtitle?: string | undefined + readonly title?: string | null | undefined + readonly subtitle?: string | null | undefined readonly error: unknown } /** Default fallback component to show when there is an error. */ export function ErrorDisplay(props: ErrorDisplayProps): React.JSX.Element { const { getText } = textProvider.useText() - const { isOffline } = offlineHooks.useOffline() const { error, resetErrorBoundary, - title = getText('somethingWentWrong'), - subtitle = isOffline ? getText('offlineErrorMessage') : getText('arbitraryErrorSubtitle'), - status = isOffline ? 
'info' : 'error', + title, + subtitle, + status, onBeforeFallbackShown, resetQueries = () => {}, } = props + const isOfflineError = error instanceof OfflineError + const message = errorUtils.getMessageOrToString(error) const stack = errorUtils.tryGetStack(error) - onBeforeFallbackShown?.({ error, resetErrorBoundary, resetQueries }) + const render = onBeforeFallbackShown?.({ error, resetErrorBoundary, resetQueries }) const onReset = useEventCallback(() => { resetErrorBoundary() }) - return ( - + const finalTitle = title ?? getText('somethingWentWrong') + const finalSubtitle = + subtitle ?? + (isOfflineError ? getText('offlineErrorMessage') : getText('arbitraryErrorSubtitle')) + const finalStatus = + status ?? (isOfflineError ? : 'error') + + const defaultRender = ( + ) + + return <>{render ?? defaultRender} } export { useErrorBoundary, withErrorBoundary } from 'react-error-boundary' diff --git a/app/gui/src/dashboard/components/OfflineNotificationManager.tsx b/app/gui/src/dashboard/components/OfflineNotificationManager.tsx index 5776459e34cc..3f2b4377aca6 100644 --- a/app/gui/src/dashboard/components/OfflineNotificationManager.tsx +++ b/app/gui/src/dashboard/components/OfflineNotificationManager.tsx @@ -32,21 +32,24 @@ export function OfflineNotificationManager(props: OfflineNotificationManagerProp const toastId = 'offline' const { getText } = textProvider.useText() - offlineHooks.useOfflineChange((isOffline) => { - toast.toast.dismiss(toastId) - - if (isOffline) { - toast.toast.info(getText('offlineToastMessage'), { - toastId, - hideProgressBar: true, - }) - } else { - toast.toast.info(getText('onlineToastMessage'), { - toastId, - hideProgressBar: true, - }) - } - }) + offlineHooks.useOfflineChange( + (isOffline) => { + toast.toast.dismiss(toastId) + + if (isOffline) { + toast.toast.info(getText('offlineToastMessage'), { + toastId, + hideProgressBar: true, + }) + } else { + toast.toast.info(getText('onlineToastMessage'), { + toastId, + hideProgressBar: true, + }) + 
} + }, + { triggerImmediate: false }, + ) return ( diff --git a/app/gui/src/dashboard/components/Suspense.tsx b/app/gui/src/dashboard/components/Suspense.tsx index bf4342246a34..fd833df7fdce 100644 --- a/app/gui/src/dashboard/components/Suspense.tsx +++ b/app/gui/src/dashboard/components/Suspense.tsx @@ -7,26 +7,13 @@ import * as React from 'react' -import * as reactQuery from '@tanstack/react-query' - -import * as debounceValue from '#/hooks/debounceValueHooks' -import * as offlineHooks from '#/hooks/offlineHooks' - -import * as textProvider from '#/providers/TextProvider' - -import * as result from '#/components/Result' - import * as loader from './Loader' /** Props for {@link Suspense} component. */ export interface SuspenseProps extends React.SuspenseProps { readonly loaderProps?: loader.LoaderProps - readonly offlineFallback?: React.ReactNode - readonly offlineFallbackProps?: result.ResultProps } -const OFFLINE_FETCHING_TOGGLE_DELAY_MS = 250 - /** * Suspense is a component that allows you to wrap a part of your application that might suspend, * showing a fallback to the user while waiting for the data to load. @@ -35,19 +22,10 @@ const OFFLINE_FETCHING_TOGGLE_DELAY_MS = 250 * And handles offline scenarios. 
*/ export function Suspense(props: SuspenseProps) { - const { children, loaderProps, fallback, offlineFallback, offlineFallbackProps } = props + const { children, loaderProps, fallback } = props return ( - - } - > + }> {children} ) @@ -58,8 +36,6 @@ export function Suspense(props: SuspenseProps) { */ interface LoaderProps extends loader.LoaderProps { readonly fallback?: SuspenseProps['fallback'] - readonly offlineFallback?: SuspenseProps['offlineFallback'] - readonly offlineFallbackProps?: SuspenseProps['offlineFallbackProps'] } /** @@ -74,35 +50,7 @@ interface LoaderProps extends loader.LoaderProps { * we want to know if there are ongoing requests once React renders the fallback in suspense */ export function Loader(props: LoaderProps) { - const { fallback, offlineFallbackProps, offlineFallback, ...loaderProps } = props - - const { getText } = textProvider.useText() - - const { isOffline } = offlineHooks.useOffline() - - const paused = reactQuery.useIsFetching({ fetchStatus: 'paused' }) - - const fetching = reactQuery.useIsFetching({ - predicate: (query) => - query.state.fetchStatus === 'fetching' || - query.state.status === 'pending' || - query.state.status === 'success', - }) - - // we use small debounce to avoid flickering when query is resolved, - // but fallback is still showing - const shouldDisplayOfflineMessage = debounceValue.useDebounceValue( - isOffline && paused >= 0 && fetching === 0, - OFFLINE_FETCHING_TOGGLE_DELAY_MS, - ) + const { fallback, ...loaderProps } = props - if (shouldDisplayOfflineMessage) { - return ( - offlineFallback ?? ( - - ) - ) - } else { - return fallback ?? - } + return fallback ?? 
} diff --git a/app/gui/src/dashboard/hooks/backendHooks.tsx b/app/gui/src/dashboard/hooks/backendHooks.tsx index fbed7fb15f63..6abe76bc0168 100644 --- a/app/gui/src/dashboard/hooks/backendHooks.tsx +++ b/app/gui/src/dashboard/hooks/backendHooks.tsx @@ -342,8 +342,6 @@ export function listDirectoryQueryOptions(options: ListDirectoryQueryOptions) { } } }, - - meta: { persist: false }, }) } diff --git a/app/gui/src/dashboard/hooks/offlineHooks.ts b/app/gui/src/dashboard/hooks/offlineHooks.ts index 0c2d509f57cd..c20f0c075891 100644 --- a/app/gui/src/dashboard/hooks/offlineHooks.ts +++ b/app/gui/src/dashboard/hooks/offlineHooks.ts @@ -10,7 +10,7 @@ export function useOffline() { const isOnline = React.useSyncExternalStore( reactQuery.onlineManager.subscribe.bind(reactQuery.onlineManager), () => reactQuery.onlineManager.isOnline(), - () => navigator.onLine, + () => false, ) return { isOffline: !isOnline } diff --git a/app/gui/src/dashboard/layouts/Drive.tsx b/app/gui/src/dashboard/layouts/Drive.tsx index 87a10d4bb2ec..e8310d846cf7 100644 --- a/app/gui/src/dashboard/layouts/Drive.tsx +++ b/app/gui/src/dashboard/layouts/Drive.tsx @@ -2,6 +2,7 @@ import * as React from 'react' import * as appUtils from '#/appUtils' +import Offline from '#/assets/offline_filled.svg' import * as offlineHooks from '#/hooks/offlineHooks' import * as toastAndLogHooks from '#/hooks/toastAndLogHooks' @@ -25,6 +26,7 @@ import * as ariaComponents from '#/components/AriaComponents' import * as result from '#/components/Result' import { ErrorBoundary, useErrorBoundary } from '#/components/ErrorBoundary' +import SvgMask from '#/components/SvgMask' import { listDirectoryQueryOptions } from '#/hooks/backendHooks' import { useEventCallback } from '#/hooks/eventCallbackHooks' import { useTargetDirectory } from '#/providers/DriveProvider' @@ -32,6 +34,7 @@ import { DirectoryDoesNotExistError, Plan } from '#/services/Backend' import AssetQuery from '#/utilities/AssetQuery' import * as download from 
'#/utilities/download' import * as github from '#/utilities/github' +import { OfflineError } from '#/utilities/HttpClient' import { tryFindSelfPermission } from '#/utilities/permissions' import * as tailwindMerge from '#/utilities/tailwindMerge' import { useQueryClient, useSuspenseQuery } from '@tanstack/react-query' @@ -58,7 +61,7 @@ const CATEGORIES_TO_DISPLAY_START_MODAL = ['cloud', 'local', 'local-directory'] /** Contains directory path and directory contents (projects, folders, secrets and files). */ function Drive(props: DriveProps) { - const { category, resetCategory } = props + const { category, resetCategory, setCategory } = props const { isOffline } = offlineHooks.useOffline() const toastAndLog = toastAndLogHooks.useToastAndLog() @@ -122,6 +125,18 @@ function Drive(props: DriveProps) { resetQueries() resetErrorBoundary() } + + if (error instanceof OfflineError) { + return ( + { + setCategory(nextCategory) + resetErrorBoundary() + }} + /> + ) + } }} > @@ -152,7 +167,6 @@ function DriveAssetsView(props: DriveProps) { const { user } = authProvider.useFullUserSession() const localBackend = backendProvider.useLocalBackend() const backend = backendProvider.useBackend(category) - const { getText } = textProvider.useText() const dispatchAssetListEvent = eventListProvider.useDispatchAssetListEvent() const [query, setQuery] = React.useState(() => AssetQuery.fromString('')) @@ -263,26 +277,7 @@ function DriveAssetsView(props: DriveProps) {
    {status === 'offline' ? - - {supportLocalBackend && ( - { - setCategory({ type: 'local' }) - }} - > - {getText('switchToLocal')} - - )} - + :