From 34010972664971520d708c1d7b0e1eb5b2866577 Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Thu, 27 Apr 2023 12:21:07 +0100 Subject: [PATCH 01/34] Switch execution environment before the complete notification (#6449) Fixes the runtime test ``` - should recompute expressions changing an execution environment *** FAILED *** "[live]" did not equal "[design]" (RuntimeServerTest.scala:2721) Analysis: "[live]" -> "[design]" ``` --- .../org/enso/interpreter/instrument/job/ExecuteJob.scala | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ExecuteJob.scala b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ExecuteJob.scala index 6bdf2f6add6b..23ff69bf0492 100644 --- a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ExecuteJob.scala +++ b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ExecuteJob.scala @@ -46,15 +46,13 @@ class ExecuteJob( Api.Response(Api.ExecutionFailed(contextId, failure)) ) } - ctx.endpoint.sendToClient( - Api.Response(Api.ExecutionComplete(contextId)) - ) - StartBackgroundProcessingJob.startBackgroundJobs() } finally { originalExecutionEnvironment.foreach(context.setExecutionEnvironment) ctx.locking.releaseReadCompilationLock() ctx.locking.releaseContextLock(contextId) } + ctx.endpoint.sendToClient(Api.Response(Api.ExecutionComplete(contextId))) + StartBackgroundProcessingJob.startBackgroundJobs() } } From 9a0f739cb006fcd7da8a3dbe53a1d94ace5e127d Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Thu, 27 Apr 2023 13:42:34 +0100 Subject: [PATCH 02/34] Visualization preprocessor should not catch errors (#6423) related #6323 Fixes the following scenario: - preprocessor function catches `ThreadInterruptedException` and returns it as a JSON ```json {"error":"org.enso.interpreter.runtime.control.ThreadInterruptedException"} ``` - engine thinks that the visualization was computed successfully and just returns it to 
the user (without doing any fallback logic like retrying the computation) - IDE displays blank visualization --- .../Standard/Visualization/0.0.0-dev/src/Helpers.enso | 7 ------- .../Visualization/0.0.0-dev/src/SQL/Visualization.enso | 2 +- .../Visualization/0.0.0-dev/src/Table/Visualization.enso | 2 +- test/Visualization_Tests/src/Table_Spec.enso | 9 +++++---- 4 files changed, 7 insertions(+), 13 deletions(-) diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso index 46635555c6cb..7f50956bddf1 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso @@ -115,13 +115,6 @@ Error.map_valid self _ = self Error.catch_ : Any -> Any Error.catch_ self ~val = self.catch Any (_-> val) -## PRIVATE -recover_errors : Any -> Any -recover_errors ~body = - result = Panic.recover Any body - result.catch Any err-> - JS_Object.from_pairs [["error", err.to_display_text]] . to_text - ## PRIVATE Guides the visualization system to display the most suitable graphical diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/SQL/Visualization.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/SQL/Visualization.enso index 74f49dd9c813..b93d4c12b542 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/SQL/Visualization.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/SQL/Visualization.enso @@ -18,7 +18,7 @@ import project.Helpers Expected Enso types are inferred based on known SQL types and their mapping to Enso types. 
prepare_visualization : Table.IR.Query -> Text -prepare_visualization x = Helpers.recover_errors <| +prepare_visualization x = prepared = x.to_sql.prepare code = prepared.first interpolations = prepared.second diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso index 5660b70a26de..16d12e2819fa 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso @@ -20,7 +20,7 @@ import project.Helpers In case of Database backed data, it materializes a fragment of the data. prepare_visualization : Any -> Integer -> Text -prepare_visualization y max_rows=1000 = Helpers.recover_errors <| +prepare_visualization y max_rows=1000 = x = Warning.set y [] result = case x of diff --git a/test/Visualization_Tests/src/Table_Spec.enso b/test/Visualization_Tests/src/Table_Spec.enso index 2927ebb3f11d..6d0026101119 100644 --- a/test/Visualization_Tests/src/Table_Spec.enso +++ b/test/Visualization_Tests/src/Table_Spec.enso @@ -35,11 +35,12 @@ visualization_spec connection = JS_Object.from_pairs pairs . to_text Test.group "Table Visualization" <| - Test.specify "should wrap internal errors" <| + Test.specify "should forward internal errors" <| bad_table = Database_Table.Value Nothing Nothing Nothing Nothing - vis = Visualization.prepare_visualization bad_table 2 - json = JS_Object.from_pairs [["error", "Method `set_limit` of type Nothing could not be found."]] - vis . should_equal json.to_text + result = Panic.catch Any (Visualization.prepare_visualization bad_table 2) caught_panic-> + caught_panic.payload + message = "Method `set_limit` of type Nothing could not be found." + result.to_display_text . 
should_equal message Test.specify "should visualize database tables" <| vis = Visualization.prepare_visualization t 1 From 0c7c3bdeaf914c74e8d8af02c482454a1b01f8a8 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Thu, 27 Apr 2023 14:51:59 +0100 Subject: [PATCH 03/34] Fix for the massive number of warnings when renaming with invalid names. (#6450) * Rename makeUnique overloads to avoid issue when Nothing is passed. Suspend warnings when building the output table to avoid mass warning duplication. * Add test for mixed invalid names. Adjust so a single warning attached. * PR comments. --- .../Database/0.0.0-dev/src/Data/Table.enso | 3 +- .../Table/0.0.0-dev/src/Data/Table.enso | 12 +++---- .../org/enso/table/excel/ExcelHeaders.java | 2 +- .../org/enso/table/read/DelimitedReader.java | 2 +- .../org/enso/table/util/NameDeduplicator.java | 4 +-- .../org/enso/table/write/ExcelWriter.java | 2 +- .../Table_Tests/src/In_Memory/Table_Spec.enso | 32 ++++++++++++++----- 7 files changed, 35 insertions(+), 22 deletions(-) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index a4bc3bda8920..1e3a86427679 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -391,7 +391,8 @@ type Table rename_columns : Map (Text | Integer | Column_Selector) Text | Vector Text -> Boolean -> Problem_Behavior -> Table ! 
Missing_Input_Columns | Column_Indexes_Out_Of_Range | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Output_Column_Names | Duplicate_Output_Column_Names rename_columns self column_map=["Column"] (error_on_missing_columns=True) (on_problems=Report_Warning) = new_names = Table_Helpers.rename_columns internal_columns=self.internal_columns mapping=column_map error_on_missing_columns=error_on_missing_columns on_problems=on_problems - new_names.if_not_error (self.updated_columns (self.internal_columns.map c-> c.rename (new_names.at c.name))) + Warning.with_suspended new_names names-> + self.updated_columns (self.internal_columns.map c-> c.rename (names.at c.name)) ## PRIVATE diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index 0a3e7fa69d77..46ada8c4d882 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -517,7 +517,8 @@ type Table rename_columns : Map (Text | Integer | Column_Selector) Text | Vector Text -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Output_Column_Names | Duplicate_Output_Column_Names rename_columns self column_map=["Column"] (error_on_missing_columns=True) (on_problems=Report_Warning) = new_names = Table_Helpers.rename_columns internal_columns=self.columns mapping=column_map error_on_missing_columns=error_on_missing_columns on_problems=on_problems - new_names.if_not_error (Table.new (self.columns.map c-> c.rename (new_names.at c.name))) + Warning.with_suspended new_names names-> + Table.new (self.columns.map c-> c.rename (names.at c.name)) ## Returns a new table with the columns renamed based on entries in the first row. 
@@ -538,13 +539,8 @@ type Table table.use_first_row_as_names use_first_row_as_names : Problem_Behavior -> Table use_first_row_as_names self (on_problems=Report_Warning) = - mapper = col-> - val = col.at 0 - case val of - _ : Text -> val - Nothing -> Nothing - _ -> val.to_text - new_names = self.columns.map mapper + new_names = self.first_row.to_vector.map c-> + if c.is_nothing then Nothing else c.to_text self.drop (First 1) . rename_columns new_names on_problems=on_problems ## ALIAS group, summarize diff --git a/std-bits/table/src/main/java/org/enso/table/excel/ExcelHeaders.java b/std-bits/table/src/main/java/org/enso/table/excel/ExcelHeaders.java index 4b42f6793872..bc964adc0594 100644 --- a/std-bits/table/src/main/java/org/enso/table/excel/ExcelHeaders.java +++ b/std-bits/table/src/main/java/org/enso/table/excel/ExcelHeaders.java @@ -82,7 +82,7 @@ private static String[] inferHeaders(ExcelRow row, ExcelRow nextRow, int startCo return null; } - return deduplicator.makeUnique(rowNames); + return deduplicator.makeUniqueArray(rowNames); } /** Specifies how to set the headers for the returned table. 
*/ diff --git a/std-bits/table/src/main/java/org/enso/table/read/DelimitedReader.java b/std-bits/table/src/main/java/org/enso/table/read/DelimitedReader.java index 60b3975fa3b5..82103f7c2946 100644 --- a/std-bits/table/src/main/java/org/enso/table/read/DelimitedReader.java +++ b/std-bits/table/src/main/java/org/enso/table/read/DelimitedReader.java @@ -327,7 +327,7 @@ private WithProblems> headersFromRow(String[] row) { Arrays.stream(row).map(this::parseHeader).collect(Collectors.toList()); NameDeduplicator deduplicator = new NameDeduplicator(); - List names = deduplicator.makeUnique(preprocessedHeaders); + List names = deduplicator.makeUniqueList(preprocessedHeaders); return new WithProblems<>(names, deduplicator.getProblems()); } diff --git a/std-bits/table/src/main/java/org/enso/table/util/NameDeduplicator.java b/std-bits/table/src/main/java/org/enso/table/util/NameDeduplicator.java index 4744600eb629..73f8486e9eec 100644 --- a/std-bits/table/src/main/java/org/enso/table/util/NameDeduplicator.java +++ b/std-bits/table/src/main/java/org/enso/table/util/NameDeduplicator.java @@ -36,11 +36,11 @@ public String makeValid(String input) { return input; } - public List makeUnique(List names) { + public List makeUniqueList(List names) { return names.stream().map(this::makeUnique).collect(Collectors.toList()); } - public String[] makeUnique(String[] names) { + public String[] makeUniqueArray(String[] names) { return Arrays.stream(names).map(this::makeUnique).toArray(String[]::new); } diff --git a/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java b/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java index 91cb5ec202df..4bf633af4d82 100644 --- a/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java +++ b/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java @@ -159,7 +159,7 @@ private static void appendRangeWithTable(Workbook workbook, ExcelRange range, Ex throw new IllegalArgumentException("Cannot append by name when 
headers are not present in the existing data."); } String[] currentHeaders = sheet.get(expanded.getTopRow()).getCellsAsText(expanded.getLeftColumn(), expanded.getRightColumn()); - yield ColumnMapper.mapColumnsByName(table, new NameDeduplicator().makeUnique(currentHeaders)); + yield ColumnMapper.mapColumnsByName(table, new NameDeduplicator().makeUniqueArray(currentHeaders)); } default -> throw new IllegalArgumentException("Internal Error: appendRangeWithTable called with illegal existing data mode '" + existingDataMode + "'."); diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 239190e18f6f..34adf2474416 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -525,33 +525,49 @@ spec = Test.specify "should work happily with mixed types" <| c_0 = ['A', ["H", "B", "C"]] - c_00 = ['AA', ["", "B", "C"]] c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]] c_2 = ['x', [1, 2, 3]] c_3 = ['Y', [5.3, 56.2, 6.3]] c_4 = ['Z', [True, False, True]] - c_5 = ['ZZ', [Nothing, False, True]] - table = Table.new [c_0, c_00, c_1, c_2, c_3, c_4, c_5] - expect_column_names ["H", "Column_1", "1980-01-01", "1", "5.3", "True", "Column_2"] table.use_first_row_as_names + table = Table.new [c_0, c_1, c_2, c_3, c_4] + expect_column_names ["H", "1980-01-01", "1", "5.3", "True"] table.use_first_row_as_names Test.specify "should correctly handle problems: invalid names ''" <| c_0 = ['A', ["", "B", "C"]] + c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]] c_2 = ['x', [1, 2, 3]] - table = Table.new [c_0, c_2] + c_3 = ['Y', [5.3, 56.2, 6.3]] + c_4 = ['Z', [True, False, True]] + table = Table.new [c_0, c_1, c_2, c_3, c_4] action = table.use_first_row_as_names on_problems=_ - tester = expect_column_names ["Column_1", "1"] + tester = expect_column_names ["Column_1", "1980-01-01", "1", "5.3", "True"] problems = [Invalid_Output_Column_Names.Error [""]] 
Problems.test_problem_handling action problems tester Test.specify "should correctly handle problems: invalid names Nothing" <| c_0 = ['A', ["A", "B", "C"]] + c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]] c_2 = ['x', [Nothing, 2, 3]] - table = Table.new [c_0, c_2] + c_3 = ['Y', [5.3, 56.2, 6.3]] + c_4 = ['Z', [True, False, True]] + table = Table.new [c_0, c_1, c_2, c_3, c_4] action = table.use_first_row_as_names on_problems=_ - tester = expect_column_names ["A", "Column_1"] + tester = expect_column_names ["A", "1980-01-01", "Column_1", "5.3", "True"] problems = [Invalid_Output_Column_Names.Error [Nothing]] Problems.test_problem_handling action problems tester + Test.specify "should correctly handle problems: multiple invalid names" <| + c_0 = ['A', ["", "B", "C"]] + c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]] + c_2 = ['x', [Nothing, 2, 3]] + c_3 = ['Y', [5.3, 56.2, 6.3]] + c_4 = ['Z', [True, False, True]] + table = Table.new [c_0, c_1, c_2, c_3, c_4] + action = table.use_first_row_as_names on_problems=_ + tester = expect_column_names ["Column_1", "1980-01-01", "Column_2", "5.3", "True"] + problems = [Invalid_Output_Column_Names.Error ["", Nothing]] + Problems.test_problem_handling action problems tester + Test.specify "should correctly handle problems: duplicate names" <| c_0 = ['A', ["A", "B", "C"]] c_1 = ['B', ["A", "B", "C"]] From 00510de0531c93c38a11d1008bf2abb04b9c9163 Mon Sep 17 00:00:00 2001 From: Michael Mauderer Date: Thu, 27 Apr 2023 16:46:58 +0200 Subject: [PATCH 04/34] Implement workflow-level Play button business-logic (#6427) Implements #6179 https://user-images.githubusercontent.com/1428930/234429549-2e25a8e2-a677-475c-a4b2-49f00421b9ad.mp4 --- app/gui/src/controller/graph/executed.rs | 7 +++++++ app/gui/src/model/execution_context.rs | 5 +++++ app/gui/src/model/execution_context/plain.rs | 4 ++++ .../model/execution_context/synchronized.rs | 15 +++++++++++++++ app/gui/src/model/project.rs | 2 ++ 
app/gui/src/model/project/synchronized.rs | 1 + app/gui/src/presenter/project.rs | 18 ++++++++++++++++++ .../execution-environment-selector/src/lib.rs | 4 +++- .../graph-editor/src/execution_environment.rs | 3 +++ app/gui/view/graph-editor/src/lib.rs | 1 + 10 files changed, 59 insertions(+), 1 deletion(-) diff --git a/app/gui/src/controller/graph/executed.rs b/app/gui/src/controller/graph/executed.rs index 580015b91b8f..a6fa946f0e93 100644 --- a/app/gui/src/controller/graph/executed.rs +++ b/app/gui/src/controller/graph/executed.rs @@ -365,6 +365,13 @@ impl Handle { self.execution_ctx.set_execution_environment(execution_environment).await?; Ok(()) } + + /// Trigger a clean execution of the current graph with the "live" execution environment. That + /// means old computations and caches will be discarded. + pub async fn trigger_clean_live_execution(&self) -> FallibleResult { + self.execution_ctx.trigger_clean_live_execution().await?; + Ok(()) + } } diff --git a/app/gui/src/model/execution_context.rs b/app/gui/src/model/execution_context.rs index 7f3d4b3d2f4c..8d4bb6a3ad03 100644 --- a/app/gui/src/model/execution_context.rs +++ b/app/gui/src/model/execution_context.rs @@ -511,6 +511,11 @@ pub trait API: Debug { &'a self, execution_environment: ExecutionEnvironment, ) -> BoxFuture<'a, FallibleResult>; + + /// Trigger a clean execution of the current graph with the "live" execution environment. That + /// means old computations and caches will be discarded. 
+ #[allow(clippy::needless_lifetimes)] // Note: Needless lifetimes + fn trigger_clean_live_execution<'a>(&'a self) -> BoxFuture<'a, FallibleResult>; } // Note: Needless lifetimes diff --git a/app/gui/src/model/execution_context/plain.rs b/app/gui/src/model/execution_context/plain.rs index 057ed4a000af..f58cc8068d10 100644 --- a/app/gui/src/model/execution_context/plain.rs +++ b/app/gui/src/model/execution_context/plain.rs @@ -287,6 +287,10 @@ impl model::execution_context::API for ExecutionContext { self.execution_environment.set(environment); futures::future::ready(Ok(())).boxed_local() } + + fn trigger_clean_live_execution(&self) -> LocalBoxFuture { + futures::future::ready(Ok(())).boxed_local() + } } diff --git a/app/gui/src/model/execution_context/synchronized.rs b/app/gui/src/model/execution_context/synchronized.rs index f055bd71ab28..bb8826785da0 100644 --- a/app/gui/src/model/execution_context/synchronized.rs +++ b/app/gui/src/model/execution_context/synchronized.rs @@ -329,6 +329,21 @@ impl model::execution_context::API for ExecutionContext { } .boxed_local() } + + fn trigger_clean_live_execution(&self) -> BoxFuture { + async move { + self.language_server + .client + .recompute( + &self.id, + &language_server::InvalidatedExpressions::All, + &Some(ExecutionEnvironment::Live), + ) + .await?; + Ok(()) + } + .boxed_local() + } } impl Drop for ExecutionContext { diff --git a/app/gui/src/model/project.rs b/app/gui/src/model/project.rs index 74eb3a032521..c67c22f6e4b6 100644 --- a/app/gui/src/model/project.rs +++ b/app/gui/src/model/project.rs @@ -179,6 +179,8 @@ pub enum Notification { ConnectionLost(BackendConnection), /// Indicates that the project VCS status has changed. VcsStatusChanged(VcsStatus), + /// Indicates that the project has finished execution. + ExecutionFinished, } /// Denotes one of backend connections used by a project. 
diff --git a/app/gui/src/model/project/synchronized.rs b/app/gui/src/model/project/synchronized.rs index bf6435d54a38..4967efe096b5 100644 --- a/app/gui/src/model/project/synchronized.rs +++ b/app/gui/src/model/project/synchronized.rs @@ -544,6 +544,7 @@ impl Project { Event::Notification(Notification::ExecutionStatus(_)) => {} Event::Notification(Notification::ExecutionComplete { context_id }) => { execution_update_handler(context_id, ExecutionUpdate::Completed); + publisher.notify(model::project::Notification::ExecutionFinished); } Event::Notification(Notification::ExpressionValuesComputed(_)) => { // the notification is superseded by `ExpressionUpdates`. diff --git a/app/gui/src/presenter/project.rs b/app/gui/src/presenter/project.rs index 7f8ffc5f6689..5ab2c404523c 100644 --- a/app/gui/src/presenter/project.rs +++ b/app/gui/src/presenter/project.rs @@ -225,6 +225,11 @@ impl Model { self.view.graph().model.breadcrumbs.set_project_changed(changed); } + fn execution_finished(&self) { + self.view.graph().frp.set_read_only(false); + self.view.graph().frp.execution_finished.emit(()); + } + fn execution_context_interrupt(&self) { let controller = self.graph_controller.clone_ref(); executor::global::spawn(async move { @@ -306,6 +311,15 @@ impl Model { error!("Invalid execution environment: {execution_environment:?}"); } } + + fn trigger_clean_live_execution(&self) { + let graph_controller = self.graph_controller.clone_ref(); + executor::global::spawn(async move { + if let Err(err) = graph_controller.trigger_clean_live_execution().await { + error!("Error starting clean live execution: {err}"); + } + }); + } } @@ -405,6 +419,7 @@ impl Project { view.set_read_only <+ view.toggle_read_only.map(f_!(model.toggle_read_only())); eval graph_view.execution_environment((env) model.execution_environment_changed(env)); + eval_ graph_view.execution_environment_play_button_pressed( model.trigger_clean_live_execution()); } let graph_controller = 
self.model.graph_controller.clone_ref(); @@ -461,6 +476,9 @@ impl Project { Notification::VcsStatusChanged(VcsStatus::Clean) => { model.set_project_changed(false); } + Notification::ExecutionFinished => { + model.execution_finished(); + } }; std::future::ready(()) }); diff --git a/app/gui/view/execution-environment-selector/src/lib.rs b/app/gui/view/execution-environment-selector/src/lib.rs index a43eac6ca09c..a868c82c6fb2 100644 --- a/app/gui/view/execution-environment-selector/src/lib.rs +++ b/app/gui/view/execution-environment-selector/src/lib.rs @@ -87,11 +87,12 @@ ensogl::define_endpoints_2! { Input { set_available_execution_environments (ExecutionEnvironments), set_execution_environment (ExecutionEnvironment), + reset_play_button_state (), } Output { selected_execution_environment (ExecutionEnvironment), play_press(), - size (Vector2), + size(Vector2), } } @@ -268,6 +269,7 @@ impl component::Frp for Frp { model.set_play_button_visibility(play_button_visibility); }); play_button.reset <+ selected_entry.constant(()); + play_button.reset <+ input.reset_play_button_state; // == Outputs == diff --git a/app/gui/view/graph-editor/src/execution_environment.rs b/app/gui/view/graph-editor/src/execution_environment.rs index 43bb57bb0a08..76fd2a1cfc4f 100644 --- a/app/gui/view/graph-editor/src/execution_environment.rs +++ b/app/gui/view/graph-editor/src/execution_environment.rs @@ -49,7 +49,10 @@ pub fn init_frp(frp: &Frp, model: &GraphEditorModelWithNetwork) { <- any(selector.selected_execution_environment,external_update); out.execution_environment <+ execution_environment_update; out.execution_environment_play_button_pressed <+ selector.play_press; + frp.set_read_only <+ selector.play_press.constant(true); + // === Play Button === + selector.reset_play_button_state <+ frp.execution_finished; // === Layout === diff --git a/app/gui/view/graph-editor/src/lib.rs b/app/gui/view/graph-editor/src/lib.rs index f78549af2d2b..ce24e9b4983b 100644 --- 
a/app/gui/view/graph-editor/src/lib.rs +++ b/app/gui/view/graph-editor/src/lib.rs @@ -594,6 +594,7 @@ ensogl::define_endpoints_2! { /// Set the execution environmenta available to the graph. set_available_execution_environments (Rc>), set_execution_environment (ExecutionEnvironment), + execution_finished(), // === Debug === From 376415ab17e3e49ff2978d14eb556cf5344a30b2 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Thu, 27 Apr 2023 17:09:46 +0200 Subject: [PATCH 05/34] Mark some tests as (unconditionally) flaky (#6453) Too many spurious failures on CI now. Will need to be tackled at some point. --- .../websocket/json/VcsManagerTest.scala | 143 +++++++++--------- .../scala/org/enso/testkit/FlakySpec.scala | 7 +- 2 files changed, 78 insertions(+), 72 deletions(-) diff --git a/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/VcsManagerTest.scala b/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/VcsManagerTest.scala index b32a3deff2e1..4db4bb780913 100644 --- a/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/VcsManagerTest.scala +++ b/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/VcsManagerTest.scala @@ -10,7 +10,7 @@ import org.eclipse.jgit.storage.file.FileRepositoryBuilder import org.enso.languageserver.boot.{ProfilingConfig, StartupConfig} import org.enso.languageserver.data._ import org.enso.languageserver.vcsmanager.VcsApi -import org.enso.testkit.RetrySpec +import org.enso.testkit.{FlakySpec, RetrySpec} import java.io.File import java.nio.charset.StandardCharsets @@ -19,7 +19,7 @@ import java.time.{Clock, LocalDate} import scala.concurrent.duration._ import scala.jdk.CollectionConverters._ -class VcsManagerTest extends BaseServerTest with RetrySpec { +class VcsManagerTest extends BaseServerTest with RetrySpec with FlakySpec { override def mkConfig: Config = { val directoriesDir = Files.createTempDirectory(null).toRealPath() @@ -617,12 
+617,13 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { text1.get(0) should equal("file contents") } - "reset to a named save" taggedAs Retry in withCleanRoot { client => - timingsConfig = timingsConfig.withAutoSave(0.5.seconds) - val sleepDuration: Long = 2 * 1000 // 2 seconds - val client2 = getInitialisedWsClient() - val testFileName = "Foo2.enso" - client.send(json""" + "reset to a named save" taggedAs (SkipOnFailure, Retry) in withCleanRoot { + client => + timingsConfig = timingsConfig.withAutoSave(0.5.seconds) + val sleepDuration: Long = 2 * 1000 // 2 seconds + val client2 = getInitialisedWsClient() + val testFileName = "Foo2.enso" + client.send(json""" { "jsonrpc": "2.0", "method": "vcs/status", "id": 1, @@ -634,7 +635,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client.fuzzyExpectJson(json""" + client.fuzzyExpectJson(json""" { "jsonrpc": "2.0", "id": 1, "result": { @@ -648,36 +649,36 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } """) - val srcDir = testContentRoot.file.toPath.resolve("src") - Files.createDirectory(srcDir) - val fooPath = srcDir.resolve(testFileName) - fooPath.toFile.createNewFile() - Files.write( - fooPath, - "file contents".getBytes(StandardCharsets.UTF_8) - ) - // "file contents" version: 4d23065da489de360890285072c209b2b39d45d12283dbb5d1fa4389 + val srcDir = testContentRoot.file.toPath.resolve("src") + Files.createDirectory(srcDir) + val fooPath = srcDir.resolve(testFileName) + fooPath.toFile.createNewFile() + Files.write( + fooPath, + "file contents".getBytes(StandardCharsets.UTF_8) + ) + // "file contents" version: 4d23065da489de360890285072c209b2b39d45d12283dbb5d1fa4389 - add(testContentRoot.file, srcDir) - commit(testContentRoot.file, "Add missing files") - val barPath = srcDir.resolve("Bar.enso") - barPath.toFile.createNewFile() - Files.write( - barPath, - "file contents b".getBytes(StandardCharsets.UTF_8) - ) - add(testContentRoot.file, srcDir) - 
commit(testContentRoot.file, "Release") - Files.write( - fooPath, - "different contents".getBytes(StandardCharsets.UTF_8) - ) - // "different contents" version: e2bf8493b00a13749e643e2f970b6025c227cc91340c2acb7d67e1da + add(testContentRoot.file, srcDir) + commit(testContentRoot.file, "Add missing files") + val barPath = srcDir.resolve("Bar.enso") + barPath.toFile.createNewFile() + Files.write( + barPath, + "file contents b".getBytes(StandardCharsets.UTF_8) + ) + add(testContentRoot.file, srcDir) + commit(testContentRoot.file, "Release") + Files.write( + fooPath, + "different contents".getBytes(StandardCharsets.UTF_8) + ) + // "different contents" version: e2bf8493b00a13749e643e2f970b6025c227cc91340c2acb7d67e1da - add(testContentRoot.file, srcDir) - commit(testContentRoot.file, "More changes") + add(testContentRoot.file, srcDir) + commit(testContentRoot.file, "More changes") - client.send(json""" + client.send(json""" { "jsonrpc": "2.0", "method": "text/openFile", "id": 2, @@ -690,7 +691,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } """) - client.expectJson(json""" + client.expectJson(json""" { "jsonrpc": "2.0", "id": 2, "result": { @@ -700,7 +701,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client2.send(json""" + client2.send(json""" { "jsonrpc": "2.0", "method": "text/openFile", "id": 2, @@ -712,7 +713,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client2.expectJson(json""" + client2.expectJson(json""" { "jsonrpc": "2.0", "id": 2, "result": { @@ -723,7 +724,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } """) - client.send(json""" + client.send(json""" { "jsonrpc": "2.0", "method": "capability/acquire", "id": 3, @@ -739,14 +740,14 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } """) - client.expectJson(json""" + client.expectJson(json""" { "jsonrpc": "2.0", "id": 3, "result": null } """) - client.send(json""" + client.send(json""" { "jsonrpc": 
"2.0", "method": "text/applyEdit", "id": 4, @@ -771,13 +772,13 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client.expectJson(json""" + client.expectJson(json""" { "jsonrpc": "2.0", "id": 4, "result": null } """) - client2.expectJson(json""" + client2.expectJson(json""" { "jsonrpc" : "2.0", "method" : "text/didChange", "params" : { @@ -813,9 +814,9 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } """) - // Ensure auto-save kicks in - Thread.sleep(sleepDuration) - client.expectJson(json""" + // Ensure auto-save kicks in + Thread.sleep(sleepDuration) + client.expectJson(json""" { "jsonrpc": "2.0", "method":"text/autoSave", "params": { @@ -826,7 +827,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client2.expectJson(json""" + client2.expectJson(json""" { "jsonrpc": "2.0", "method":"text/autoSave", "params": { @@ -838,7 +839,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } """) - client.send(json""" + client.send(json""" { "jsonrpc": "2.0", "method": "vcs/status", "id": 5, @@ -850,7 +851,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client.fuzzyExpectJson(json""" + client.fuzzyExpectJson(json""" { "jsonrpc": "2.0", "id": 5, "result": { @@ -871,13 +872,13 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - val allCommits = commits(testContentRoot.file) - val sndToLast = allCommits.tail.head + val allCommits = commits(testContentRoot.file) + val sndToLast = allCommits.tail.head - val text0 = Files.readAllLines(fooPath) - text0.get(0) should equal("bar contents") + val text0 = Files.readAllLines(fooPath) + text0.get(0) should equal("bar contents") - client.send(json""" + client.send(json""" { "jsonrpc": "2.0", "method": "vcs/restore", "id": 6, @@ -890,7 +891,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client.expectJson(json""" + client.expectJson(json""" { "jsonrpc" : "2.0", "method" : 
"text/didChange", "params" : { @@ -924,7 +925,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { ] } }""") - client.expectJson(json""" + client.expectJson(json""" { "jsonrpc": "2.0", "id": 6, "result": { @@ -937,7 +938,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client2.expectJson(json""" + client2.expectJson(json""" { "jsonrpc" : "2.0", "method" : "text/didChange", "params" : { @@ -972,10 +973,10 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } }""") - val text1 = Files.readAllLines(fooPath) - text1.get(0) should equal("file contents") + val text1 = Files.readAllLines(fooPath) + text1.get(0) should equal("file contents") - client.send(json""" + client.send(json""" { "jsonrpc": "2.0", "method": "text/applyEdit", "id": 7, @@ -1000,14 +1001,14 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client.expectJson(json""" + client.expectJson(json""" { "jsonrpc": "2.0", "id": 7, "id": 7, "result": null } """) - client2.expectJson(json""" + client2.expectJson(json""" { "jsonrpc" : "2.0", "method" : "text/didChange", "params" : { @@ -1042,9 +1043,9 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } }""") - // Ensure auto-save kicks in - Thread.sleep(sleepDuration) - client.expectJson(json""" + // Ensure auto-save kicks in + Thread.sleep(sleepDuration) + client.expectJson(json""" { "jsonrpc": "2.0", "method":"text/autoSave", "params": { @@ -1055,7 +1056,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client2.expectJson(json""" + client2.expectJson(json""" { "jsonrpc": "2.0", "method":"text/autoSave", "params": { @@ -1066,10 +1067,10 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - val text2 = Files.readAllLines(fooPath) - text2.get(0) should equal("foo contents") + val text2 = Files.readAllLines(fooPath) + text2.get(0) should equal("foo contents") - client.send(json""" + client.send(json""" { "jsonrpc": "2.0", 
"method": "vcs/restore", "id": 8, @@ -1082,7 +1083,7 @@ class VcsManagerTest extends BaseServerTest with RetrySpec { } } """) - client.expectJson(json""" + client.expectJson(json""" { "jsonrpc": "2.0", "id": 8, "error": { diff --git a/lib/scala/testkit/src/main/scala/org/enso/testkit/FlakySpec.scala b/lib/scala/testkit/src/main/scala/org/enso/testkit/FlakySpec.scala index 452c443613f4..0c1634b9636a 100644 --- a/lib/scala/testkit/src/main/scala/org/enso/testkit/FlakySpec.scala +++ b/lib/scala/testkit/src/main/scala/org/enso/testkit/FlakySpec.scala @@ -11,16 +11,21 @@ import org.scalatest._ */ trait FlakySpec extends TestSuite { - /** Tags test as _flaky_. */ + /** Tags test as conditionally _flaky_. */ object Flaky extends Tag("org.enso.test.flaky") { val isEnabled = sys.env.contains("CI_TEST_FLAKY_ENABLE") } + /** Tags test as pending on failure */ + object SkipOnFailure extends Tag("org.enso.test.skiponfailure") + override def withFixture(test: NoArgTest): Outcome = super.withFixture(test) match { case Failed(_) | Canceled(_) if Flaky.isEnabled && test.tags.contains(Flaky.name) => Pending + case Failed(_) | Canceled(_) if test.tags.contains(SkipOnFailure.name) => + Pending case outcome => outcome } From 233bf86d2f62c52070fbb149219cdfebcd6c7dd9 Mon Sep 17 00:00:00 2001 From: Michael Mauderer Date: Thu, 27 Apr 2023 19:30:26 +0200 Subject: [PATCH 06/34] Execution environment integration fixes. (#6434) Follow up to https://github.com/enso-org/enso/pull/6341#issuecomment-1522341055 . Contains some refactoring and solves some left over to-dos in the code. 
--- Cargo.lock | 1 + .../src/language_server/types.rs | 34 +++++------------ app/gui/docs/product/shortcuts.md | 3 +- app/gui/src/controller/graph/executed.rs | 5 +++ app/gui/src/model/execution_context.rs | 3 ++ app/gui/src/model/execution_context/plain.rs | 4 ++ .../model/execution_context/synchronized.rs | 4 ++ app/gui/src/presenter/graph.rs | 38 ++++--------------- app/gui/src/presenter/project.rs | 26 ++++++------- .../execution-environment-selector/Cargo.toml | 1 + .../execution-environment-selector/src/lib.rs | 15 +++++--- .../graph-editor/src/execution_environment.rs | 28 +++----------- app/gui/view/graph-editor/src/lib.rs | 13 +++---- app/gui/view/graph-editor/src/shortcuts.rs | 5 ++- .../component/drop-down-menu/src/lib.rs | 2 +- 15 files changed, 74 insertions(+), 108 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bde01b80ce76..c047daa3e1e7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4418,6 +4418,7 @@ dependencies = [ name = "ide-view-execution-environment-selector" version = "0.1.0" dependencies = [ + "engine-protocol", "enso-frp", "enso-prelude", "ensogl", diff --git a/app/gui/controller/engine-protocol/src/language_server/types.rs b/app/gui/controller/engine-protocol/src/language_server/types.rs index aa890c766af8..eaec9b5772a5 100644 --- a/app/gui/controller/engine-protocol/src/language_server/types.rs +++ b/app/gui/controller/engine-protocol/src/language_server/types.rs @@ -1164,7 +1164,7 @@ pub struct LibraryComponentGroup { /// /// For more information, see /// https://github.com/enso-org/design/blob/main/epics/basic-libraries/write-action-control/design.md. -#[derive(Hash, Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Display)] +#[derive(Hash, Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub enum ExecutionEnvironment { /// Allows editing the graph, but the `Output` context is disabled, so it prevents accidental /// changes. 
@@ -1173,6 +1173,15 @@ pub enum ExecutionEnvironment { Live, } +impl Display for ExecutionEnvironment { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Design => write!(f, "design"), + Self::Live => write!(f, "live"), + } + } +} + impl Default for ExecutionEnvironment { fn default() -> Self { ExecutionEnvironment::Design @@ -1184,29 +1193,6 @@ impl ExecutionEnvironment { pub fn list_all() -> Vec { vec![ExecutionEnvironment::Design, ExecutionEnvironment::Live] } - - /// List all available execution environments as ImStrings. Useful for UI. - pub fn list_all_as_imstrings() -> Vec { - Self::list_all().iter().map(|env| (*env).into()).collect() - } -} - -impl From for ImString { - fn from(env: ExecutionEnvironment) -> Self { - ImString::new(env.to_string()) - } -} - -impl TryFrom<&str> for ExecutionEnvironment { - type Error = (); - - fn try_from(value: &str) -> core::result::Result { - match value.to_lowercase().as_str() { - "design" => Ok(ExecutionEnvironment::Design), - "live" => Ok(ExecutionEnvironment::Live), - _ => Err(()), - } - } } impl ExecutionEnvironment { diff --git a/app/gui/docs/product/shortcuts.md b/app/gui/docs/product/shortcuts.md index d109fb96c52c..e1a649759797 100644 --- a/app/gui/docs/product/shortcuts.md +++ b/app/gui/docs/product/shortcuts.md @@ -50,7 +50,8 @@ broken and require further investigation. | escape | Cancel current action. For example, drop currently dragged connection. | | cmd+shift+t | Terminate the program execution | | cmd+shift+r | Re-execute the program | -| cmd+shift+e | Toggle the execution environment between Live and Design. | +| cmd+shift+k | Switch the execution environment to Design. | +| cmd+shift+l | Switch the execution environment to Live. 
| #### Navigation diff --git a/app/gui/src/controller/graph/executed.rs b/app/gui/src/controller/graph/executed.rs index a6fa946f0e93..4fd46904a3cf 100644 --- a/app/gui/src/controller/graph/executed.rs +++ b/app/gui/src/controller/graph/executed.rs @@ -366,6 +366,11 @@ impl Handle { Ok(()) } + /// Get the current execution environment. + pub fn execution_environment(&self) -> ExecutionEnvironment { + self.execution_ctx.execution_environment() + } + /// Trigger a clean execution of the current graph with the "live" execution environment. That /// means old computations and caches will be discarded. pub async fn trigger_clean_live_execution(&self) -> FallibleResult { diff --git a/app/gui/src/model/execution_context.rs b/app/gui/src/model/execution_context.rs index 8d4bb6a3ad03..8316c13f7cac 100644 --- a/app/gui/src/model/execution_context.rs +++ b/app/gui/src/model/execution_context.rs @@ -512,6 +512,9 @@ pub trait API: Debug { execution_environment: ExecutionEnvironment, ) -> BoxFuture<'a, FallibleResult>; + /// Get the execution environment of the context. + fn execution_environment(&self) -> ExecutionEnvironment; + /// Trigger a clean execution of the current graph with the "live" execution environment. That /// means old computations and caches will be discarded. 
#[allow(clippy::needless_lifetimes)] // Note: Needless lifetimes diff --git a/app/gui/src/model/execution_context/plain.rs b/app/gui/src/model/execution_context/plain.rs index f58cc8068d10..b0ee3823e82e 100644 --- a/app/gui/src/model/execution_context/plain.rs +++ b/app/gui/src/model/execution_context/plain.rs @@ -288,6 +288,10 @@ impl model::execution_context::API for ExecutionContext { futures::future::ready(Ok(())).boxed_local() } + fn execution_environment(&self) -> ExecutionEnvironment { + self.execution_environment.get() + } + fn trigger_clean_live_execution(&self) -> LocalBoxFuture { futures::future::ready(Ok(())).boxed_local() } diff --git a/app/gui/src/model/execution_context/synchronized.rs b/app/gui/src/model/execution_context/synchronized.rs index bb8826785da0..41d5dd58a2f6 100644 --- a/app/gui/src/model/execution_context/synchronized.rs +++ b/app/gui/src/model/execution_context/synchronized.rs @@ -330,6 +330,10 @@ impl model::execution_context::API for ExecutionContext { .boxed_local() } + fn execution_environment(&self) -> ExecutionEnvironment { + self.model.execution_environment.get() + } + fn trigger_clean_live_execution(&self) -> BoxFuture { async move { self.language_server diff --git a/app/gui/src/presenter/graph.rs b/app/gui/src/presenter/graph.rs index d62c4396348c..ce515d9043f7 100644 --- a/app/gui/src/presenter/graph.rs +++ b/app/gui/src/presenter/graph.rs @@ -12,7 +12,6 @@ use crate::presenter::graph::state::State; use double_representation::context_switch::Context; use double_representation::context_switch::ContextSwitch; use double_representation::context_switch::ContextSwitchExpression; -use engine_protocol::language_server::ExecutionEnvironment; use engine_protocol::language_server::SuggestionId; use enso_frp as frp; use futures::future::LocalBoxFuture; @@ -83,15 +82,13 @@ pub fn default_node_position() -> Vector2 { #[derive(Debug)] struct Model { - project: model::Project, - controller: controller::ExecutedGraph, - view: 
view::graph_editor::GraphEditor, - state: Rc, - _visualization: Visualization, - widget: controller::Widget, - _execution_stack: CallStack, - // TODO(#5930): Move me once we synchronise the execution environment with the language server. - execution_environment: Rc>, + project: model::Project, + controller: controller::ExecutedGraph, + view: view::graph_editor::GraphEditor, + state: Rc, + _visualization: Visualization, + widget: controller::Widget, + _execution_stack: CallStack, } impl Model { @@ -118,7 +115,6 @@ impl Model { _visualization: visualization, widget, _execution_stack: execution_stack, - execution_environment: Default::default(), } } @@ -188,7 +184,7 @@ impl Model { /// ``` fn node_action_context_switch(&self, id: ViewNodeId, active: bool) { let context = Context::Output; - let environment = self.execution_environment.get(); + let environment = self.controller.execution_environment(); let current_state = environment.output_context_enabled(); let switch = if current_state { ContextSwitch::Disable } else { ContextSwitch::Enable }; let expr = if active { @@ -493,15 +489,6 @@ impl Model { } } } - - fn toggle_execution_environment(&self) -> ExecutionEnvironment { - let new_environment = match self.execution_environment.get() { - ExecutionEnvironment::Live => ExecutionEnvironment::Design, - ExecutionEnvironment::Design => ExecutionEnvironment::Live, - }; - self.execution_environment.set(new_environment); - new_environment - } } @@ -717,15 +704,6 @@ impl Graph { } })); - - // === Execution Environment === - - // TODO(#5930): Delete me once we synchronise the execution environment with the - // language server. 
- view.set_execution_environment <+ view.toggle_execution_environment.map( - f_!(model.toggle_execution_environment())); - - // === Refreshing Nodes === remove_node <= update_data.map(|update| update.remove_nodes()); diff --git a/app/gui/src/presenter/project.rs b/app/gui/src/presenter/project.rs index 5ab2c404523c..2805043bf9b5 100644 --- a/app/gui/src/presenter/project.rs +++ b/app/gui/src/presenter/project.rs @@ -296,20 +296,16 @@ impl Model { fn execution_environment_changed( &self, - execution_environment: &ide_view::execution_environment_selector::ExecutionEnvironment, + execution_environment: ide_view::execution_environment_selector::ExecutionEnvironment, ) { - if let Ok(execution_environment) = execution_environment.as_str().try_into() { - let graph_controller = self.graph_controller.clone_ref(); - executor::global::spawn(async move { - if let Err(err) = - graph_controller.set_execution_environment(execution_environment).await - { - error!("Error setting execution environment: {err}"); - } - }); - } else { - error!("Invalid execution environment: {execution_environment:?}"); - } + let graph_controller = self.graph_controller.clone_ref(); + executor::global::spawn(async move { + if let Err(err) = + graph_controller.set_execution_environment(execution_environment).await + { + error!("Error setting execution environment: {err}"); + } + }); } fn trigger_clean_live_execution(&self) { @@ -418,7 +414,7 @@ impl Project { eval_ view.execution_context_restart(model.execution_context_restart()); view.set_read_only <+ view.toggle_read_only.map(f_!(model.toggle_read_only())); - eval graph_view.execution_environment((env) model.execution_environment_changed(env)); + eval graph_view.execution_environment((env) model.execution_environment_changed(*env)); eval_ graph_view.execution_environment_play_button_pressed( model.trigger_clean_live_execution()); } @@ -433,7 +429,7 @@ impl Project { /// Initialises execution environment. 
fn init_execution_environments(self) -> Self { let graph = &self.model.view.graph(); - let entries = Rc::new(ExecutionEnvironment::list_all_as_imstrings()); + let entries = Rc::new(ExecutionEnvironment::list_all()); graph.set_available_execution_environments(entries); self } diff --git a/app/gui/view/execution-environment-selector/Cargo.toml b/app/gui/view/execution-environment-selector/Cargo.toml index 216223954cc4..00f965fe70b6 100644 --- a/app/gui/view/execution-environment-selector/Cargo.toml +++ b/app/gui/view/execution-environment-selector/Cargo.toml @@ -16,3 +16,4 @@ ensogl-drop-down-menu = { path = "../../../../lib/rust/ensogl/component/drop-dow ensogl-gui-component = { path = "../../../../lib/rust/ensogl/component/gui" } ensogl-hardcoded-theme = { path = "../../../../lib/rust/ensogl/app/theme/hardcoded" } ensogl-list-view = { path = "../../../../lib/rust/ensogl/component/list-view" } +engine-protocol = { path = "../../controller/engine-protocol" } diff --git a/app/gui/view/execution-environment-selector/src/lib.rs b/app/gui/view/execution-environment-selector/src/lib.rs index a868c82c6fb2..0fd0d43b91a5 100644 --- a/app/gui/view/execution-environment-selector/src/lib.rs +++ b/app/gui/view/execution-environment-selector/src/lib.rs @@ -74,13 +74,14 @@ impl Style { // =========== /// An identifier of a execution environment. -pub type ExecutionEnvironment = ImString; +pub type ExecutionEnvironment = engine_protocol::language_server::ExecutionEnvironment; + /// A list of execution environments. pub type ExecutionEnvironments = Rc>; /// Provide a dummy list of execution environments. Used for testing and demo scenes. pub fn make_dummy_execution_environments() -> ExecutionEnvironments { - Rc::new(vec!["Design".to_string().into(), "Live".to_string().into()]) + Rc::new(ExecutionEnvironment::list_all()) } ensogl::define_endpoints_2! 
{ @@ -157,7 +158,9 @@ impl Model { } fn set_entries(&self, entries: Rc>) { - let provider = ensogl_list_view::entry::AnyModelProvider::from(entries.clone_ref()); + let labels = entries.iter().map(|e| e.to_string().capitalize_first_letter()).collect_vec(); + let labels = Rc::new(labels); + let provider = ensogl_list_view::entry::AnyModelProvider::from(labels); self.dropdown.set_entries(provider); self.dropdown.set_selected(0); } @@ -261,11 +264,11 @@ impl component::Frp for Frp { selected_id <- dropdown.frp.chosen_entry.unwrap(); selection <- all(input.set_available_execution_environments, selected_id); - selected_entry <- selection.map(|(entries, entry_id)| entries[*entry_id].clone()); - output.selected_execution_environment <+ selected_entry; + selected_entry <- selection.map(|(entries, entry_id)| entries[*entry_id]); + output.selected_execution_environment <+ selected_entry.on_change(); eval selected_entry ([model] (execution_mode) { - let play_button_visibility = matches!(execution_mode.to_lowercase().as_str(), "design"); + let play_button_visibility = matches!(execution_mode, ExecutionEnvironment::Design); model.set_play_button_visibility(play_button_visibility); }); play_button.reset <+ selected_entry.constant(()); diff --git a/app/gui/view/graph-editor/src/execution_environment.rs b/app/gui/view/graph-editor/src/execution_environment.rs index 76fd2a1cfc4f..119d2baa0155 100644 --- a/app/gui/view/graph-editor/src/execution_environment.rs +++ b/app/gui/view/graph-editor/src/execution_environment.rs @@ -4,23 +4,12 @@ use super::*; use crate::Frp; -use ide_view_execution_environment_selector::ExecutionEnvironment; - // ============================= // === Execution Environment === // ============================= -fn get_next_execution_environment( - current: &ExecutionEnvironment, - available: &[ExecutionEnvironment], -) -> Option { - let index = available.iter().position(|mode| mode == current)?; - let next_index = (index + 1) % available.len(); - 
Some(available[next_index].clone()) -} - /// Initialise the FRP logic for the execution environment selector. pub fn init_frp(frp: &Frp, model: &GraphEditorModelWithNetwork) { let out = &frp.private.output; @@ -34,20 +23,15 @@ pub fn init_frp(frp: &Frp, model: &GraphEditorModelWithNetwork) { // === Execution Environment Changes === selector.set_available_execution_environments <+ frp.set_available_execution_environments; - selected_environment <- frp.set_execution_environment.map(|env| (*env).into()); - environment_state - <- all(out.execution_environment,frp.set_available_execution_environments); - - environment_toggled <- environment_state.sample(&frp.toggle_execution_environment); - toggled_execution_environment <- environment_toggled.map(|(mode,available)| - get_next_execution_environment(mode,available)).unwrap(); - external_update <- any(selected_environment,toggled_execution_environment); + switch_to_live <- + frp.switch_to_live_execution_environment.constant(ExecutionEnvironment::Live); + switch_to_design <- + frp.switch_to_design_execution_environment.constant(ExecutionEnvironment::Design); + external_update <- any(switch_to_live,switch_to_design); selector.set_execution_environment <+ external_update; - execution_environment_update - <- any(selector.selected_execution_environment,external_update); - out.execution_environment <+ execution_environment_update; + out.execution_environment <+ selector.selected_execution_environment.on_change(); out.execution_environment_play_button_pressed <+ selector.play_press; frp.set_read_only <+ selector.play_press.constant(true); diff --git a/app/gui/view/graph-editor/src/lib.rs b/app/gui/view/graph-editor/src/lib.rs index ce24e9b4983b..68ab05f854d5 100644 --- a/app/gui/view/graph-editor/src/lib.rs +++ b/app/gui/view/graph-editor/src/lib.rs @@ -589,11 +589,10 @@ ensogl::define_endpoints_2! 
{ // === Execution Environment === - // TODO(#5930): Temporary shortcut for testing different execution environments - toggle_execution_environment(), - /// Set the execution environmenta available to the graph. - set_available_execution_environments (Rc>), - set_execution_environment (ExecutionEnvironment), + /// Set the execution environments available to the graph. + set_available_execution_environments (Rc>), + switch_to_design_execution_environment(), + switch_to_live_execution_environment(), execution_finished(), @@ -771,7 +770,7 @@ ensogl::define_endpoints_2! { min_x_spacing_for_new_nodes (f32), /// The selected environment mode. - execution_environment (execution_environment_selector::ExecutionEnvironment), + execution_environment (ExecutionEnvironment), /// A press of the execution environment selector play button. execution_environment_play_button_pressed (), } @@ -1750,7 +1749,7 @@ impl GraphEditorModelWithNetwork { // === Execution Environment === - node.set_execution_environment <+ self.model.frp.input.set_execution_environment; + node.set_execution_environment <+ self.model.frp.output.execution_environment; } diff --git a/app/gui/view/graph-editor/src/shortcuts.rs b/app/gui/view/graph-editor/src/shortcuts.rs index 73ab6b14c048..42ffd759b226 100644 --- a/app/gui/view/graph-editor/src/shortcuts.rs +++ b/app/gui/view/graph-editor/src/shortcuts.rs @@ -63,6 +63,7 @@ pub const SHORTCUTS: &[(ensogl::application::shortcut::ActionType, &str, &str, & (Press, "debug_mode", "ctrl shift enter", "debug_push_breadcrumb"), (Press, "debug_mode", "ctrl shift up", "debug_pop_breadcrumb"), (Press, "debug_mode", "ctrl n", "add_node_at_cursor"), - // === Execution Mode === - (Press, "", "shift ctrl e", "toggle_execution_environment"), + // Execution Environment + (Press, "", "cmd shift k", "switch_to_design_execution_environment"), + (Press, "", "cmd shift l", "switch_to_live_execution_environment"), ]; diff --git a/lib/rust/ensogl/component/drop-down-menu/src/lib.rs 
b/lib/rust/ensogl/component/drop-down-menu/src/lib.rs index 76335a7a0c74..de5ff5928ea6 100644 --- a/lib/rust/ensogl/component/drop-down-menu/src/lib.rs +++ b/lib/rust/ensogl/component/drop-down-menu/src/lib.rs @@ -361,8 +361,8 @@ impl DropDownMenu { chosen_entry_unmasked <- model.selection_menu.chosen_entry.map(f!((entry_id) model.get_unmasked_index(*entry_id)) ); - frp.source.chosen_entry <+ chosen_entry_unmasked; set_selected <- any(frp.input.set_selected, chosen_entry_unmasked); + frp.source.chosen_entry <+ set_selected; eval set_selected([model](entry_id) { if let Some(entry_id) = entry_id { From 4114368199204f5d6dd23b1a9a5553ffbb9c95ec Mon Sep 17 00:00:00 2001 From: Ilya Bogdanov Date: Thu, 27 Apr 2023 21:10:27 +0300 Subject: [PATCH 07/34] Unselect all nodes when the project name is edited (#6432) Fixes #6373 --- app/gui/view/graph-editor/src/lib.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/gui/view/graph-editor/src/lib.rs b/app/gui/view/graph-editor/src/lib.rs index 68ab05f854d5..d0fbbcc41c0a 100644 --- a/app/gui/view/graph-editor/src/lib.rs +++ b/app/gui/view/graph-editor/src/lib.rs @@ -2839,6 +2839,8 @@ fn new_graph_editor(app: &Application) -> GraphEditor { frp::extend! { network edit_mode <- bool(&inputs.edit_mode_off,&inputs.edit_mode_on); eval edit_mode ((edit_mode_on) model.breadcrumbs.ide_text_edit_mode.emit(edit_mode_on)); + // Deselect nodes when the project name is edited. 
+ frp.deselect_all_nodes <+ model.breadcrumbs.project_mouse_down; } From 462016a428f5e21a51d0cc99cac2e18e4ea5e15b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Wa=C5=9Bko?= Date: Thu, 27 Apr 2023 22:06:17 +0200 Subject: [PATCH 08/34] Create database table from memory (#6429) --- CHANGELOG.md | 3 + build.sbt | 2 + .../Standard/Base/0.0.0-dev/src/Panic.enso | 25 ++++ .../Standard/Base/0.0.0-dev/src/Random.enso | 8 ++ .../0.0.0-dev/src/Connection/Connection.enso | 45 +----- .../Database/0.0.0-dev/src/Data/Dialect.enso | 1 + .../Database/0.0.0-dev/src/Errors.enso | 28 ++++ .../src/Extensions/Upload_Table.enso | 120 ++++++++++++++++ .../src/Internal/Base_Generator.enso | 18 +++ .../0.0.0-dev/src/Internal/IR/Query.enso | 21 +++ .../src/Internal/JDBC_Connection.enso | 89 +++++++----- .../Postgres/Postgres_Connection.enso | 35 ++--- .../Internal/Postgres/Postgres_Dialect.enso | 1 + .../Internal/Redshift/Redshift_Dialect.enso | 7 + .../Internal/SQLite/SQLite_Connection.enso | 34 ++--- .../src/Internal/SQLite/SQLite_Dialect.enso | 2 +- .../Internal/SQLite/SQLite_Type_Mapping.enso | 18 ++- .../Standard/Database/0.0.0-dev/src/Main.enso | 2 + .../Cross_Tab_Spec.enso | 2 +- .../Date_Time_Spec.enso | 4 +- .../Expression_Spec.enso | 9 +- .../Join/Cross_Join_Spec.enso | 3 +- .../Join/Join_Spec.enso | 2 +- .../Join/Zip_Spec.enso | 2 +- .../Transpose_Spec.enso | 2 +- .../Table_Tests/src/Database/Common_Spec.enso | 18 ++- .../src/Database/Helpers/Name_Generator.enso | 8 +- .../src/Database/Postgres_Spec.enso | 42 +++--- .../src/Database/Redshift_Spec.enso | 15 +- .../Table_Tests/src/Database/SQLite_Spec.enso | 17 ++- .../Types/Postgres_Type_Mapping_Spec.enso | 10 +- .../Types/SQLite_Type_Mapping_Spec.enso | 18 ++- .../Table_Tests/src/Database/Upload_Spec.enso | 136 ++++++++++++++++++ .../Table_Tests/src/In_Memory/Table_Spec.enso | 5 +- .../table_test_helpers/ExplodingStorage.java | 110 ++++++++++++++ .../ExplodingStoragePayload.java | 3 + 
test/Tests/src/Semantic/Error_Spec.enso | 46 ++++++ test/Visualization_Tests/src/Table_Spec.enso | 6 +- 38 files changed, 705 insertions(+), 212 deletions(-) create mode 100644 distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso create mode 100644 test/Table_Tests/src/Database/Upload_Spec.enso create mode 100644 test/Tests/polyglot-sources/enso-test-java-helpers/src/main/java/org/enso/table_test_helpers/ExplodingStorage.java create mode 100644 test/Tests/polyglot-sources/enso-test-java-helpers/src/main/java/org/enso/table_test_helpers/ExplodingStoragePayload.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 24b8f08f9a37..52042f1786f3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -396,6 +396,8 @@ - [Added `Table.parse_to_columns`.][6383] - [Added parsing methods for `Integer`, `Decimal`, `Json`, `Date`, `Date_Time`, `Time_Of_Day`, `Time_Zone`, and `URI` to `Text`.][6404] +- [Implemented `create_database_table` allowing upload of in-memory + tables.][6429] [debug-shortcuts]: https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -599,6 +601,7 @@ [6383]: https://github.com/enso-org/enso/pull/6383 [6404]: https://github.com/enso-org/enso/pull/6404 [6347]: https://github.com/enso-org/enso/pull/6347 +[6429]: https://github.com/enso-org/enso/pull/6429 #### Enso Compiler diff --git a/build.sbt b/build.sbt index 599c71b930e0..a9f11104fc83 100644 --- a/build.sbt +++ b/build.sbt @@ -1966,6 +1966,8 @@ lazy val `enso-test-java-helpers` = project result }.value ) + .dependsOn(`std-base` % "provided") + .dependsOn(`std-table` % "provided") lazy val `std-table` = project .in(file("std-bits") / "table") diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso index 5c391a3a1b83..253b577e0a23 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso +++ 
b/distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso @@ -127,6 +127,31 @@ type Panic catch : Any -> Any -> (Caught_Panic -> Any) -> Any catch panic_type ~action handler = @Builtin_Method "Panic.catch" + ## PRIVATE + Runs the provided `action` ensuring that the `finalize` block is called + regardless of if the action succeeds or fails. + + This emulates the `finally` clause in Java. + + If an exception occurs in the `finalizer`, it is propagated. If `action` + throws an exception and the `finalizer` also throws an exception, the + exception thrown by `finalizer` takes precedence. + + > Example + Print the `Cleaning...` message regardless of if the action succeeds. + do_cleanup = + IO.println "Cleaning..." + Panic.with_finally do_cleanup <| + Panic.throw (Illegal_State.Error "Foo") + with_finalizer : Any -> Any -> Any + with_finalizer ~finalizer ~action = + handle_panic caught_panic = + finalizer + Panic.throw caught_panic + result = Panic.catch Any action handle_panic + finalizer + result + ## Executes the provided action and converts a possible panic matching any of the provided types into a dataflow Error. 
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso index c673ae4587b6..865f389a6f11 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso @@ -2,6 +2,7 @@ import project.Any.Any import project.Data.Boolean.Boolean import project.Data.Numbers.Integer import project.Data.Numbers.Decimal +import project.Data.Text.Text import project.Data.Vector.Vector import project.Data.Json.JS_Object import project.Data.Text.Text @@ -11,6 +12,7 @@ import project.System polyglot java import java.lang.Integer as Java_Integer polyglot java import java.util.Random as Java_Random +polyglot java import java.util.UUID polyglot java import org.enso.base.Random_Utils ## UNSTABLE @@ -83,3 +85,9 @@ random_indices : Integer -> Integer -> Random_Number_Generator -> Vector Integer random_indices n k rng = array = Random_Utils.random_indices n k rng.java_random Vector.from_polyglot_array array + +## PRIVATE + Generates a text representation of a randomly generated UUID. 
+random_uuid : Text +random_uuid = + UUID.randomUUID.to_text diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso index 77a94658aef4..a7223eb7d09a 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso @@ -16,7 +16,7 @@ import project.Internal.SQL_Type_Reference.SQL_Type_Reference import project.Internal.Statement_Setter.Statement_Setter from project.Internal.Result_Set import read_column, result_set_to_table -from project.Internal.JDBC_Connection import create_table_statement, handle_sql_errors +from project.Internal.JDBC_Connection import handle_sql_errors from project.Errors import SQL_Error, Table_Not_Found polyglot java import java.lang.UnsupportedOperationException @@ -215,43 +215,6 @@ type Connection stmt.executeUpdate ## PRIVATE - UNSTABLE - This is a prototype function used in our test suites. It may change. - - It creates a new table in the database with the given name (will fail if - the table already existed), inserts the contents of the provided - in-memory table and returns a handle to the newly created table. - - ! Temporary Tables - - Note that temporary tables may not be visible in the table catalog, so - some features which rely on it like the `Table.query` shorthand mode - may not work correctly with temporary tables. - - Arguments: - - name: The name of the table to create. - - table: An In-Memory table specifying the contents to upload. Schema of - the created database table is based on the column types of this table. - - temporary: Specifies whether the table should be marked as temporary. A - temporary table will be dropped after the connection closes and will - usually not be visible to other connections. - - batch_size: Specifies how many rows should be uploaded in a single - batch. 
- upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Table - upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State <| - type_mapping = self.dialect.get_type_mapping - ## TODO [RW] problem handling! probably want to add on_problems to this method? - This is just a prototype, so ignoring this. To be revisited as part of #5161. - type_mapper value_type = type_mapping.value_type_to_sql value_type Problem_Behavior.Report_Error - create_sql = create_table_statement type_mapper name table temporary - create_table = self.execute_update create_sql - - db_table = if create_table.is_error then create_table else self.query (SQL_Query.Table_Name name) - if db_table.is_error.not then - pairs = db_table.internal_columns.map col->[col.name, SQL_Expression.Constant Nothing] - insert_query = self.dialect.generate_sql <| Query.Insert name pairs - insert_template = insert_query.prepare.first - statement_setter = self.dialect.get_statement_setter - self.jdbc_connection.load_table insert_template statement_setter table batch_size - - db_table + drop_table : Text -> Nothing + drop_table self table_name = + self.execute_update (self.dialect.generate_sql (Query.Drop_Table table_name)) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso index 3d7f80b3bd10..7f278986aa4a 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso @@ -6,6 +6,7 @@ import Standard.Table.Internal.Naming_Helpers.Naming_Helpers import Standard.Table.Internal.Problem_Builder.Problem_Builder import project.Connection.Connection.Connection +import project.Data.SQL.Builder import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type import project.Data.Table.Table diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso 
b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso index 68095b259cb8..e39dc95c35fb 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso @@ -134,3 +134,31 @@ type Table_Not_Found to_display_text self = case self.treated_as_query of True -> "The name " + self.name + " was treated as a query, but the query failed with the following error: " + self.related_query_error.to_display_text + "; if you want to force to use that as a table name, wrap it in `SQL_Query.Table_Name`." False -> "Table " + self.name + " was not found in the database." + +type Table_Already_Exists + ## PRIVATE + Indicates that a table already exists in the database. + + Arguments: + - table_name: The name of the table that already exists. + Error table_name:Text + + ## PRIVATE + Pretty print the table already exists error. + to_display_text : Text + to_display_text self = "Table " + self.table_name.pretty + " already exists in the database." + +type Non_Unique_Primary_Key + ## PRIVATE + Indicates that the columns selected for the primary key do not uniquely + identify rows in the table. + + Arguments: + - primary_key: The primary key that is not unique. + Error (primary_key : Vector Text) + + ## PRIVATE + Pretty print the non-unique primary key error. + to_display_text : Text + to_display_text self = + "The primary key " + self.primary_key.to_display_text + " is not unique." 
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso new file mode 100644 index 000000000000..cb200039c909 --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso @@ -0,0 +1,120 @@ +from Standard.Base import all +from Standard.Base.Random import random_uuid +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +import Standard.Table.Data.Table.Table as In_Memory_Table +from Standard.Table.Errors import all +from Standard.Table import Aggregate_Column + +import project.Connection.Connection.Connection +import project.Data.SQL_Query.SQL_Query +import project.Data.Table.Table as Database_Table +import project.Internal.IR.Query.Query +import project.Internal.IR.SQL_Expression.SQL_Expression +from project.Errors import all + +## Creates a new database table from this in-memory table. + + Arguments: + - connection: the database connection to use. The table will be created in + the database and schema associated with this connection. + - table_name: the name of the table to create. If not provided, a random name + will be generated for temporary tables. If `temporary=False`, then a name + must be provided. + - primary_key: the names of the columns to use as the primary key. The first + column from the table is used by default. If it is set to `Nothing` or an + empty vector, no primary key will be created. + - temporary: if set to `True`, the table will be temporary, meaning that it + will be dropped once the `connection` is closed. Defaults to `False`. + - structure_only: if set to `True`, the created table will inherit the + structure (column names and types) of the source table, but no rows will be + inserted. Defaults to `False`. + - on_problems: the behavior to use when encountering non-fatal problems. + Defaults to reporting them as warning. + + ! 
Error Conditions + + - If a table with the given name already exists, then a + `Table_Already_Exists` error is raised. + - If a column type is not supported and is coerced to a similar supported + type, an `Inexact_Type_Coercion` problem is reported according to the + `on_problems` setting. + - If a column type is not supported and there is no replacement (e.g. + native Enso types), an `Unsupported_Type` error is raised. + - If the provided primary key columns are not present in the source table, + `Missing_Input_Columns` error is raised. + - If the selected primary key columns are not unique, a + `Non_Unique_Primary_Key` error is raised. + - An `SQL_Error` may be reported if there is a failure on the database + side. + + If an error has been raised, the table is not created (that may not always + apply to `SQL_Error`). +In_Memory_Table.create_database_table : Connection -> Text|Nothing -> (Vector Text) | Nothing -> Boolean -> Boolean -> Problem_Behavior -> Database_Table ! Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Primary_Key | SQL_Error | Illegal_Argument +In_Memory_Table.create_database_table self connection table_name=Nothing primary_key=[self.columns.first.name] temporary=False structure_only=False on_problems=Problem_Behavior.Report_Warning = Panic.recover SQL_Error <| + resolved_primary_key = resolve_primary_key self primary_key + checked_primary_key = case resolved_primary_key of + Nothing -> resolved_primary_key + _ -> check_primary_key_is_unique self resolved_primary_key . 
if_not_error <| + resolved_primary_key + + effective_table_name = resolve_effective_table_name table_name temporary + + type_mapping = connection.dialect.get_type_mapping + column_descriptors = self.columns.map column-> + name = column.name + value_type = column.value_type + sql_type = type_mapping.value_type_to_sql value_type on_problems + sql_type_text = type_mapping.sql_type_to_text sql_type + Pair.new name sql_type_text + create_statement = connection.dialect.generate_sql <| + Query.Create_Table effective_table_name column_descriptors checked_primary_key temporary + + upload_status = create_statement.if_not_error <| connection.jdbc_connection.run_within_transaction <| + Panic.rethrow <| connection.execute_update create_statement + if structure_only.not then + column_names = column_descriptors.map .first + insert_template = make_batched_insert_template connection effective_table_name column_names + statement_setter = connection.dialect.get_statement_setter + connection.jdbc_connection.batch_insert insert_template statement_setter self default_batch_size + + upload_status.if_not_error <| + connection.query (SQL_Query.Table_Name effective_table_name) + +## PRIVATE + Ensures that provided primary key columns are present in the table and that + there are no duplicates. +resolve_primary_key table primary_key = case primary_key of + Nothing -> Nothing + _ : Vector -> if primary_key.is_empty then Nothing else + table.select_columns primary_key reorder=True . column_names + +## PRIVATE + Checks if the specified primary key uniquely identifies all rows in the table. + + If the key is not unique, it will raise a `Non_Unique_Primary_Key` error. +check_primary_key_is_unique table primary_key = + deduplicated = table.distinct primary_key + if deduplicated.row_count != table.row_count then + Error.throw (Non_Unique_Primary_Key.Error primary_key) + +## PRIVATE + Generates a random table name if it was nothing, if it is allowed (temporary=True). 
+resolve_effective_table_name table_name temporary = case table_name of + Nothing -> if temporary then "temporary-table-"+random_uuid else + Error.throw (Illegal_Argument.Error "A name must be provided when creating a non-temporary table.") + _ : Text -> table_name + +## PRIVATE + The recommended batch size seems to be between 50 and 100. + See: https://docs.oracle.com/cd/E18283_01/java.112/e16548/oraperf.htm#:~:text=batch%20sizes%20in%20the%20general%20range%20of%2050%20to%20100 +default_batch_size = 100 + +## PRIVATE +make_batched_insert_template : Connection -> Text -> Vector (Vector Text) -> SQL_Query +make_batched_insert_template connection table_name column_names = + # We add Nothing as placeholders, they will be replaced with the actual values later. + pairs = column_names.map name->[name, SQL_Expression.Constant Nothing] + query = connection.dialect.generate_sql <| Query.Insert table_name pairs + template = query.prepare.first + template diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso index 0ed276359ec3..73f275443d72 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso @@ -415,6 +415,10 @@ generate_query dialect query = case query of Builder.code "SELECT " ++ prefix ++ cols ++ generate_select_context dialect ctx Query.Insert table_name pairs -> generate_insert_query dialect table_name pairs + Query.Create_Table name columns primary_key temporary -> + generate_create_table dialect name columns primary_key temporary + Query.Drop_Table name -> + Builder.code "DROP TABLE " ++ dialect.wrap_identifier name _ -> Error.throw <| Unsupported_Database_Operation.Error "Unsupported query type: "+query.to_text ## PRIVATE @@ -462,3 +466,17 @@ make_concat make_raw_concat_expr make_contains_expr has_quote args = transformed_expr = 
Builder.code "CASE WHEN " ++ expr ++ " IS NULL THEN '' ELSE " ++ possibly_quoted.paren ++ " END" concatenated = make_raw_concat_expr transformed_expr separator prefix.paren ++ append ++ concatenated ++ append ++ suffix.paren + +## PRIVATE + Generates the SQL code corresponding to a CREATE TABLE query. +generate_create_table dialect name columns primary_key temporary = + column_definitions = columns.map descriptor-> + name = descriptor.first + sql_type_as_text = descriptor.second + dialect.wrap_identifier name ++ " " ++ sql_type_as_text + modifiers = if primary_key.is_nothing then [] else + [Builder.code ", PRIMARY KEY (" ++ Builder.join ", " (primary_key.map dialect.wrap_identifier) ++ ")"] + table_type = if temporary then "TEMPORARY TABLE" else "TABLE" + create_prefix = Builder.code ("CREATE "+table_type+" ") ++ dialect.wrap_identifier name + create_body = (Builder.join ", " column_definitions) ++ (Builder.join "" modifiers) + create_prefix ++ " (" ++ create_body ++ ")" diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Query.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Query.enso index a1f5a14b665d..3529b0a4cdd4 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Query.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Query.enso @@ -28,3 +28,24 @@ type Query - table_name: The name of the table to insert to. - pairs: A list of pairs consisting of a column name and and expression. Insert table_name pairs + + ## PRIVATE + + An SQL query that creates a new table. + + Arguments: + - table_name: the name of the table. + - columns: descriptions of table columns. Each column is described by a + pair of its name and the text representation of its SQL type. + - primary_key: a vector of names of primary key columns or `Nothing` if + no primary key should be set. 
The column names are not checked, it is + the responsibility of the caller to ensure that the columns in + `primary_key` actually exist in `columns`, as otherwise the behavior is + undefined (most likely will result in an `SQL_Error` once executed). + - temporary: specifies if the table should be marked as temporary. + Create_Table (table_name:Text) (columns : Vector (Pair Text Text)) (primary_key : Vector Text) (temporary : Boolean) + + ## PRIVATE + + An SQL query that drops a table. + Drop_Table (table_name:Text) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso index ed8226546ca3..fec77e9af8df 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso @@ -7,11 +7,9 @@ import Standard.Base.Runtime.Managed_Resource.Managed_Resource import Standard.Table.Data.Table.Table as Materialized_Table import Standard.Table.Data.Type.Value_Type.Value_Type -import project.Data.SQL.Builder import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type import project.Data.Table.Table as Database_Table -import project.Internal.Base_Generator import project.Internal.Statement_Setter.Statement_Setter from project.Errors import SQL_Error, SQL_Timeout @@ -120,31 +118,63 @@ type JDBC_Connection Error.throw <| Illegal_Argument.Error 'The provided raw SQL query should not contain any holes ("?").' ## PRIVATE - - Given an insert query template and the associated Database_Table, and a - Materialized_Table of data, load to the database. - load_table : Text -> Statement_Setter -> Materialized_Table -> Integer -> Nothing - load_table self insert_template statement_setter table batch_size = + Runs the following action with auto-commit turned off on this connection + and returns the result. 
+ + Afterwards, the auto-commit setting is reverted to the state before + running this function (so if it was off before, this method may not + change anything). + run_without_autocommit : Any -> Any + run_without_autocommit self ~action = self.with_connection java_connection-> default_autocommit = java_connection.getAutoCommit - java_connection.setAutoCommit False - Managed_Resource.bracket Nothing (_ -> java_connection.setAutoCommit default_autocommit) _-> - Managed_Resource.bracket (java_connection.prepareStatement insert_template) .close stmt-> - num_rows = table.row_count - columns = table.columns - check_rows updates_array expected_size = - updates = Vector.from_polyglot_array updates_array - if updates.length != expected_size then Panic.throw <| Illegal_State.Error "The batch update unexpectedly affected "+updates.length.to_text+" rows instead of "+expected_size.to_text+"." else - updates.each affected_rows-> - if affected_rows != 1 then - Panic.throw <| Illegal_State.Error "A single update within the batch unexpectedly affected "+affected_rows.to_text+" rows." - 0.up_to num_rows . each row_id-> - values = columns.map col-> col.at row_id - set_statement_values stmt statement_setter values - stmt.addBatch - if (row_id+1 % batch_size) == 0 then check_rows stmt.executeBatch batch_size - if (num_rows % batch_size) != 0 then check_rows stmt.executeBatch (num_rows % batch_size) + Managed_Resource.bracket (java_connection.setAutoCommit False) (_ -> java_connection.setAutoCommit default_autocommit) _-> + action + + ## PRIVATE + Performs the given action within a transaction. + Once the action is completed, the transaction is committed. + If a panic escapes from the action, the transaction is rolled-back and + closed. + If the rollback fails and panics, the panic related to the rollback will + take precedence over the original panic that caused that rollback. 
+ run_within_transaction : Any -> Any + run_within_transaction self ~action = + self.run_without_autocommit <| + self.with_connection java_connection-> + handle_panic caught_panic = + java_connection.rollback + Panic.throw caught_panic + result = Panic.catch Any handler=handle_panic <| + action java_connection.commit + result + + ## PRIVATE + Insert rows from an in-memory table using a prepared query template in + batches for efficiency. + + It is the caller's responsibility to call this method from within a + transaction to ensure consistency. + batch_insert : Text -> Statement_Setter -> Materialized_Table -> Integer -> Nothing + batch_insert self insert_template statement_setter table batch_size = + self.with_connection java_connection-> + Managed_Resource.bracket (java_connection.prepareStatement insert_template) .close stmt-> + num_rows = table.row_count + columns = table.columns + check_rows updates_array expected_size = + updates = Vector.from_polyglot_array updates_array + if updates.length != expected_size then Panic.throw <| Illegal_State.Error "The batch update unexpectedly affected "+updates.length.to_text+" rows instead of "+expected_size.to_text+"." else + updates.each affected_rows-> + if affected_rows != 1 then + Panic.throw <| Illegal_State.Error "A single update within the batch unexpectedly affected "+affected_rows.to_text+" rows." + 0.up_to num_rows . 
each row_id-> + values = columns.map col-> col.at row_id + set_statement_values stmt statement_setter values + stmt.addBatch + if (row_id+1 % batch_size) == 0 then check_rows stmt.executeBatch batch_size + if (num_rows % batch_size) != 0 then check_rows stmt.executeBatch (num_rows % batch_size) + java_connection.commit ## PRIVATE @@ -197,14 +227,3 @@ handle_sql_errors ~action related_query=Nothing = set_statement_values stmt statement_setter values = values.each_with_index ix-> value-> statement_setter.fill_hole stmt (ix + 1) value - -## PRIVATE - Given a Materialized_Table, create a SQL statement to build the table. -create_table_statement : (Value_Type -> SQL_Type) -> Text -> Materialized_Table -> Boolean -> SQL_Statement -create_table_statement type_mapper name table temporary = - column_types = table.columns.map col-> type_mapper col.value_type - column_names = table.columns.map .name - col_makers = column_names.zip column_types name-> typ-> - Base_Generator.wrap_in_quotes name ++ " " ++ typ.name - create_prefix = Builder.code <| if temporary then "CREATE TEMPORARY TABLE " else "CREATE TABLE " - (create_prefix ++ Base_Generator.wrap_in_quotes name ++ " (" ++ (Builder.join ", " col_makers) ++ ")").build diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso index 95bac49793f9..fdd2cf4fe84a 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso @@ -127,37 +127,20 @@ type Postgres_Connection execute_update self query = self.connection.execute_update query - ## PRIVATE - UNSTABLE - This is a prototype function used in our test suites. It may change. 
- - It creates a new table in the database with the given name (will fail if - the table already existed), inserts the contents of the provided - in-memory table and returns a handle to the newly created table. - - ! Temporary Tables - - Note that temporary tables may not be visible in the table catalog, so - some features which rely on it like the `Table.query` shorthand mode - may not work correctly with temporary tables. - - Arguments: - - name: The name of the table to create. - - table: An In-Memory table specifying the contents to upload. Schema of - the created database table is based on the column types of this table. - - temporary: Specifies whether the table should be marked as temporary. A - temporary table will be dropped after the connection closes and will - usually not be visible to other connections. - - batch_size: Specifies how many rows should be uploaded in a single - batch. - upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Database_Table - upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State <| - self.connection.upload_table name table temporary batch_size ## PRIVATE Access the dialect. dialect self = self.connection.dialect + ## PRIVATE + Access the underlying JDBC connection. + jdbc_connection self = self.connection.jdbc_connection + + ## PRIVATE + drop_table : Text -> Nothing + drop_table self table_name = + self.connection.drop_table table_name + ## PRIVATE Creates a Postgres connection based on a URL, properties and a dialect. 
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso index 0d7ce471dbe1..3d6f5fcc0c17 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso @@ -199,6 +199,7 @@ type Postgres_Dialect is_supported self operation = self.internal_generator_dialect.is_supported operation + ## PRIVATE make_internal_generator_dialect = cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]] diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso index 72a91eebf333..c673f7e61cbc 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso @@ -6,6 +6,7 @@ from Standard.Table import Value_Type import project.Connection.Connection.Connection import project.Data.Dialect +import project.Data.SQL.Builder import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type import project.Data.Table.Table @@ -139,3 +140,9 @@ type Redshift_Dialect check_aggregate_support self aggregate = _ = aggregate True + + ## PRIVATE + Checks if an operation is supported by the dialect. 
+ is_supported : Text -> Boolean + is_supported self operation = + self.internal_generator_dialect.is_supported operation diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso index 8e7ba1de5592..8f1c796e8603 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso @@ -121,35 +121,17 @@ type SQLite_Connection self.connection.execute_update query ## PRIVATE - UNSTABLE - This is a prototype function used in our test suites. It may change. - - It creates a new table in the database with the given name (will fail if - the table already existed), inserts the contents of the provided - in-memory table and returns a handle to the newly created table. - - ! Temporary Tables - - Note that temporary tables may not be visible in the table catalog, so - some features which rely on it like the `Table.query` shorthand mode - may not work correctly with temporary tables. + Access the dialect. + dialect self = self.connection.dialect - Arguments: - - name: The name of the table to create. - - table: An In-Memory table specifying the contents to upload. Schema of - the created database table is based on the column types of this table. - - temporary: Specifies whether the table should be marked as temporary. A - temporary table will be dropped after the connection closes and will - usually not be visible to other connections. - - batch_size: Specifies how many rows should be uploaded in a single - batch. - upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Database_Table - upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State <| - self.connection.upload_table name table temporary batch_size + ## PRIVATE + Access the underlying JDBC connection. 
+ jdbc_connection self = self.connection.jdbc_connection ## PRIVATE - Access the dialect. - dialect self = self.connection.dialect + drop_table : Text -> Nothing + drop_table self table_name = + self.connection.drop_table table_name ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso index 7bf212c313c7..a0e00b421b4d 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso @@ -9,6 +9,7 @@ from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all from Standard.Table import Value_Type import project.Connection.Connection.Connection +import project.Data.Dialect import project.Data.SQL.Builder import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type @@ -207,7 +208,6 @@ type SQLite_Dialect self.internal_generator_dialect.is_supported operation - ## PRIVATE make_internal_generator_dialect = text = [starts_with, contains, ends_with, make_case_sensitive]+concat_ops+trim_ops diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso index e9817a802774..81a1990e23ae 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso @@ -59,9 +59,9 @@ type SQLite_Type_Mapping Value_Type.Float _ -> SQLite_Types.real Value_Type.Decimal _ _ -> SQLite_Types.numeric Value_Type.Char _ _ -> SQLite_Types.text - Value_Type.Time -> SQLite_Types.blob - Value_Type.Date -> SQLite_Types.blob - Value_Type.Date_Time _ -> SQLite_Types.blob + Value_Type.Time -> SQLite_Types.text + Value_Type.Date -> SQLite_Types.text + 
Value_Type.Date_Time _ -> SQLite_Types.text Value_Type.Binary _ _ -> SQLite_Types.blob Value_Type.Mixed -> SQLite_Types.text Value_Type.Unsupported_Data_Type type_name underlying_type -> @@ -78,7 +78,9 @@ type SQLite_Type_Mapping sql_type_to_value_type sql_type = on_not_found = Value_Type.Unsupported_Data_Type sql_type.name sql_type - simple_types_map.get sql_type.typeid on_not_found + do_simple_mapping = + simple_types_map.get sql_type.typeid if_missing=on_not_found + name_based_workarounds.get sql_type.name if_missing=do_simple_mapping ## PRIVATE sql_type_to_text : SQL_Type -> Text @@ -124,6 +126,14 @@ simple_types_map = Map.from_vector <| special_types = [[Types.BOOLEAN, Value_Type.Boolean]] ints + floats + numerics + strings + blobs + special_types +## PRIVATE + The SQLite JDBC mapping relies on slightly modified version of the rules from + https://www.sqlite.org/datatype3.html#affinity_name_examples + However, with this the date-time columns will be mapped to the numeric type. + Instead, we want to treat such columns as Text, so we override the mapping. +name_based_workarounds = Map.from_vector <| + ["TIME", "DATE", "DATETIME", "TIMESTAMP"] . map x-> [x, default_text] + ## PRIVATE Maps operation names to functions that infer its result type. 
operations_map : Map Text (Vector -> SQL_Type) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso index 48942374eb74..01e24099b7ef 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso @@ -10,6 +10,7 @@ import project.Connection.SQLite_Options.SQLite_Options import project.Connection.SQLite_Options.In_Memory import project.Connection.SSL_Mode.SSL_Mode import project.Data.SQL_Query.SQL_Query +import project.Extensions.Upload_Table from project.Connection.Postgres_Options.Postgres_Options import Postgres from project.Connection.Redshift_Options.Redshift_Options import Redshift @@ -27,6 +28,7 @@ export project.Connection.SQLite_Options.SQLite_Options export project.Connection.SQLite_Options.In_Memory export project.Connection.SSL_Mode.SSL_Mode export project.Data.SQL_Query.SQL_Query +export project.Extensions.Upload_Table from project.Connection.Postgres_Options.Postgres_Options export Postgres from project.Connection.Redshift_Options.Redshift_Options export Redshift diff --git a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso index 1ef96094f410..d45c83be88f9 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso @@ -17,7 +17,7 @@ main = run_default_backend spec spec setup = prefix = setup.prefix table_builder = setup.table_builder - db_todo = if prefix.contains "In-Memory" then Nothing else "Table.transpose is not implemented yet in Database." + db_todo = if setup.is_database.not then Nothing else "Table.transpose is not implemented yet in Database." 
Test.group prefix+"Table.cross_tab" pending=db_todo <| table = table_builder [["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] table2 = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso index dbf6efdd8bc3..cae4242ae4d5 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Date_Time_Spec.enso @@ -133,7 +133,7 @@ spec setup = if setup.test_selection.date_time.not then Test.group prefix+"partial Date-Time support" <| - Test.specify "will fail to upload a Table containing Dates" <| + Test.specify "will warn when uploading a Table containing Dates" <| d = Date.new 2020 10 24 table = table_builder [["A", [d]], ["X", [123]]] - table.should_fail_with Inexact_Type_Coercion + Problems.expect_warning Inexact_Type_Coercion table diff --git a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso index 5f4047a68355..56626453794a 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso @@ -102,11 +102,12 @@ spec detailed setup = Test.specify "should sanitize names" <| t = table_builder [["X", ['\0', 'x', '']]] c = t.compute '[X] == \'\0\'' . catch SQL_Error - c.name . should_equal "[X] == '\0'" - case c.to_vector . catch SQL_Error of - # We ignore the SQL error upon materialization - some backends just do not support `\0` in query parameters. This is not the main point of this test. + # We ignore the SQL error - some backends just do not support `\0` values. This is not the main point of this test. 
+ case c of _ : SQL_Error -> Nothing - vec -> vec.should_equal [True, False, False] + _ -> + c.name.should_equal "[X] == '\0'" + c.to_vector.should_equal [True, False, False] Test.group prefix+"Expression Nothing literals" <| specify_test "should be able to add an nothing column" expression_test-> diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso index b94f20b6418b..ba11a64b515e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso @@ -18,7 +18,7 @@ spec setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize - db_todo = if prefix.contains "In-Memory" then Nothing else "Table.cross_join is still WIP for the DB backend." + db_todo = if setup.is_database.not then Nothing else "Table.cross_join is still WIP for the DB backend." Test.group prefix+"Table.cross_join" pending=db_todo <| Test.specify "should allow to create a cross product of two tables in the right order" <| t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] @@ -150,4 +150,3 @@ spec setup = r5 = [100, 4, 'a', 'x'] expected_rows = [r0, r1, r2, r3, r4, r5] r.should_equal expected_rows - diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso index 429f423c5038..20f5ae11ece4 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso @@ -30,7 +30,7 @@ spec setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize - db_todo = if prefix.contains "In-Memory" then Nothing else "TODO: handling NULLs in equality conditions." + db_todo = if setup.is_database.not then Nothing else "TODO: handling NULLs in equality conditions." 
Test.group prefix+"Table.join" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", [2, 3, 2, 4]], ["W", [4, 5, 6, 7]]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso index c800cf6911de..b186cbe73a9f 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso @@ -19,7 +19,7 @@ spec setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize - db_todo = if prefix.contains "In-Memory" then Nothing else "Table.zip is still WIP for the DB backend." + db_todo = if setup.is_database.not then Nothing else "Table.zip is still WIP for the DB backend." Test.group prefix+"Table.zip" pending=db_todo <| if setup.is_database.not then Test.specify "should allow to zip two tables, preserving memory layout order" <| diff --git a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso index cd7b8c2e2ebd..4a3d340b6739 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso @@ -12,7 +12,7 @@ main = run_default_backend spec spec setup = prefix = setup.prefix table_builder = setup.table_builder - db_todo = if prefix.contains "In-Memory" then Nothing else "Table.transpose is not implemented yet in Database." + db_todo = if setup.is_database.not then Nothing else "Table.transpose is not implemented yet in Database." 
Test.group prefix+"Table.transpose" pending=db_todo <| Test.specify "should transpose all columns by default" <| t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]] diff --git a/test/Table_Tests/src/Database/Common_Spec.enso b/test/Table_Tests/src/Database/Common_Spec.enso index 7fd67a017e28..ba785af9822d 100644 --- a/test/Table_Tests/src/Database/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common_Spec.enso @@ -18,12 +18,21 @@ spec prefix connection = tables_to_clean = Vector.new_builder upload prefix data temporary=True = name = Name_Generator.random_name prefix - table = connection.upload_table name data temporary=temporary - tables_to_clean.append name + table = data.create_database_table connection name temporary=temporary primary_key=Nothing + tables_to_clean.append table.name table + clean_tables = + tables_to_clean.to_vector.each name-> + Panic.catch Any (connection.drop_table name) caught_panic-> + IO.println "Failed to drop table: " + name + " because of: " + caught_panic.payload.to_display_text + Panic.with_finalizer clean_tables <| + run_tests prefix connection upload + +run_tests prefix connection upload = ## We have to upload the table as non-temporary, because otherwise it will not be visible in the list of tables and make `Table.query` confused. + TODO [RW] remove temporary=False once #6398 is done. t1 = upload "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) temporary=False Test.group prefix+"Basic Table Access" <| Test.specify "should allow to materialize tables and columns into local memory" <| @@ -266,10 +275,5 @@ spec prefix connection = Test.specify "report error when trying to filter by a custom predicate" <| t1.filter "a" (x -> x % 2 == 0) . 
should_fail_with Unsupported_Database_Operation - clean_table name = Panic.recover Any <| - sql = 'DROP TABLE "' + name + '"' - Panic.rethrow <| connection.execute_update sql - tables_to_clean.to_vector.each clean_table - main = Test_Suite.run_main <| spec "[SQLite] " (Database.connect (SQLite In_Memory)) diff --git a/test/Table_Tests/src/Database/Helpers/Name_Generator.enso b/test/Table_Tests/src/Database/Helpers/Name_Generator.enso index 31c295f5303d..5d3974f40d6f 100644 --- a/test/Table_Tests/src/Database/Helpers/Name_Generator.enso +++ b/test/Table_Tests/src/Database/Helpers/Name_Generator.enso @@ -1,11 +1,5 @@ from Standard.Base import all - -polyglot java import java.util.UUID - -## Generates a text representation of a randomly generated UUID. -random_uuid : Text -random_uuid = - UUID.randomUUID.to_text +from Standard.Base.Random import random_uuid ## Generates a random name with a given prefix, ensuring relative uniqueness by appending a random UUID. diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso index dd5ab249609d..c0c8ff9ab60e 100644 --- a/test/Table_Tests/src/Database/Postgres_Spec.enso +++ b/test/Table_Tests/src/Database/Postgres_Spec.enso @@ -6,17 +6,17 @@ import Standard.Table.Data.Type.Value_Type.Bits from Standard.Table import Table, Value_Type from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all hiding First -from Standard.Database import Database, Postgres, SQL_Query, Credentials, SSL_Mode -from Standard.Database.Errors import SQL_Error import Standard.Database.Data.SQL_Type.SQL_Type - import Standard.Database.Internal.Postgres.Pgpass +from Standard.Database import all +from Standard.Database.Errors import SQL_Error from Standard.Test import Test, Test_Suite import Standard.Test.Extensions import Standard.Test.Test_Environment import project.Database.Common_Spec +import project.Database.Upload_Spec import project.Database.Helpers.Name_Generator import 
project.Database.Types.Postgres_Type_Mapping_Spec import project.Common_Table_Operations @@ -192,14 +192,7 @@ run_tests connection db_name = name = Name_Generator.random_name "table_"+ix.to_text in_mem_table = Table.new columns - case connection.upload_table name in_mem_table of - table -> - tables.append name - table - clean_tables table_names = - table_names.each name-> - sql = 'DROP TABLE "' + name + '"' - Panic.rethrow <| connection.execute_update sql + in_mem_table.create_database_table connection name primary_key=Nothing temporary=True materialize = .read Common_Spec.spec prefix connection @@ -207,17 +200,15 @@ run_tests connection db_name = common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True take_drop=False allows_mixed_type_comparisons=False fixed_length_text_columns=True aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False agg_in_memory_table = (enso_project.data / "data.csv") . 
read - agg_table = connection.upload_table (Name_Generator.random_name "Agg1") agg_in_memory_table + agg_table = agg_in_memory_table.create_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True tables.append agg_table.name - empty_agg_table = connection.upload_table (Name_Generator.random_name "Agg_Empty") (agg_in_memory_table.take (First 0)) + empty_agg_table = (agg_in_memory_table.take (First 0)).create_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True tables.append empty_agg_table.name setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection postgres_specific_spec connection db_name setup Common_Table_Operations.Main.spec setup - clean_tables tables.to_vector - table_spec = db_name = Environment.get "ENSO_DATABASE_TEST_DB_NAME" db_host_port = (Environment.get "ENSO_DATABASE_TEST_HOST").if_nothing "localhost" . split ':' @@ -254,10 +245,18 @@ table_spec = ca_fail.is_error . should_equal True ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True - test_with_connection run_tests - test_with_connection Postgres_Type_Mapping_Spec.spec + case create_connection_builder of + Nothing -> + message = "PostgreSQL test database is not configured. See README.md for instructions." + Test.group "[PostgreSQL] Database tests" pending=message Nothing + connection_builder -> + connection = connection_builder Nothing + run_tests connection db_name + Postgres_Type_Mapping_Spec.spec connection + + Upload_Spec.spec connection_builder "[PostgreSQL] " -test_with_connection spec = +create_connection_builder = db_name = Environment.get "ENSO_DATABASE_TEST_DB_NAME" db_host_port = (Environment.get "ENSO_DATABASE_TEST_HOST").if_nothing "localhost" . 
split ':' db_host = db_host_port.at 0 @@ -266,13 +265,10 @@ test_with_connection spec = db_password = Environment.get "ENSO_DATABASE_TEST_DB_PASSWORD" case db_name.is_nothing of - True -> - message = "PostgreSQL test database is not configured. See README.md for instructions." - Test.group "[PostgreSQL] Database tests" pending=message Nothing + True -> Nothing False -> - connection = Panic.rethrow <| + _ -> Panic.rethrow <| Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password)) - spec connection db_name pgpass_file = enso_project.data / "pgpass.conf" diff --git a/test/Table_Tests/src/Database/Redshift_Spec.enso b/test/Table_Tests/src/Database/Redshift_Spec.enso index 2d4174c49080..bf1ada2d771d 100644 --- a/test/Table_Tests/src/Database/Redshift_Spec.enso +++ b/test/Table_Tests/src/Database/Redshift_Spec.enso @@ -43,14 +43,7 @@ run_tests connection = name = Name_Generator.random_name "table_"+ix.to_text in_mem_table = Table.new columns - case connection.upload_table name in_mem_table of - table -> - tables.append name - table - clean_tables table_names = - table_names.each name-> - sql = 'DROP TABLE "' + name + '"' - Panic.rethrow <| connection.execute_update sql + in_mem_table.create_database_table connection name primary_key=Nothing temporary=True materialize = .read Common_Spec.spec prefix connection @@ -59,16 +52,14 @@ run_tests connection = common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True take_drop=False allows_mixed_type_comparisons=False aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False date_support=False agg_in_memory_table = (enso_project.data / "data.csv") . 
read - agg_table = connection.upload_table (Name_Generator.random_name "Agg1") agg_in_memory_table + agg_table = agg_in_memory_table.create_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True tables.append agg_table.name - empty_agg_table = connection.upload_table (Name_Generator.random_name "Agg_Empty") (agg_in_memory_table.take (First 0)) + empty_agg_table = (agg_in_memory_table.take (First 0)).create_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True tables.append empty_agg_table.name setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection Common_Table_Operations.Main.spec setup - clean_tables tables.to_vector - connect_via_json_config = credentials = enso_project.data / 'redshift_credentials.json' msg = "Redshift connection is not set up. Please create a JSON file containing the credentials in `data/redshift_credentials.json`" diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index fae2e551beb8..0d7634759829 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -4,14 +4,15 @@ import Standard.Base.Runtime.Ref.Ref import Standard.Table.Data.Type.Value_Type.Bits from Standard.Table import Table, Value_Type -from Standard.Database import Database, SQLite, In_Memory, SQL_Query import Standard.Database.Connection.SQLite_Format.SQLite_Format +from Standard.Database import all from Standard.Database.Errors import SQL_Error from Standard.Test import Test, Test_Suite import Standard.Test.Extensions import project.Database.Common_Spec +import project.Database.Upload_Spec import project.Database.Types.SQLite_Type_Mapping_Spec import project.Database.Helpers.Name_Generator import project.Common_Table_Operations 
@@ -114,7 +115,7 @@ sqlite_spec connection prefix = name = Name_Generator.random_name "table_"+ix.to_text in_mem_table = Table.new columns - connection.upload_table name in_mem_table + in_mem_table.create_database_table connection name primary_key=Nothing materialize = .read Common_Spec.spec prefix connection @@ -132,8 +133,8 @@ sqlite_spec connection prefix = the missing statistics. aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config advanced_stats=False text_shortest_longest=False first_last=False first_last_row_order=False multi_distinct=False aggregation_problems=False nan=False date_support=False agg_in_memory_table = (enso_project.data / "data.csv") . read - agg_table = connection.upload_table (Name_Generator.random_name "Agg1") agg_in_memory_table - empty_agg_table = connection.upload_table (Name_Generator.random_name "Agg_Empty") (agg_in_memory_table.take (First 0)) + agg_table = agg_in_memory_table.create_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True + empty_agg_table = (agg_in_memory_table.take (First 0)).create_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection Common_Table_Operations.Main.spec setup @@ -144,10 +145,14 @@ spec = enso_project.data.create_directory file = enso_project.data / "sqlite_test.db" file.delete_if_exists - sqlite_spec (Database.connect (SQLite file)) "[SQLite] " + in_file_prefix = "[SQLite File] " + sqlite_spec (Database.connect (SQLite file)) in_file_prefix + Upload_Spec.spec (_ -> Database.connect (SQLite file)) in_file_prefix file.delete - sqlite_spec (Database.connect (SQLite In_Memory)) "[SQLite Memory] " + in_memory_prefix = "[SQLite In-Memory] " + sqlite_spec (Database.connect (SQLite 
In_Memory)) in_memory_prefix + Upload_Spec.spec (_ -> Database.connect (SQLite In_Memory)) in_memory_prefix persistent_connector=False SQLite_Type_Mapping_Spec.spec diff --git a/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso b/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso index b5e9c8890d36..e8a99c03d585 100644 --- a/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso +++ b/test/Table_Tests/src/Database/Types/Postgres_Type_Mapping_Spec.enso @@ -10,11 +10,9 @@ from Standard.Test import Problems, Test, Test_Suite import Standard.Test.Extensions import project.Database.Helpers.Name_Generator -from project.Database.Postgres_Spec import test_with_connection - -spec connection db_name = - _ = db_name +from project.Database.Postgres_Spec import create_connection_builder +spec connection = make_table prefix columns = name = Name_Generator.random_name prefix column_exprs = columns.map col_def-> @@ -130,7 +128,9 @@ spec connection db_name = t2.at "b" . value_type . 
should_equal (Value_Type.Integer Bits.Bits_16) Problems.expect_warning Inexact_Type_Coercion t2 -main = Test_Suite.run_main (test_with_connection spec) +main = Test_Suite.run_main <| + connection = create_connection_builder Nothing + spec connection max_int4 = 2147483647 default_text = Value_Type.Char size=Nothing variable_length=True diff --git a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso index 6c1b4cfe23e9..daaf503fc42b 100644 --- a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso +++ b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso @@ -1,10 +1,11 @@ from Standard.Base import all import Standard.Table.Data.Type.Value_Type.Bits -from Standard.Table import Aggregate_Column, Value_Type +from Standard.Table import Aggregate_Column, Value_Type, Table from Standard.Table.Errors import Invalid_Value_Type, Inexact_Type_Coercion import Standard.Database.Data.Dialect +import Standard.Database.Extensions.Upload_Table import Standard.Database.Internal.SQLite.SQLite_Type_Mapping from Standard.Database import Database, SQLite, In_Memory, SQL_Query @@ -82,7 +83,7 @@ spec = # t2.at "First c" . value_type . should_equal Value_Type.Boolean Test.specify "should approximate types to the closest supported one" <| - t = make_table "T" [["a", "BINARY"], ["b", "INT"]] + t = make_table "approx-table" [["a", "BINARY"], ["b", "INT"]] t1 = t.cast "a" (Value_Type.Binary 10 variable_length=True) t1.at "a" . value_type . should_equal Value_Type.Binary @@ -92,6 +93,19 @@ spec = t2.at "b" . value_type . should_equal Value_Type.Integer Problems.expect_warning Inexact_Type_Coercion t2 + Test.specify "will coerce date/time columns to Text" <| + t = make_table "datetime-table" [["a", "DATE"], ["b", "TIME"], ["c", "DATETIME"], ["d", "TIMESTAMP"]] + t.at "a" . value_type . should_equal Value_Type.Char + t.at "b" . value_type . should_equal Value_Type.Char + t.at "c" . 
value_type . should_equal Value_Type.Char + t.at "d" . value_type . should_equal Value_Type.Char + + t1 = Table.new [["a", [Date.now]], ["b", [Time_Of_Day.now]], ["c", [Date_Time.now]]] + t2 = t1.create_database_table connection temporary=True + t2.at "a" . value_type . should_equal Value_Type.Char + t2.at "b" . value_type . should_equal Value_Type.Char + t2.at "c" . value_type . should_equal Value_Type.Char + Test.specify "should be able to infer types for all supported operations" <| dialect = Dialect.sqlite internal_mapping = dialect.internal_generator_dialect.operation_map diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso new file mode 100644 index 000000000000..4c323d4fc210 --- /dev/null +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -0,0 +1,136 @@ +from Standard.Base import all + +from Standard.Table import Table, Column, Value_Type, Column_Selector +from Standard.Table.Errors import Missing_Input_Columns + +from Standard.Database import all +from Standard.Database.Errors import all +from Standard.Database.Internal.Result_Set import result_set_to_table + +from Standard.Test import Test, Test_Suite, Problems +import Standard.Test.Extensions + +import project.Database.Helpers.Name_Generator + +polyglot java import org.enso.table_test_helpers.ExplodingStorage +polyglot java import org.enso.table_test_helpers.ExplodingStoragePayload + +main = Test_Suite.run_main <| + spec (_ -> Database.connect (SQLite In_Memory)) "[SQLite] " persistent_connector=False + +## PRIVATE + Tests uploading tables. + + Arguments: + - make_new_connection: a function that takes `Nothing` and returns a new + connection. + - prefix: a string that will be prepended to the test names. + - persistent_connector: specifies if the database is persisted between + connections. 
Should be `True` for all databases except SQLite in the + `In_Memory` mode in which every re-connect creates a separate in-memory + database, so features relying on persistence cannot really be tested. +spec make_new_connection prefix persistent_connector=True = + connection = make_new_connection Nothing + Test.group prefix+"Uploading an in-memory Table" <| + in_memory_table = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + Test.specify "should include the created table in the tables directory" <| + db_table = in_memory_table.create_database_table connection (Name_Generator.random_name "permanent_table_1") temporary=False + Panic.with_finalizer (connection.drop_table db_table.name) <| + db_table.at "X" . to_vector . should_equal [1, 2, 3] + + connection.tables.at "Name" . to_vector . should_contain db_table.name + connection.query db_table.name . at "X" . to_vector . should_equal [1, 2, 3] + + postgres_pending = if prefix.contains "Postgre" then "TODO: See issue https://github.com/enso-org/enso/issues/6398" + Test.specify "should include the temporary table in the tables directory" pending=postgres_pending <| + db_table = in_memory_table.create_database_table connection (Name_Generator.random_name "temporary_table_1") temporary=True + db_table.at "X" . to_vector . should_equal [1, 2, 3] + connection.tables.at "Name" . to_vector . should_contain db_table.name + connection.query db_table.name . at "X" . to_vector . should_equal [1, 2, 3] + + if persistent_connector then + Test.specify "should drop the temporary table after the connection is closed" <| + tmp_connection = make_new_connection Nothing + db_table = in_memory_table.create_database_table tmp_connection (Name_Generator.random_name "temporary_table_2") temporary=True + name = db_table.name + tmp_connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] + tmp_connection.close + connection.query (SQL_Query.Table_Name name) . 
should_fail_with Table_Not_Found + + Test.specify "should preserve the regular table after the connection is closed" <| + tmp_connection = make_new_connection Nothing + db_table = in_memory_table.create_database_table tmp_connection (Name_Generator.random_name "permanent_table_1") temporary=False + name = db_table.name + Panic.with_finalizer (connection.drop_table name) <| + tmp_connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] + tmp_connection.close + connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] + + Test.specify "should rollback the table creation if create_database_table fails" <| + normal_column = Column.from_vector "Y" ((100+0).up_to (100+1000)).to_vector + exploding_column = make_mock_column "X" (0.up_to 1000).to_vector 512 + exploding_table = Table.new [normal_column, exploding_column] + name = Name_Generator.random_name "rolling-back-table" + connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found + Test.expect_panic_with matcher=ExplodingStoragePayload <| + exploding_table.create_database_table connection name temporary=False primary_key=Nothing + connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found + + Test.specify "should set a primary key for the table" <| + t1 = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [1.0, 2.0, 3.0]]] + db_table_1 = t1.create_database_table connection (Name_Generator.random_name "primary-key-1") primary_key=["Y", "X"] + Panic.with_finalizer (connection.drop_table db_table_1.name) <| + db_table_1.at "X" . to_vector . should_equal [1, 2, 3] + get_primary_key connection db_table_1.name . should_equal ["Y", "X"] + + db_table_2 = t1.create_database_table connection (Name_Generator.random_name "primary-key-2") + Panic.with_finalizer (connection.drop_table db_table_2.name) <| + db_table_2.at "X" . to_vector . should_equal [1, 2, 3] + get_primary_key connection db_table_2.name . 
should_equal ["X"] + + db_table_3 = t1.create_database_table connection (Name_Generator.random_name "primary-key-3") primary_key=Nothing + Panic.with_finalizer (connection.drop_table db_table_3.name) <| + db_table_3.at "X" . to_vector . should_equal [1, 2, 3] + get_primary_key connection db_table_3.name . should_equal Nothing + + Test.specify "should ensure that primary key columns are valid" <| + r1 = in_memory_table.create_database_table connection (Name_Generator.random_name "primary-key-4") primary_key=["X", "nonexistent"] + r1.should_fail_with Missing_Input_Columns + + db_table_2 = in_memory_table.create_database_table connection (Name_Generator.random_name "primary-key-5") primary_key=["X", 0] + Panic.with_finalizer (connection.drop_table db_table_2.name) <| + get_primary_key connection db_table_2.name . should_equal ["X"] + + Test.specify "should fail if the primary key is not unique" <| + t1 = Table.new [["X", [1, 2, 1]], ["Y", ['b', 'b', 'a']]] + r1 = t1.create_database_table connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["X"] + r1.should_fail_with Non_Unique_Primary_Key + r2 = t1.create_database_table connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["Y"] + r2.should_fail_with Non_Unique_Primary_Key + r3 = t1.create_database_table connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] + r3.at "X" . to_vector . should_equal [1, 2, 1] + + t2 = Table.new [["X", [1, 2, 1]], ["Y", ['a', 'b', 'a']]] + r4 = t2.create_database_table connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] + r4.should_fail_with Non_Unique_Primary_Key + +## PRIVATE + Creates a mock column containing `values`. + + If `exploding_index` is accessed, an exception will be thrown. 
+make_mock_column name values exploding_index = + storage = ExplodingStorage.new values.to_array exploding_index + Column.from_storage name storage + +## PRIVATE + + This method may not work correctly with temporary tables, possibly resulting + in `SQL_Error` as such tables may not be found. +get_primary_key connection table_name = + connection.jdbc_connection.with_connection java_connection-> + rs = java_connection.getMetaData.getPrimaryKeys Nothing connection.schema table_name + keys_table = result_set_to_table rs connection.dialect.make_column_fetcher_for_type + # The names of the columns are sometimes lowercase and sometimes uppercase, so we do a case insensitive select first. + selected = keys_table.select_columns [Column_Selector.By_Name "COLUMN_NAME", Column_Selector.By_Name "KEY_SEQ"] reorder=True + key_column_names = selected.order_by 1 . at 0 . to_vector + if key_column_names.is_empty then Nothing else key_column_names diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 34adf2474416..a6af3f7d817e 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -12,6 +12,7 @@ from Standard.Table.Errors import Invalid_Output_Column_Names, Duplicate_Output_ import Standard.Visualization +import Standard.Database.Extensions.Upload_Table from Standard.Database import Database, SQLite, In_Memory from Standard.Test import Test, Test_Suite, Problems @@ -852,8 +853,8 @@ spec = t = Table.new [["X", [1, 2, 3]]] t.join 42 . 
should_fail_with Type_Error - db = Database.connect (SQLite In_Memory) - db_table = db.upload_table "test" (Table.new [["Y", [4, 5, 6]]]) + db_connection = Database.connect (SQLite In_Memory) + db_table = (Table.new [["Y", [4, 5, 6]]]).create_database_table db_connection "test" r = t.join db_table r.should_fail_with Illegal_Argument diff --git a/test/Tests/polyglot-sources/enso-test-java-helpers/src/main/java/org/enso/table_test_helpers/ExplodingStorage.java b/test/Tests/polyglot-sources/enso-test-java-helpers/src/main/java/org/enso/table_test_helpers/ExplodingStorage.java new file mode 100644 index 000000000000..35d0ff6a3230 --- /dev/null +++ b/test/Tests/polyglot-sources/enso-test-java-helpers/src/main/java/org/enso/table_test_helpers/ExplodingStorage.java @@ -0,0 +1,110 @@ +package org.enso.table_test_helpers; + +import org.enso.table.data.column.builder.object.Builder; +import org.enso.table.data.column.operation.map.MapOperationProblemBuilder; +import org.enso.table.data.column.storage.Storage; +import org.enso.table.data.column.storage.type.IntegerType; +import org.enso.table.data.column.storage.type.StorageType; +import org.enso.table.data.mask.OrderMask; +import org.enso.table.data.mask.SliceRange; + +import java.util.BitSet; +import java.util.List; + +/** + * A helper class used in the Upload_Spec test to purposefully interrupt a table upload in the + * middle of it by throwing an exception. It is used to test the transactionality of the upload. 
+ */ +public class ExplodingStorage extends Storage { + private final long[] array; + private final long explodingIndex; + + public ExplodingStorage(long[] array, long explodingIndex) { + this.array = array; + this.explodingIndex = explodingIndex; + } + + private void checkIndex(long idx) { + if (idx == explodingIndex) { + throw new ExplodingStoragePayload(); + } + } + + @Override + public int size() { + return array.length; + } + + @Override + public int countMissing() { + return 0; + } + + @Override + public StorageType getType() { + return IntegerType.INT_64; + } + + @Override + public boolean isNa(long idx) { + checkIndex(idx); + return false; + } + + public long getItem(int idx) { + checkIndex(idx); + return array[idx]; + } + + @Override + public Long getItemBoxed(int idx) { + return getItem(idx); + } + + @Override + public boolean isOpVectorized(String name) { + return false; + } + + @Override + protected Storage runVectorizedMap( + String name, Object argument, MapOperationProblemBuilder problemBuilder) { + return null; + } + + @Override + protected Storage runVectorizedZip( + String name, Storage argument, MapOperationProblemBuilder problemBuilder) { + return null; + } + + @Override + public Storage mask(BitSet mask, int cardinality) { + return null; + } + + @Override + public Storage applyMask(OrderMask mask) { + return null; + } + + @Override + public Storage countMask(int[] counts, int total) { + return null; + } + + @Override + public Storage slice(int offset, int limit) { + return null; + } + + @Override + public Builder createDefaultBuilderOfSameType(int capacity) { + return null; + } + + @Override + public Storage slice(List ranges) { + return null; + } +} diff --git a/test/Tests/polyglot-sources/enso-test-java-helpers/src/main/java/org/enso/table_test_helpers/ExplodingStoragePayload.java b/test/Tests/polyglot-sources/enso-test-java-helpers/src/main/java/org/enso/table_test_helpers/ExplodingStoragePayload.java new file mode 100644 index 
000000000000..3daf8af26b20 --- /dev/null +++ b/test/Tests/polyglot-sources/enso-test-java-helpers/src/main/java/org/enso/table_test_helpers/ExplodingStoragePayload.java @@ -0,0 +1,3 @@ +package org.enso.table_test_helpers; + +public class ExplodingStoragePayload extends RuntimeException {} diff --git a/test/Tests/src/Semantic/Error_Spec.enso b/test/Tests/src/Semantic/Error_Spec.enso index d49ef73aa699..f9cee8c31546 100644 --- a/test/Tests/src/Semantic/Error_Spec.enso +++ b/test/Tests/src/Semantic/Error_Spec.enso @@ -289,5 +289,51 @@ spec = caught_js_arr_panic . should_equal "JS array:[1, 2, 3]" + Test.specify "should allow to use `with_finalizer`" <| + ref1 = Ref.new "" + r1 = Panic.with_finalizer (ref1.put "finalized") <| + 42 + r1.should_equal 42 + ref1.get . should_equal "finalized" + + ref2 = Ref.new "" + Test.expect_panic_with matcher=Illegal_Argument <| + Panic.with_finalizer (ref2.put "finalized") <| + ref2.put "in-progress" + Panic.throw (Illegal_Argument.Error "msg") + 42 + ref2.get . should_equal "finalized" + + ref3 = Ref.new "" + r3 = Panic.with_finalizer (ref3.put "finalized") <| + ref3.put "in-progress" + Error.throw (Illegal_Argument.Error "msg2") + r3.should_fail_with Illegal_Argument + ref3.get . should_equal "finalized" + + Test.specify "should propagate any panics raised in `with_finalizer` finalization" <| + v1 = Vector.new_builder + c1 = Panic.catch Any handler=(.payload) <| + do_finalize = + v1.append 2 + Panic.throw "finalizer" + do_act = + v1.append 1 + 42 + Panic.with_finalizer do_finalize do_act + c1 . should_equal "finalizer" + v1.to_vector . should_equal [1, 2] + + v2 = Vector.new_builder + c2 = Panic.catch Any handler=(.payload) <| + do_finalize = + v2.append 2 + Panic.throw "finalizer" + do_act = + v2.append 1 + Panic.throw "action" + Panic.with_finalizer do_finalize do_act + c2 . should_equal "finalizer" + v2.to_vector . 
should_equal [1, 2] main = Test_Suite.run_main spec diff --git a/test/Visualization_Tests/src/Table_Spec.enso b/test/Visualization_Tests/src/Table_Spec.enso index 6d0026101119..ccf9b2e1878e 100644 --- a/test/Visualization_Tests/src/Table_Spec.enso +++ b/test/Visualization_Tests/src/Table_Spec.enso @@ -2,8 +2,7 @@ from Standard.Base import all from Standard.Table import Table, Aggregate_Column, Value_Type -from Standard.Database import SQLite -from Standard.Database import Database +from Standard.Database import all import Standard.Database.Data.Table.Table as Database_Table import Standard.Visualization.Table.Visualization @@ -23,7 +22,8 @@ type Foo to_js_object self = JS_Object.from_pairs [["x", self.x]] visualization_spec connection = - t = connection.upload_table "T" <| Table.new [["A", ['a', 'a', 'a']], ["B", [2, 2, 3]], ["C", [3, 5, 6]]] + in_mem = Table.new [["A", ['a', 'a', 'a']], ["B", [2, 2, 3]], ["C", [3, 5, 6]]] + t = in_mem.create_database_table connection "T" primary_key=Nothing temporary=True make_json header data all_rows ixes_header ixes = p_header = ["header", header] From c0679afa07e856235b1d040235f40d467553c951 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Wawrzyniec=20Urba=C5=84czyk?= Date: Fri, 28 Apr 2023 04:21:23 +0200 Subject: [PATCH 09/34] Fixes for URL and file handling (#6407) This PR addresses two issues: * problems with importing projects on macOS, * console Window appearing when Project Manager is spawned. 
--- .../lib/client/src/bin/project-manager.ts | 3 ++ .../lib/client/src/file-associations.ts | 28 +++++++++++++--- app/ide-desktop/lib/client/src/index.ts | 32 +++++++++++++++++-- .../lib/client/src/project-management.ts | 6 ++-- .../lib/client/src/url-associations.ts | 5 ++- app/ide-desktop/tsconfig.json | 1 + 6 files changed, 65 insertions(+), 10 deletions(-) diff --git a/app/ide-desktop/lib/client/src/bin/project-manager.ts b/app/ide-desktop/lib/client/src/bin/project-manager.ts index a37ebbe7a865..6e3782879a5e 100644 --- a/app/ide-desktop/lib/client/src/bin/project-manager.ts +++ b/app/ide-desktop/lib/client/src/bin/project-manager.ts @@ -47,6 +47,9 @@ export function spawn(args: config.Args, processArgs: string[]): childProcess.Ch const binPath = pathOrPanic(args) const process = childProcess.spawn(binPath, processArgs, { stdio: [/* stdin */ 'pipe', /* stdout */ 'inherit', /* stderr */ 'inherit'], + // The Project Manager should never spawn any windows. On Windows OS this needs to be + // manually prevented, as the default is to spawn a console window. 
+ windowsHide: true, }) logger.log(`Backend has been spawned (pid = ${String(process.pid)}).`) process.on('exit', code => { diff --git a/app/ide-desktop/lib/client/src/file-associations.ts b/app/ide-desktop/lib/client/src/file-associations.ts index 7c13b15d80e4..02dc0bfbfd3c 100644 --- a/app/ide-desktop/lib/client/src/file-associations.ts +++ b/app/ide-desktop/lib/client/src/file-associations.ts @@ -93,19 +93,21 @@ function getClientArguments(): string[] { export function isFileOpenable(path: string): boolean { const extension = pathModule.extname(path).toLowerCase() return ( - extension === fileAssociations.BUNDLED_PROJECT_EXTENSION || - extension === fileAssociations.SOURCE_FILE_EXTENSION + extension === fileAssociations.BUNDLED_PROJECT_SUFFIX || + extension === fileAssociations.SOURCE_FILE_SUFFIX ) } -/* On macOS when Enso-associated file is opened, the application is first started and then it +/** On macOS when Enso-associated file is opened, the application is first started and then it * receives the `open-file` event. However, if there is already an instance of Enso running, * it receives the `open-file` event (and no new instance is created for us). In this case, * we manually start a new instance of the application and pass the file path to it (using the * Windows-style command). */ -export function onFileOpened(event: Event, path: string) { +export function onFileOpened(event: Event, path: string): string | void { + logger.log(`Received 'open-file' event for path '${path}'.`) if (isFileOpenable(path)) { + logger.log(`The file '${path}' is openable.`) // If we are not ready, we can still decide to open a project rather than enter the welcome // screen. However, we still check for the presence of arguments, to prevent hijacking the // user-spawned IDE instance (OS-spawned will not have arguments set). @@ -127,13 +129,31 @@ export function onFileOpened(event: Event, path: string) { // Prevent parent (this) process from waiting for the child to exit. 
child.unref() } + } else { + logger.log(`The file '${path}' is not openable, ignoring the 'open-file' event.`) } } +/** Set up the `open-file` event handler that might import a project and invoke the given callback, + * if this IDE instance should load the project. See {@link onFileOpened} for more details. + * + * @param setProjectToOpen - A function that will be called with the ID of the project to open. + */ +export function setOpenFileEventHandler(setProjectToOpen: (id: string) => void) { + electron.app.on('open-file', (event, path) => { + const projectId = onFileOpened(event, path) + if (typeof projectId === 'string') { + setProjectToOpen(projectId) + } + }) +} + /** Handle the case where IDE is invoked with a file to open. * * Imports project if necessary. Returns the ID of the project to open. In case of an error, displays an error message and rethrows the error. * + * @param openedFile - The path to the file to open. + * @returns The ID of the project to open. * @throws An `Error`, if the project from the file cannot be opened or imported. */ export function handleOpenFile(openedFile: string): string { try { diff --git a/app/ide-desktop/lib/client/src/index.ts b/app/ide-desktop/lib/client/src/index.ts index f705ec912766..67d4f1eea98d 100644 --- a/app/ide-desktop/lib/client/src/index.ts +++ b/app/ide-desktop/lib/client/src/index.ts @@ -47,7 +47,9 @@ class App { async run() { urlAssociations.registerAssociations() // Register file associations for macOS. - electron.app.on('open-file', fileAssociations.onFileOpened) + fileAssociations.setOpenFileEventHandler(id => { + this.setProjectToOpenOnStartup(id) + }) const { windowSize, chromeOptions, fileToOpen, urlToOpen } = this.processArguments() this.handleItemOpening(fileToOpen, urlToOpen) @@ -70,6 +72,7 @@ class App { * freezes. This freeze should be diagnosed and fixed. Then, the `whenReady()` listener * should be used here instead. 
*/ electron.app.on('ready', () => { + logger.log('Electron application is ready.') void this.main(windowSize) }) this.registerShortcuts() @@ -92,6 +95,29 @@ class App { return { ...configParser.parseArgs(argsToParse), fileToOpen, urlToOpen } } + /** + * Sets the project to be opened on application startup. + * + * This method should be called before the application is ready, as it only + * modifies the startup options. If the application is already initialized, + * an error will be logged, and the method will have no effect. + * + * @param idOfProjectToOpen - The ID of the project to be opened on startup. + */ + setProjectToOpenOnStartup(idOfProjectToOpen: string) { + // Make sure that we are not initialized yet, as this method should be called before the + // application is ready. + if (!electron.app.isReady()) { + logger.log(`Setting project to open on startup: ${idOfProjectToOpen}.`) + this.args.groups.startup.options.project.value = idOfProjectToOpen + } else { + logger.error( + `Cannot set project to open on startup: ${idOfProjectToOpen},` + + ` as the application is already initialized.` + ) + } + } + /** This method is invoked when the application was spawned due to being a default application * for a URL protocol or file extension. */ handleItemOpening(fileToOpen: string | null, urlToOpen: URL | null) { @@ -101,8 +127,8 @@ class App { // This makes the IDE open the relevant project. Also, this prevents us from using this // method after IDE has been fully set up, as the initializing code would have already // read the value of this argument. 
- this.args.groups.startup.options.project.value = - fileAssociations.handleOpenFile(fileToOpen) + const projectId = fileAssociations.handleOpenFile(fileToOpen) + this.setProjectToOpenOnStartup(projectId) } if (urlToOpen != null) { diff --git a/app/ide-desktop/lib/client/src/project-management.ts b/app/ide-desktop/lib/client/src/project-management.ts index b5e4f57c6a0e..dc0143cefa61 100644 --- a/app/ide-desktop/lib/client/src/project-management.ts +++ b/app/ide-desktop/lib/client/src/project-management.ts @@ -34,7 +34,8 @@ const logger = config.logger * @throws `Error` if the path does not belong to a valid project. */ export function importProjectFromPath(openedPath: string): string { - if (pathModule.extname(openedPath).endsWith(fileAssociations.BUNDLED_PROJECT_EXTENSION)) { + if (pathModule.extname(openedPath).endsWith(fileAssociations.BUNDLED_PROJECT_SUFFIX)) { + logger.log(`Path '${openedPath}' denotes a bundled project.`) // The second part of condition is for the case when someone names a directory like `my-project.enso-project` // and stores the project there. Not the most fortunate move, but... if (isProjectRoot(openedPath)) { @@ -44,7 +45,7 @@ export function importProjectFromPath(openedPath: string): string { return importBundle(openedPath) } } else { - logger.log(`Opening file: '${openedPath}'.`) + logger.log(`Opening non-bundled file: '${openedPath}'.`) const rootPath = getProjectRoot(openedPath) // Check if the project root is under the projects directory. If it is, we can open it. // Otherwise, we need to install it first. @@ -62,6 +63,7 @@ export function importProjectFromPath(openedPath: string): string { * @returns Project ID (from Project Manager's metadata) identifying the imported project. */ export function importBundle(bundlePath: string): string { + logger.log(`Importing project from bundle: '${bundlePath}'.`) // The bundle is a tarball, so we just need to extract it to the right location. 
const bundleRoot = directoryWithinBundle(bundlePath) const targetDirectory = generateDirectoryName(bundleRoot ?? bundlePath) diff --git a/app/ide-desktop/lib/client/src/url-associations.ts b/app/ide-desktop/lib/client/src/url-associations.ts index 7a06770d4365..dbcebfe592a2 100644 --- a/app/ide-desktop/lib/client/src/url-associations.ts +++ b/app/ide-desktop/lib/client/src/url-associations.ts @@ -83,7 +83,10 @@ export function handleOpenUrl(openedUrl: URL) { // If we failed to acquire the lock, it means that another instance of the application is // already running. In this case, we must send the URL to the existing instance and exit. logger.log('Another instance of the application is already running. Exiting.') - electron.app.quit() + // Note that we need here to exit rather than quit. Otherwise, the application would + // continue initializing and would create a new window, before quitting. + // We don't want anything to flash on the screen, so we just exit. + electron.app.exit(0) } else { // If we acquired the lock, it means that we are the first instance of the application. // In this case, we must wait for the application to be ready and then send the URL to the diff --git a/app/ide-desktop/tsconfig.json b/app/ide-desktop/tsconfig.json index adfdbf409f89..05c497e8c6d0 100644 --- a/app/ide-desktop/tsconfig.json +++ b/app/ide-desktop/tsconfig.json @@ -6,6 +6,7 @@ "forceConsistentCasingInFileNames": true, "module": "ESNext", "moduleResolution": "node", + "allowJs": true, "checkJs": true, "strict": true, "noImplicitAny": true, From c6790f1e9cad8bbefe924637314f2dbc43ea1bac Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Fri, 28 Apr 2023 09:16:00 +0200 Subject: [PATCH 10/34] Report only unique warnings (#6372) This change makes sure that reported warnings are unique, based on the value of internal clock tick and ignoring differences in reassignments. 
Before: ![Screenshot from 2023-04-20 15-42-55](https://user-images.githubusercontent.com/292128/233415710-925c1045-37c7-49f5-9bc3-bfbfd30270a3.png) After: ![Screenshot from 2023-04-20 15-27-27](https://user-images.githubusercontent.com/292128/233415807-8cb67bc2-ac37-4db7-924e-ae7619074b5b.png) On the positive side, no further changes, like in LS, have to be done. Closes #6257. --- CHANGELOG.md | 8 +- .../semantic/WarningBenchmarks.java | 101 ++++++++++++++++++ .../IndirectInvokeConversionNode.java | 4 +- .../callable/IndirectInvokeMethodNode.java | 4 +- .../node/callable/InvokeCallableNode.java | 2 +- .../node/callable/InvokeConversionNode.java | 4 +- .../node/callable/InvokeMethodNode.java | 4 +- .../enso/interpreter/runtime/EnsoContext.java | 2 +- .../enso/interpreter/runtime/data/Array.java | 17 ++- .../interpreter/runtime/data/ArrayRope.java | 3 + .../interpreter/runtime/error/Warning.java | 46 +++++--- .../runtime/error/WarningsLibrary.java | 2 +- .../runtime/error/WithWarnings.java | 96 ++++++++++------- .../org/enso/interpreter/test/ArrayTest.java | 37 +++---- .../enso/interpreter/test/BigNumberTest.java | 37 +++---- .../test/ForeignMethodInvokeTest.java | 36 ++----- .../interpreter/test/LazyAtomFieldTest.java | 36 +++---- .../org/enso/interpreter/test/ListTest.java | 27 ++--- .../enso/interpreter/test/MetaObjectTest.java | 14 +-- .../interpreter/test/PolyglotErrorTest.java | 27 ++--- .../org/enso/interpreter/test/PrintTest.java | 25 ++--- .../org/enso/interpreter/test/TestBase.java | 30 ++++-- .../interpreter/test/TypeMembersTest.java | 25 ++--- .../org/enso/interpreter/test/VectorTest.java | 37 +++---- .../enso/interpreter/test/WarningsTest.java | 33 +++++- .../enso/interpreter/dsl/MethodProcessor.java | 2 +- .../Table_Tests/src/Database/SQLite_Spec.enso | 7 ++ test/Tests/src/Semantic/Warnings_Spec.enso | 19 ++++ 28 files changed, 406 insertions(+), 279 deletions(-) create mode 100644 
engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/WarningBenchmarks.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 52042f1786f3..c5bc858c6b5e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -691,7 +691,7 @@ - [Optimize Atom storage layouts][3862] - [Make instance methods callable like statics for builtin types][4077] - [Convert large longs to doubles, safely, for host calls][4099] -- [Consistent ordering with comparators](4067) +- [Consistent ordering with comparators][4067] - [Profile engine startup][4110] - [Report type of polyglot values][4111] - [Engine can now recover from serialization failures][5591] @@ -709,6 +709,7 @@ - [Vector.sort handles incomparable types][5998] - [Removing need for asynchronous thread to execute ResourceManager finalizers][6335] +- [Warning.get_all returns only unique warnings][6372] [3227]: https://github.com/enso-org/enso/pull/3227 [3248]: https://github.com/enso-org/enso/pull/3248 @@ -797,9 +798,9 @@ [4048]: https://github.com/enso-org/enso/pull/4048 [4049]: https://github.com/enso-org/enso/pull/4049 [4056]: https://github.com/enso-org/enso/pull/4056 +[4067]: https://github.com/enso-org/enso/pull/4067 [4077]: https://github.com/enso-org/enso/pull/4077 [4099]: https://github.com/enso-org/enso/pull/4099 -[4067]: https://github.com/enso-org/enso/pull/4067 [4110]: https://github.com/enso-org/enso/pull/4110 [4111]: https://github.com/enso-org/enso/pull/4111 [5591]: https://github.com/enso-org/enso/pull/5591 @@ -811,11 +812,12 @@ [5791]: https://github.com/enso-org/enso/pull/5791 [5900]: https://github.com/enso-org/enso/pull/5900 [5966]: https://github.com/enso-org/enso/pull/5966 +[5998]: https://github.com/enso-org/enso/pull/5998 [6067]: https://github.com/enso-org/enso/pull/6067 [6151]: https://github.com/enso-org/enso/pull/6151 [6171]: 
https://github.com/enso-org/enso/pull/6171 -[5998]: https://github.com/enso-org/enso/pull/5998 [6335]: https://github.com/enso-org/enso/pull/6335 +[6372]: https://github.com/enso-org/enso/pull/6372 # Enso 2.0.0-alpha.18 (2021-10-12) diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/WarningBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/WarningBenchmarks.java new file mode 100644 index 000000000000..da08d9e081b6 --- /dev/null +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/WarningBenchmarks.java @@ -0,0 +1,101 @@ +package org.enso.interpreter.bench.benchmarks.semantic; + +import org.enso.interpreter.test.TestBase; +import org.enso.polyglot.MethodNames; +import org.graalvm.polyglot.Context; +import org.graalvm.polyglot.Value; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.BenchmarkParams; + +import java.io.IOException; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +@BenchmarkMode(Mode.AverageTime) +@Fork(1) +@Warmup(iterations = 5, time = 1) +@Measurement(iterations = 3, time = 3) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@State(Scope.Benchmark) +public class WarningBenchmarks extends TestBase { + private static final int INPUT_VEC_SIZE = 10000; + private Context ctx; + private Value vecSumBench; + + private Value createVec; + private Value noWarningsVec; + private Value sameWarningVec; + + private Value elem; + + 
private Value elemWithWarning; + + private String benchmarkName; + + @Setup + public void initializeBench(BenchmarkParams params) throws IOException { + ctx = createDefaultContext(); + + benchmarkName = params.getBenchmark().replaceFirst(".*\\.", ""); + + var code = """ + from Standard.Base import all + + vec_sum_bench : Vector Integer -> Integer + vec_sum_bench vec = + vec.fold 0 (x->y->x+y) + + create_vec size elem = + Vector.fill size elem + + elem = + 42 + + elem_with_warning = + x = 42 + Warning.attach "Foo!" x + """; + var src = SrcUtil.source(benchmarkName, code); + Value module = ctx.eval(src); + vecSumBench = Objects.requireNonNull(module.invokeMember(MethodNames.Module.EVAL_EXPRESSION, "vec_sum_bench")); + createVec = Objects.requireNonNull(module.invokeMember(MethodNames.Module.EVAL_EXPRESSION, "create_vec")); + elem = Objects.requireNonNull(module.invokeMember(MethodNames.Module.EVAL_EXPRESSION, "elem")); + elemWithWarning = Objects.requireNonNull(module.invokeMember(MethodNames.Module.EVAL_EXPRESSION, "elem_with_warning")); + noWarningsVec = createVec.execute(INPUT_VEC_SIZE, elem); + sameWarningVec = createVec.execute(INPUT_VEC_SIZE, elemWithWarning); + } + + @TearDown + public void cleanup() { + ctx.close(true); + } + + @Benchmark + public void noWarningsVecSum() { + Value res = vecSumBench.execute(noWarningsVec); + checkResult(res); + } + + @Benchmark + public void sameWarningVecSum() { + Value res = vecSumBench.execute(sameWarningVec); + checkResult(res); + } + + private static void checkResult(Value res) { + if (res.asInt() != INPUT_VEC_SIZE*42) { + throw new AssertionError("Expected result: " + INPUT_VEC_SIZE*42 + ", got: " + res.asInt()); + } + } + +} diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeConversionNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeConversionNode.java index 0f397f0871ba..d497adeee9d8 100644 --- 
a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeConversionNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeConversionNode.java @@ -142,7 +142,7 @@ Object doWarning( int thatArgumentPosition, @Cached IndirectInvokeConversionNode childDispatch) { arguments[thatArgumentPosition] = that.getValue(); - ArrayRope warnings = that.getReassignedWarnings(this); + ArrayRope warnings = that.getReassignedWarningsAsRope(this); Object result = childDispatch.execute( frame, @@ -156,7 +156,7 @@ Object doWarning( argumentsExecutionMode, isTail, thatArgumentPosition); - return WithWarnings.prependTo(result, warnings); + return WithWarnings.appendTo(result, warnings); } @Specialization(guards = "interop.isString(that)") diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeMethodNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeMethodNode.java index 6a22a01234e8..79fafca1515c 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeMethodNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeMethodNode.java @@ -121,7 +121,7 @@ Object doWarning( int thisArgumentPosition, @Cached IndirectInvokeMethodNode childDispatch) { arguments[thisArgumentPosition] = self.getValue(); - ArrayRope warnings = self.getReassignedWarnings(this); + ArrayRope warnings = self.getReassignedWarningsAsRope(this); Object result = childDispatch.execute( frame, @@ -134,7 +134,7 @@ Object doWarning( argumentsExecutionMode, isTail, thisArgumentPosition); - return WithWarnings.prependTo(result, warnings); + return WithWarnings.appendTo(result, warnings); } @Specialization diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java index 8916c6a26f29..f12789694ad5 
100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java @@ -297,7 +297,7 @@ public Object invokeWarnings( if (result instanceof DataflowError) { return result; } else if (result instanceof WithWarnings withWarnings) { - return withWarnings.prepend(extracted); + return withWarnings.append(extracted); } else { return WithWarnings.wrap(result, extracted); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java index 6af3ce73ee8d..3df215b17fb4 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java @@ -172,10 +172,10 @@ Object doWarning( } } arguments[thatArgumentPosition] = that.getValue(); - ArrayRope warnings = that.getReassignedWarnings(this); + ArrayRope warnings = that.getReassignedWarningsAsRope(this); Object result = childDispatch.execute(frame, state, conversion, self, that.getValue(), arguments); - return WithWarnings.prependTo(result, warnings); + return WithWarnings.appendTo(result, warnings); } @Specialization(guards = "interop.isString(that)") diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java index 74a672bd9fa8..fa58b5875a5f 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java @@ -275,7 +275,7 @@ Object doWarning( arguments[thisArgumentPosition] = selfWithoutWarnings; Object result = childDispatch.execute(frame, state, symbol, selfWithoutWarnings, arguments); - return 
WithWarnings.prependTo(result, arrOfWarnings); + return WithWarnings.appendTo(result, arrOfWarnings); } @ExplodeLoop @@ -327,7 +327,7 @@ Object doPolyglot( Object res = hostMethodCallNode.execute(polyglotCallType, symbol.getName(), self, args); if (anyWarnings) { anyWarningsProfile.enter(); - res = WithWarnings.prependTo(res, accumulatedWarnings); + res = WithWarnings.appendTo(res, accumulatedWarnings); } return res; } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/EnsoContext.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/EnsoContext.java index d0bf6a65a6bb..1cff5992521e 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/EnsoContext.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/EnsoContext.java @@ -504,7 +504,7 @@ public TruffleLogger getLogger(Class klass) { * *

The counter is used to track the creation time of warnings. */ - public long clockTick() { + public long nextSequenceId() { return clock.getAndIncrement(); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/Array.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/Array.java index 87601a4dbe00..6417441ddc3d 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/Array.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/Array.java @@ -19,6 +19,7 @@ import java.util.Arrays; import org.enso.interpreter.runtime.error.WithWarnings; +import org.graalvm.collections.EconomicSet; /** A primitive boxed array type for use in the runtime. */ @ExportLibrary(InteropLibrary.class) @@ -28,6 +29,7 @@ public final class Array implements TruffleObject { private final Object[] items; private Boolean withWarnings; + private Warning[] cachedWarnings; /** * Creates a new array @@ -203,13 +205,22 @@ boolean hasWarnings(@CachedLibrary(limit = "3") WarningsLibrary warnings) { @ExportMessage Warning[] getWarnings(Node location, @CachedLibrary(limit = "3") WarningsLibrary warnings) throws UnsupportedMessageException { - ArrayRope ropeOfWarnings = new ArrayRope<>(); + if (cachedWarnings == null) { + cachedWarnings = Warning.fromSetToArray(collectAllWarnings(warnings, location)); + } + return cachedWarnings; + } + + @CompilerDirectives.TruffleBoundary + private EconomicSet collectAllWarnings(WarningsLibrary warnings, Node location) + throws UnsupportedMessageException { + EconomicSet setOfWarnings = EconomicSet.create(new WithWarnings.WarningEquivalence()); for (int i = 0; i < items.length; i++) { if (warnings.hasWarnings(items[i])) { - ropeOfWarnings = ropeOfWarnings.prepend(warnings.getWarnings(items[i], location)); + setOfWarnings.addAll(Arrays.asList(warnings.getWarnings(items[i], location))); } } - return ropeOfWarnings.toArray(Warning[]::new); + return setOfWarnings; } @ExportMessage diff --git 
a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/ArrayRope.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/ArrayRope.java index 9ee3a1c5c815..9b0c0be6329a 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/ArrayRope.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/ArrayRope.java @@ -1,5 +1,7 @@ package org.enso.interpreter.runtime.data; +import com.oracle.truffle.api.CompilerDirectives; + import java.util.Arrays; import java.util.function.Function; @@ -33,6 +35,7 @@ public final ArrayRope prepend(T... items) { return new ArrayRope<>(new ConcatSegment<>(new ArraySegment<>(items), this.segment)); } + @CompilerDirectives.TruffleBoundary public T[] toArray(Function genArray) { T[] res = genArray.apply(size()); writeArray(res); diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/Warning.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/Warning.java index 82e7d690d4cc..41716e1cbb33 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/Warning.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/Warning.java @@ -17,6 +17,7 @@ import org.enso.interpreter.runtime.data.ArrayRope; import org.enso.interpreter.runtime.data.Type; import org.enso.interpreter.runtime.library.dispatch.TypesLibrary; +import org.graalvm.collections.EconomicSet; import java.util.Arrays; import java.util.Comparator; @@ -27,18 +28,18 @@ public final class Warning implements TruffleObject { private final Object value; private final Object origin; private final ArrayRope reassignments; - private final long creationTime; + private final long sequenceId; - public Warning(Object value, Object origin, long creationTime) { - this(value, origin, creationTime, new ArrayRope<>()); + private Warning(Object value, Object origin, long sequenceId) { + this(value, origin, sequenceId, new ArrayRope<>()); } - public Warning( - Object value, 
Object origin, long creationTime, ArrayRope reassignments) { + private Warning( + Object value, Object origin, long sequenceId, ArrayRope reassignments) { this.value = value; this.origin = origin; this.reassignments = reassignments; - this.creationTime = creationTime; + this.sequenceId = sequenceId; } @Builtin.Method(name = "value", description = "Gets the payload of the warning.") @@ -57,7 +58,7 @@ public Object getOrigin() { autoRegister = false) @Builtin.Specialize public static Warning create(EnsoContext ctx, Object payload, Object origin) { - return new Warning(payload, origin, ctx.clockTick()); + return new Warning(payload, origin, ctx.nextSequenceId()); } @Builtin.Method(description = "Gets the list of locations where the warnings was reassigned.") @@ -73,7 +74,7 @@ public Array getReassignments() { @Builtin.Specialize public static WithWarnings attach( EnsoContext ctx, WithWarnings value, Object warning, Object origin) { - return value.prepend(new Warning(warning, origin, ctx.clockTick())); + return value.append(new Warning(warning, origin, ctx.nextSequenceId())); } @Builtin.Method( @@ -82,7 +83,7 @@ public static WithWarnings attach( autoRegister = false) @Builtin.Specialize(fallback = true) public static WithWarnings attach(EnsoContext ctx, Object value, Object warning, Object origin) { - return WithWarnings.wrap(value, new Warning(warning, origin, ctx.clockTick())); + return WithWarnings.wrap(value, new Warning(warning, origin, ctx.nextSequenceId())); } @Builtin.Method( @@ -93,10 +94,8 @@ public static WithWarnings attach(EnsoContext ctx, Object value, Object warning, @CompilerDirectives.TruffleBoundary public static Array getAll(WithWarnings value, WarningsLibrary warningsLib) { Warning[] warnings = value.getWarningsArray(warningsLib); - Arrays.sort(warnings, Comparator.comparing(Warning::getCreationTime).reversed()); - Object[] result = new Object[warnings.length]; - System.arraycopy(warnings, 0, result, 0, warnings.length); - return new Array(result); 
+ sortArray(warnings); + return new Array((Object[]) warnings); } @Builtin.Method( @@ -107,7 +106,9 @@ public static Array getAll(WithWarnings value, WarningsLibrary warningsLib) { public static Array getAll(Object value, WarningsLibrary warnings) { if (warnings.hasWarnings(value)) { try { - return new Array((Object[]) warnings.getWarnings(value, null)); + Warning[] arr = warnings.getWarnings(value, null); + sortArray(arr); + return new Array((Object[]) arr); } catch (UnsupportedMessageException e) { throw new IllegalStateException(e); } @@ -116,6 +117,17 @@ public static Array getAll(Object value, WarningsLibrary warnings) { } } + @CompilerDirectives.TruffleBoundary + private static void sortArray(Warning[] arr) { + Arrays.sort(arr, Comparator.comparing(Warning::getSequenceId).reversed()); + } + + /** Converts set to an array behing a truffle boundary. */ + @CompilerDirectives.TruffleBoundary + public static Warning[] fromSetToArray(EconomicSet set) { + return set.toArray(new Warning[set.size()]); + } + @Builtin.Method( name = "set_array", description = "Sets all the warnings associated with the value.", @@ -185,8 +197,8 @@ SourceSection getSourceLocation() throws UnsupportedMessageException { } } - public long getCreationTime() { - return creationTime; + public long getSequenceId() { + return sequenceId; } @CompilerDirectives.TruffleBoundary @@ -194,7 +206,7 @@ public Warning reassign(Node location) { RootNode root = location.getRootNode(); SourceSection section = location.getEncapsulatingSourceSection(); Reassignment reassignment = new Reassignment(root.getName(), section); - return new Warning(value, origin, creationTime, reassignments.prepend(reassignment)); + return new Warning(value, origin, sequenceId, reassignments.prepend(reassignment)); } @ExportMessage diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/WarningsLibrary.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/WarningsLibrary.java index 
dd079a317d11..31fc820d917d 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/WarningsLibrary.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/WarningsLibrary.java @@ -42,7 +42,7 @@ public boolean hasWarnings(Object receiver) { } /** - * Returns all warnings associated with the receiver. + * Returns all unique warnings associated with the receiver. * * @param receiver the receiver to analyze * @param location optional parameter specifying the node to which the warnings should be diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/WithWarnings.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/WithWarnings.java index dc6c17d40cef..d80981d4058a 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/WithWarnings.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/WithWarnings.java @@ -1,5 +1,6 @@ package org.enso.interpreter.runtime.error; +import com.oracle.truffle.api.CompilerDirectives; import org.enso.interpreter.runtime.data.ArrayRope; import org.enso.interpreter.runtime.library.dispatch.TypesLibrary; @@ -11,22 +12,27 @@ import com.oracle.truffle.api.library.Message; import com.oracle.truffle.api.library.ReflectionLibrary; import com.oracle.truffle.api.nodes.Node; +import org.graalvm.collections.EconomicSet; +import org.graalvm.collections.Equivalence; + +import java.util.Arrays; @ExportLibrary(TypesLibrary.class) @ExportLibrary(WarningsLibrary.class) @ExportLibrary(ReflectionLibrary.class) public final class WithWarnings implements TruffleObject { - private final ArrayRope warnings; + private final EconomicSet warnings; private final Object value; private WithWarnings(Object value, Warning... 
warnings) { assert !(value instanceof WithWarnings); - this.warnings = new ArrayRope<>(warnings); + this.warnings = createSetFromArray(warnings); this.value = value; } - private WithWarnings(Object value, ArrayRope warnings) { - this.warnings = warnings; + private WithWarnings(Object value, EconomicSet warnings, Warning... additionalWarnings) { + assert !(value instanceof WithWarnings); + this.warnings = cloneSetAndAppend(warnings, additionalWarnings); this.value = value; } @@ -43,36 +49,25 @@ public Object getValue() { } public WithWarnings append(Warning... newWarnings) { - return new WithWarnings(value, warnings.append(newWarnings)); + return new WithWarnings(value, warnings, newWarnings); } public WithWarnings append(ArrayRope newWarnings) { - return new WithWarnings(value, warnings.append(newWarnings)); - } - - public WithWarnings prepend(Warning... newWarnings) { - return new WithWarnings(value, warnings.prepend(newWarnings)); - } - - public WithWarnings prepend(ArrayRope newWarnings) { - return new WithWarnings(value, warnings.prepend(newWarnings)); + return new WithWarnings(value, warnings, newWarnings.toArray(Warning[]::new)); } public Warning[] getWarningsArray(WarningsLibrary warningsLibrary) { - Warning[] warningsArr = warnings.toArray(Warning[]::new); Warning[] allWarnings; - if (warningsLibrary != null && warningsLibrary.hasWarnings(value)) { try { - Warning[] valuesWarnings = warningsLibrary.getWarnings(value, null); - allWarnings = new Warning[valuesWarnings.length + warningsArr.length]; - System.arraycopy(warningsArr, 0, allWarnings, 0, warningsArr.length); - System.arraycopy(valuesWarnings, 0, allWarnings, warningsArr.length, valuesWarnings.length); + Warning[] valueWarnings = warningsLibrary.getWarnings(value, null); + EconomicSet tmp = cloneSetAndAppend(warnings, valueWarnings); + allWarnings = Warning.fromSetToArray(tmp); } catch (UnsupportedMessageException e) { throw new IllegalStateException(e); } } else { - allWarnings = warningsArr; + 
allWarnings = Warning.fromSetToArray(warnings); } return allWarnings; } @@ -82,37 +77,29 @@ public int getWarningsCount() { return warnings.size(); } - public ArrayRope getReassignedWarnings(Node location) { - return getReassignedWarnings(location, null); + public ArrayRope getReassignedWarningsAsRope(Node location) { + return new ArrayRope<>(getReassignedWarnings(location, null)); } - public ArrayRope getReassignedWarnings(Node location, WarningsLibrary warningsLibrary) { + public Warning[] getReassignedWarnings(Node location, WarningsLibrary warningsLibrary) { Warning[] warnings = getWarningsArray(warningsLibrary); for (int i = 0; i < warnings.length; i++) { warnings[i] = warnings[i].reassign(location); } - return new ArrayRope<>(warnings); + return warnings; } public static WithWarnings appendTo(Object target, ArrayRope warnings) { if (target instanceof WithWarnings) { - return ((WithWarnings) target).append(warnings); + return ((WithWarnings) target).append(warnings.toArray(Warning[]::new)); } else { - return new WithWarnings(target, warnings); + return new WithWarnings(target, warnings.toArray(Warning[]::new)); } } - public static WithWarnings prependTo(Object target, ArrayRope warnings) { + public static WithWarnings appendTo(Object target, Warning[] warnings) { if (target instanceof WithWarnings) { - return ((WithWarnings) target).prepend(warnings); - } else { - return new WithWarnings(target, warnings); - } - } - - public static WithWarnings prependTo(Object target, Warning[] warnings) { - if (target instanceof WithWarnings) { - return ((WithWarnings) target).prepend(warnings); + return ((WithWarnings) target).append(warnings); } else { return new WithWarnings(target, warnings); } @@ -133,9 +120,9 @@ boolean hasWarnings() { Warning[] getWarnings( Node location, @CachedLibrary(limit = "3") WarningsLibrary warningsLibrary) { if (location != null) { - return getReassignedWarnings(location, warningsLibrary).toArray(Warning[]::new); + return 
getReassignedWarnings(location, warningsLibrary); } else { - return warnings.toArray(Warning[]::new); + return Warning.fromSetToArray(warnings); } } @@ -154,6 +141,37 @@ boolean hasSpecialDispatch() { return true; } + public static class WarningEquivalence extends Equivalence { + + @Override + public boolean equals(Object a, Object b) { + if (a instanceof Warning thisObj && b instanceof Warning thatObj) { + return thisObj.getSequenceId() == thatObj.getSequenceId(); + } + return false; + } + + @Override + public int hashCode(Object o) { + return (int)((Warning)o).getSequenceId(); + } + } + + @CompilerDirectives.TruffleBoundary + private EconomicSet createSetFromArray(Warning[] entries) { + EconomicSet set = EconomicSet.create(new WarningEquivalence()); + set.addAll(Arrays.stream(entries).iterator()); + return set; + } + + @CompilerDirectives.TruffleBoundary + private EconomicSet cloneSetAndAppend(EconomicSet initial, Warning[] entries) { + EconomicSet set = EconomicSet.create(new WarningEquivalence()); + set.addAll(initial.iterator()); + set.addAll(Arrays.stream(entries).iterator()); + return set; + } + @Override public String toString() { return "WithWarnings{" + value + " + " + warnings.size() + " warnings}"; diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/ArrayTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/ArrayTest.java index d1e26773c463..d9ef270d68bb 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/ArrayTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/ArrayTest.java @@ -1,34 +1,25 @@ package org.enso.interpreter.test; -import java.io.ByteArrayOutputStream; import java.net.URI; -import java.nio.file.Paths; -import java.util.Map; -import org.enso.polyglot.RuntimeOptions; import org.graalvm.polyglot.Context; -import org.graalvm.polyglot.Language; import org.graalvm.polyglot.Source; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; -import 
org.junit.Before; + +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; -public class ArrayTest { - private Context ctx; - - @Before - public void prepareCtx() { - this.ctx = Context.newBuilder() - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new ByteArrayOutputStream()) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath() - ).build(); - final Map langs = ctx.getEngine().getLanguages(); - assertNotNull("Enso found: " + langs, langs.get("enso")); +public class ArrayTest extends TestBase { + private static Context ctx; + + @BeforeClass + public static void prepareCtx() { + ctx = createDefaultContext(); + } + + @AfterClass + public static void disposeCtx() { + ctx.close(); } @Test diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/BigNumberTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/BigNumberTest.java index a5d824366439..668b6c5acca1 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/BigNumberTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/BigNumberTest.java @@ -1,42 +1,35 @@ package org.enso.interpreter.test; -import java.io.ByteArrayOutputStream; import java.math.BigInteger; import java.net.URI; import java.net.URISyntaxException; -import java.nio.file.Paths; import java.util.ArrayList; -import java.util.Map; -import org.enso.polyglot.RuntimeOptions; import org.graalvm.polyglot.Context; -import org.graalvm.polyglot.Language; import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; -import org.junit.Before; + +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; -public class BigNumberTest { - private Context ctx; - - @Before - 
public void prepareCtx() { - this.ctx = Context.newBuilder() - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new ByteArrayOutputStream()) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath() - ).build(); - final Map langs = ctx.getEngine().getLanguages(); - assertNotNull("Enso found: " + langs, langs.get("enso")); +public class BigNumberTest extends TestBase { + private static Context ctx; + + @BeforeClass + public static void prepareCtx() { + ctx = createDefaultContext(); } + @AfterClass + public static void disposeCtx() { + ctx.close(); + } + + @Test public void evaluation() throws Exception { final String code = """ diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/ForeignMethodInvokeTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/ForeignMethodInvokeTest.java index 000e87f4104d..6e7af79eb8d1 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/ForeignMethodInvokeTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/ForeignMethodInvokeTest.java @@ -1,40 +1,24 @@ package org.enso.interpreter.test; import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeNotNull; -import java.io.ByteArrayOutputStream; -import java.nio.file.Paths; -import java.util.Map; -import org.enso.polyglot.RuntimeOptions; import org.graalvm.polyglot.Context; -import org.graalvm.polyglot.Language; import org.graalvm.polyglot.Value; -import org.junit.After; -import org.junit.Before; +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; -public class ForeignMethodInvokeTest { - private Context ctx; +public class ForeignMethodInvokeTest extends TestBase { + private static Context ctx; - @Before - public void prepareCtx() { - this.ctx = Context.newBuilder("enso") - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new 
ByteArrayOutputStream()) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath() - ).build(); - final Map langs = ctx.getEngine().getLanguages(); - assumeNotNull("Enso not found: " + langs, langs.get("enso")); + @BeforeClass + public static void prepareCtx() { + ctx = createDefaultContext(); } - @After - public void disposeCtx() { - this.ctx.close(); + @AfterClass + public static void disposeCtx() { + ctx.close(); } @Test diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/LazyAtomFieldTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/LazyAtomFieldTest.java index bc28cd100313..49290373aa40 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/LazyAtomFieldTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/LazyAtomFieldTest.java @@ -4,41 +4,37 @@ import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; -import java.nio.file.Paths; -import java.util.Map; import java.util.stream.Collectors; import org.enso.polyglot.MethodNames; -import org.enso.polyglot.RuntimeOptions; import org.graalvm.polyglot.Context; -import org.graalvm.polyglot.Language; import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; + +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -public class LazyAtomFieldTest { +public class LazyAtomFieldTest extends TestBase { private static final ByteArrayOutputStream out = new ByteArrayOutputStream(); - private Context ctx; + private static Context ctx; + + @BeforeClass + public static void prepareCtx() { + ctx = createDefaultContext(out); + } @Before - public void prepareCtx() { - this.ctx = Context.newBuilder() - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new 
ByteArrayOutputStream()) - .out(out) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath() - ).build(); - final Map langs = ctx.getEngine().getLanguages(); - assertNotNull("Enso found: " + langs, langs.get("enso")); + public void resetOut() { out.reset(); } + @AfterClass + public static void disposeCtx() { + ctx.close(); + } + @Test public void evaluation() throws Exception { final String code = """ diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/ListTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/ListTest.java index fbe01302cab5..2ddb17a140d2 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/ListTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/ListTest.java @@ -1,24 +1,21 @@ package org.enso.interpreter.test; -import java.io.ByteArrayOutputStream; import java.math.BigInteger; import java.net.URI; import java.net.URISyntaxException; -import java.nio.file.Paths; import java.util.ArrayList; -import java.util.Map; -import org.enso.polyglot.RuntimeOptions; import org.graalvm.polyglot.Context; -import org.graalvm.polyglot.Language; import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; + +import org.junit.After; import org.junit.Before; import org.junit.Test; -public class ListTest { +public class ListTest extends TestBase { private Context ctx; private final int size = 100_000; private Value generator; @@ -31,18 +28,7 @@ public class ListTest { @Before public void prepareCtx() throws Exception { - this.ctx = Context.newBuilder() - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new ByteArrayOutputStream()) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath() - 
).build(); - - final Map langs = ctx.getEngine().getLanguages(); - assertNotNull("Enso found: " + langs, langs.get("enso")); + this.ctx = createDefaultContext(); final String code = """ from Standard.Base.Data.List.List import Cons, Nil @@ -74,6 +60,11 @@ public void prepareCtx() throws Exception { asText = evalCode(code, "as_text"); } + @After + public void disposeCtx() { + this.ctx.close(); + } + @Test public void mapPlusOneAndIterate() throws Exception { var list = generator.execute(size); diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/MetaObjectTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/MetaObjectTest.java index 142c43bc23a6..d317e453eacb 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/MetaObjectTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/MetaObjectTest.java @@ -16,20 +16,20 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import org.junit.After; -import org.junit.Before; +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; public class MetaObjectTest extends TestBase { - private Context ctx; + private static Context ctx; - @Before - public void prepareCtx() { + @BeforeClass + public static void prepareCtx() { ctx = createDefaultContext(); } - @After - public void disposeCtx() { + @AfterClass + public static void disposeCtx() { ctx.close(); } diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/PolyglotErrorTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/PolyglotErrorTest.java index 415bdf75d13e..f67ebfcf27d0 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/PolyglotErrorTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/PolyglotErrorTest.java @@ -1,20 +1,16 @@ package org.enso.interpreter.test; -import java.io.ByteArrayOutputStream; -import java.nio.file.Paths; -import java.util.Map; -import org.enso.polyglot.RuntimeOptions; import 
org.graalvm.polyglot.Context; -import org.graalvm.polyglot.Language; import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; + +import org.junit.After; import org.junit.Before; import org.junit.Test; -public class PolyglotErrorTest { +public class PolyglotErrorTest extends TestBase { private Context ctx; private Value panic; @@ -24,17 +20,7 @@ public static String bar(Object o) { @Before public void prepareCtx() throws Exception { - this.ctx = Context.newBuilder() - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new ByteArrayOutputStream()) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath() - ).build(); - final Map langs = ctx.getEngine().getLanguages(); - assertNotNull("Enso found: " + langs, langs.get("enso")); + this.ctx = createDefaultContext(); var code = """ import Standard.Base.Panic.Panic @@ -102,6 +88,11 @@ Panic.catch Illegal_State (PolyglotErrorTest.bar (TypeCc.Cc 'z')) caught_panic-> assertTrue("It is a function", this.panic.canExecute()); } + @After + public void disposeCtx() { + this.ctx.close(); + } + @Test public void panic1() { var v = panic.execute(1); diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/PrintTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/PrintTest.java index a896d6a77d8d..a8f90659eed0 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/PrintTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/PrintTest.java @@ -1,11 +1,10 @@ package org.enso.interpreter.test; import org.enso.polyglot.MethodNames; -import org.enso.polyglot.RuntimeOptions; import org.graalvm.polyglot.Context; -import org.graalvm.polyglot.Language; import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; +import 
org.junit.After; import org.junit.Before; import org.junit.Test; @@ -13,32 +12,24 @@ import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; -import java.nio.file.Paths; -import java.util.Map; import static org.junit.Assert.*; -public class PrintTest { +public class PrintTest extends TestBase { private static final ByteArrayOutputStream out = new ByteArrayOutputStream(); private Context ctx; @Before public void prepareCtx() { - this.ctx = Context.newBuilder() - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new ByteArrayOutputStream()) - .out(out) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath() - ).build(); - final Map langs = ctx.getEngine().getLanguages(); - assertNotNull("Enso found: " + langs, langs.get("enso")); + ctx = createDefaultContext(out); out.reset(); } + @After + public void disposeCtx() { + ctx.close(); + } + private void checkPrint(String code, String expected) throws Exception { Value result = evalCode(code, "test"); assertTrue("should return Nothing", result.isNull()); diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/TestBase.java b/engine/runtime/src/test/java/org/enso/interpreter/test/TestBase.java index 4394b4959136..e67f6fa07f6b 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/TestBase.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/TestBase.java @@ -10,6 +10,7 @@ import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.RootNode; import java.io.ByteArrayOutputStream; +import java.io.OutputStream; import java.nio.file.Paths; import java.util.Map; import java.util.concurrent.Callable; @@ -24,21 +25,30 @@ public abstract class TestBase { protected static Context createDefaultContext() { - var context = - Context.newBuilder() - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new 
ByteArrayOutputStream()) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath()) - .build(); + var context = defaultContextBuilder().build(); final Map langs = context.getEngine().getLanguages(); assertNotNull("Enso found: " + langs, langs.get("enso")); return context; } + protected static Context createDefaultContext(OutputStream out) { + var context = defaultContextBuilder().out(out).build(); + final Map langs = context.getEngine().getLanguages(); + assertNotNull("Enso found: " + langs, langs.get("enso")); + return context; + } + + private static Context.Builder defaultContextBuilder() { + return Context.newBuilder() + .allowExperimentalOptions(true) + .allowIO(true) + .allowAllAccess(true) + .logHandler(new ByteArrayOutputStream()) + .option( + RuntimeOptions.LANGUAGE_HOME_OVERRIDE, + Paths.get("../../distribution/component").toFile().getAbsolutePath()); + } + /** * Executes the given callable in the given context. A necessity for executing artificially * created Truffle ASTs. 
diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/TypeMembersTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/TypeMembersTest.java index f1e13ed129d3..699d9c21e3a6 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/TypeMembersTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/TypeMembersTest.java @@ -1,13 +1,8 @@ package org.enso.interpreter.test; -import java.io.ByteArrayOutputStream; import java.net.URI; -import java.nio.file.Paths; -import java.util.Map; import java.util.Set; -import org.enso.polyglot.RuntimeOptions; import org.graalvm.polyglot.Context; -import org.graalvm.polyglot.Language; import org.graalvm.polyglot.PolyglotException; import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; @@ -16,27 +11,23 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; + +import org.junit.After; import org.junit.Before; import org.junit.Test; -public class TypeMembersTest { +public class TypeMembersTest extends TestBase { private Context ctx; @Before public void prepareCtx() { - this.ctx = Context.newBuilder() - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new ByteArrayOutputStream()) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath() - ).build(); - final Map langs = ctx.getEngine().getLanguages(); - assertNotNull("Enso found: " + langs, langs.get("enso")); + ctx = createDefaultContext(); } + @After + public void disposeCtx() { + ctx.close(); + } @Test public void checkAtomMembers() throws Exception { diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/VectorTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/VectorTest.java index 7546b60eee83..7479ab9f9eef 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/VectorTest.java +++ 
b/engine/runtime/src/test/java/org/enso/interpreter/test/VectorTest.java @@ -1,40 +1,31 @@ package org.enso.interpreter.test; -import java.io.ByteArrayOutputStream; import java.net.URI; -import java.nio.file.Paths; import java.util.BitSet; import java.util.List; -import java.util.Map; import java.util.function.Consumer; -import org.enso.polyglot.RuntimeOptions; import org.graalvm.polyglot.Context; -import org.graalvm.polyglot.Language; import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; import org.graalvm.polyglot.proxy.ProxyArray; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; -import org.junit.Before; + +import org.junit.AfterClass; +import org.junit.BeforeClass; import org.junit.Test; -public class VectorTest { - private Context ctx; - - @Before - public void prepareCtx() { - this.ctx = Context.newBuilder() - .allowExperimentalOptions(true) - .allowIO(true) - .allowAllAccess(true) - .logHandler(new ByteArrayOutputStream()) - .option( - RuntimeOptions.LANGUAGE_HOME_OVERRIDE, - Paths.get("../../distribution/component").toFile().getAbsolutePath() - ).build(); - final Map langs = ctx.getEngine().getLanguages(); - assertNotNull("Enso found: " + langs, langs.get("enso")); +public class VectorTest extends TestBase { + private static Context ctx; + + @BeforeClass + public static void prepareCtx() { + ctx = createDefaultContext(); + } + + @AfterClass + public static void disposeCtx() { + ctx.close(); } @Test diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/WarningsTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/WarningsTest.java index fbbe272005cc..32a267a77b3e 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/WarningsTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/WarningsTest.java @@ -1,17 +1,42 @@ package org.enso.interpreter.test; +import org.enso.interpreter.runtime.EnsoContext; 
import org.enso.interpreter.runtime.error.Warning; import org.enso.interpreter.runtime.error.WarningsLibrary; import org.enso.interpreter.runtime.error.WithWarnings; +import org.enso.polyglot.LanguageInfo; +import org.enso.polyglot.MethodNames; +import org.graalvm.polyglot.Context; +import org.junit.AfterClass; import org.junit.Assert; -import static org.junit.Assert.assertEquals; +import org.junit.BeforeClass; import org.junit.Test; -public class WarningsTest { +import static org.junit.Assert.assertEquals; + +public class WarningsTest extends TestBase { + + private static Context ctx; + + @BeforeClass + public static void initEnsoContext() { + ctx = createDefaultContext(); + } + + @AfterClass + public static void disposeContext() { + ctx.close(); + } + @Test public void doubleWithWarningsWrap() { - var warn1 = new Warning("w1", this, 1L); - var warn2 = new Warning("w2", this, 2L); + var ensoContext = + (EnsoContext) + ctx.getBindings(LanguageInfo.ID) + .invokeMember(MethodNames.TopScope.LEAK_CONTEXT) + .asHostObject(); + var warn1 = Warning.create(ensoContext, "w1", this); + var warn2 = Warning.create(ensoContext, "w2", this); var value = 42; var with1 = WithWarnings.wrap(42, warn1); diff --git a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/MethodProcessor.java b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/MethodProcessor.java index 734a644484ee..7e09c3b7c474 100644 --- a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/MethodProcessor.java +++ b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/MethodProcessor.java @@ -502,7 +502,7 @@ private boolean generateWarningsCheck( + arrayRead(argumentsArray, arg.getPosition()) + " = withWarnings.getValue();"); out.println( - " gatheredWarnings = gatheredWarnings.prepend(withWarnings.getReassignedWarnings(bodyNode));"); + " gatheredWarnings = gatheredWarnings.prepend(withWarnings.getReassignedWarningsAsRope(bodyNode));"); out.println(" }"); } return 
true; diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 0d7634759829..ead9985511b1 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -177,4 +177,11 @@ spec = file.delete_if_exists + Test.specify 'should not duplicate warnings' <| + c = Database.connect (SQLite In_Memory) + t0 = Table.new [["X", ["a", "bc", "def"]]] + t1 = c.upload_table "Tabela" t0 + t2 = t1.cast "X" (Value_Type.Char size=1) + Warning.get_all t2 . length . should_equal 1 + main = Test_Suite.run_main spec diff --git a/test/Tests/src/Semantic/Warnings_Spec.enso b/test/Tests/src/Semantic/Warnings_Spec.enso index 7cd337b7ab36..2d399139c5b6 100644 --- a/test/Tests/src/Semantic/Warnings_Spec.enso +++ b/test/Tests/src/Semantic/Warnings_Spec.enso @@ -19,6 +19,8 @@ My_Type.my_method self = self.a + self.b + self.c type Wrap Value foo +f x _ = Warning.attach "Baz!" x + type My_Fancy_Collection Value (x:Integer) @@ -388,5 +390,22 @@ spec = Test.group "Dataflow Warnings" <| Problems.assume_no_problems <| condition_1 False x3 Problems.assume_no_problems <| condition_2 False x3 + Test.specify "should only report unique warnings" <| + a = 1 + b = Warning.attach "Foo!" a + c = Warning.attach "Bar!" b + d = Warning.attach "Foo!" b + + result_1 = b + c + Warning.get_all result_1 . map (x-> x.value.to_text) . should_equal ["Bar!", "Foo!"] + + result_2 = b + b + b + Warning.get_all result_2 . length . should_equal 1 + + result_3 = b + b + d + Warning.get_all result_3 . map (x-> x.value.to_text) . should_equal ["Foo!", "Foo!"] + + result_4 = f a 1 + f a 2 + f a 3 + Warning.get_all result_4 . map (x-> x.value.to_text) . 
should_equal ["Baz!", "Baz!", "Baz!"] main = Test_Suite.run_main spec From ae3f9025e3599506723458f1c417b081060ae627 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Fri, 28 Apr 2023 09:18:37 +0200 Subject: [PATCH 11/34] Invoke instance methods for Any overrides (#6441) This change modifies method dispatch for methods that override Any's definitions. When an overrided method is invoked statically we call Any's method to stay consistent. This change primarily addresses the plethora of problems related to `to_text` invocations. It does not attempt to completely modify method dispatch logic. Closes #6300. --- .../Base/0.0.0-dev/src/Data/List.enso | 2 ++ .../Test/0.0.0-dev/src/Extensions.enso | 8 ++--- .../RuntimeVisualizationsTest.scala | 6 ++-- .../node/callable/InvokeMethodNode.java | 35 +++++++++++++++++-- .../callable/resolver/MethodResolverNode.java | 1 - .../enso/interpreter/runtime/data/Type.java | 4 +++ .../org/enso/interpreter/test/PrintTest.java | 4 +-- test/Tests/src/Semantic/Any_Spec.enso | 27 +++++++++++--- .../src/Semantic/Definitions/Any_Types.enso | 20 +++++++++++ .../Names.enso} | 0 test/Tests/src/Semantic/Meta_Spec.enso | 1 + test/Tests/src/Semantic/Names_Spec.enso | 20 +++++------ 12 files changed, 98 insertions(+), 30 deletions(-) create mode 100644 test/Tests/src/Semantic/Definitions/Any_Types.enso rename test/Tests/src/Semantic/{Names/Definitions.enso => Definitions/Names.enso} (100%) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/List.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/List.enso index ab4cb3ff9df5..0702fd08cc39 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/List.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/List.enso @@ -561,6 +561,8 @@ type List An error representing that the list is empty. type Empty_Error + Error + ## PRIVATE Pretty prints the empty error. 
diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso b/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso index f007ab9011cf..5c7793bbc78f 100644 --- a/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso +++ b/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso @@ -23,9 +23,7 @@ from project.Test import Test Any.should_fail_with : Any -> Integer -> Test_Result Any.should_fail_with self matcher frames_to_skip=0 = loc = Meta.get_source_location 1+frames_to_skip - matcher_text = case matcher.to_text of - text : Text -> text - _ -> Meta.meta matcher . to_text + matcher_text = matcher . to_text Test.fail ("Expected an error " + matcher_text + " but no error occurred, instead got: " + self.to_text + " (at " + loc + ").") ## Expect a function to fail with the provided dataflow error. @@ -48,9 +46,7 @@ Error.should_fail_with self matcher frames_to_skip=0 = caught = self.catch if caught == matcher || caught.is_a matcher then Nothing else loc = Meta.get_source_location 2+frames_to_skip - matcher_text = case matcher.to_text of - text : Text -> text - _ -> Meta.meta matcher . to_text + matcher_text = matcher . to_text Test.fail ("Expected error "+matcher_text+", but error " + caught.to_text + " has been returned (at " + loc + ").") ## Asserts that `self` value is equal to the expected value. 
diff --git a/engine/runtime-with-polyglot/src/test/scala/org/enso/interpreter/test/instrument/RuntimeVisualizationsTest.scala b/engine/runtime-with-polyglot/src/test/scala/org/enso/interpreter/test/instrument/RuntimeVisualizationsTest.scala index 96f20746b041..9a755b3b1ce0 100644 --- a/engine/runtime-with-polyglot/src/test/scala/org/enso/interpreter/test/instrument/RuntimeVisualizationsTest.scala +++ b/engine/runtime-with-polyglot/src/test/scala/org/enso/interpreter/test/instrument/RuntimeVisualizationsTest.scala @@ -2168,7 +2168,7 @@ class RuntimeVisualizationsTest val moduleName = "Enso_Test.Test.Main" val metadata = new Metadata - val idMain = metadata.addItem(106, 28) + val idMain = metadata.addItem(106, 34) val code = """import Standard.Base.Data.List @@ -2176,7 +2176,7 @@ class RuntimeVisualizationsTest |import Standard.Base.Error.Error | |main = - | Error.throw List.Empty_Error + | Error.throw List.Empty_Error.Error |""".stripMargin.linesIterator.mkString("\n") val contents = metadata.appendToCode(code) val mainFile = context.writeMain(contents) @@ -2259,7 +2259,7 @@ class RuntimeVisualizationsTest data } val stringified = new String(data) - stringified shouldEqual """{"kind":"Dataflow","message":"The List is empty. (at Main.main(Enso_Test.Test.Main:6:5-32)"}""" + stringified shouldEqual """{"kind":"Dataflow","message":"The List is empty. 
(at Main.main(Enso_Test.Test.Main:6:5-38)"}""" } it should "run visualisation default preprocessor" in { diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java index fa58b5875a5f..db3ca78ae294 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java @@ -11,10 +11,12 @@ import com.oracle.truffle.api.library.CachedLibrary; import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.nodes.Node; +import com.oracle.truffle.api.nodes.RootNode; import com.oracle.truffle.api.profiles.BranchProfile; import com.oracle.truffle.api.profiles.ConditionProfile; import com.oracle.truffle.api.source.SourceSection; import org.enso.interpreter.node.BaseNode; +import org.enso.interpreter.node.MethodRootNode; import org.enso.interpreter.node.callable.dispatch.InvokeFunctionNode; import org.enso.interpreter.node.callable.resolver.HostMethodCallNode; import org.enso.interpreter.node.callable.resolver.MethodResolverNode; @@ -102,19 +104,46 @@ public void setTailStatus(TailStatus tailStatus) { public abstract Object execute( VirtualFrame frame, State state, UnresolvedSymbol symbol, Object self, Object[] arguments); - @Specialization(guards = {"dispatch.hasType(self)", "!dispatch.hasSpecialDispatch(self)"}) + @Specialization(guards = {"typesLibrary.hasType(self)", "!typesLibrary.hasSpecialDispatch(self)"}) Object doFunctionalDispatch( VirtualFrame frame, State state, UnresolvedSymbol symbol, Object self, Object[] arguments, - @CachedLibrary(limit = "10") TypesLibrary dispatch, + @CachedLibrary(limit = "10") TypesLibrary typesLibrary, @Cached MethodResolverNode methodResolverNode) { - Function function = methodResolverNode.expectNonNull(self, dispatch.getType(self), symbol); + + Type selfTpe = 
typesLibrary.getType(self); + Function function = methodResolverNode.expectNonNull(self, selfTpe, symbol); + + RootNode where = function.getCallTarget().getRootNode(); + // If both Any and the type where `function` is declared, define `symbol` + // and the method is invoked statically, i.e. type of self is the eigentype, + // then we want to disambiguate method resolution by always resolved to the one in Any. + if (where instanceof MethodRootNode node && typeCanOverride(node, EnsoContext.get(this))) { + Function anyFun = symbol.getScope().lookupMethodDefinition(EnsoContext.get(this).getBuiltins().any(), symbol.getName()); + if (anyFun != null) { + function = anyFun; + } + } return invokeFunctionNode.execute(function, frame, state, arguments); } + private boolean typeCanOverride(MethodRootNode node, EnsoContext ctx) { + Type methodOwnerType = node.getType(); + Builtins builtins = ctx.getBuiltins(); + Type any = builtins.any(); + Type warning = builtins.warning(); + Type panic = builtins.panic(); + return methodOwnerType.isEigenType() + + && builtins.nothing() != methodOwnerType + && any.getEigentype() != methodOwnerType + && panic.getEigentype() != methodOwnerType + && warning.getEigentype() != methodOwnerType; + } + @Specialization Object doDataflowError( VirtualFrame frame, diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/resolver/MethodResolverNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/resolver/MethodResolverNode.java index 1415e7b939f8..38f8bc9a755c 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/resolver/MethodResolverNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/resolver/MethodResolverNode.java @@ -49,7 +49,6 @@ Function resolveCached( } @Specialization(replaces = "resolveCached") - @CompilerDirectives.TruffleBoundary Function resolveUncached(Type self, UnresolvedSymbol symbol) { return symbol.resolveFor(self); } diff --git 
a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/Type.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/Type.java index fb9d4b2f580f..8cdef14ae5e5 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/Type.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/Type.java @@ -307,6 +307,10 @@ public Type getEigentype() { return eigentype; } + public boolean isEigenType() { + return eigentype == this; + } + public void registerConstructor(AtomConstructor constructor) { constructors.put(constructor.getName(), constructor); } diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/PrintTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/PrintTest.java index a8f90659eed0..4af2a5b6d46f 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/PrintTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/PrintTest.java @@ -228,8 +228,6 @@ public void testPrintToTextStaticMethod() throws Exception { IO.println a """; - checkPrint(code, "My_Object.type.to_text[test:6-16]"); - // We would want the following result: - //checkPrint(code, "My_Object"); + checkPrint(code, "My_Object"); } } diff --git a/test/Tests/src/Semantic/Any_Spec.enso b/test/Tests/src/Semantic/Any_Spec.enso index 8220798e7327..46a2b06cc95c 100644 --- a/test/Tests/src/Semantic/Any_Spec.enso +++ b/test/Tests/src/Semantic/Any_Spec.enso @@ -1,10 +1,8 @@ from Standard.Base import all -from Standard.Test import Test +from Standard.Test import Test, Test_Suite import Standard.Test.Extensions - -type My_Type - Value a +from project.Semantic.Definitions.Any_Types import all spec = Test.group "Any.map_nothing" <| @@ -39,3 +37,24 @@ spec = (1 != 2) . should_be_true (1 != 1) . should_be_false + Test.group "Any's methods" <| + Test.specify "should not be overridable when called statically" <| + My_Type.Value 33 . x . should_equal "Any:(My_Type.Value 33)" + With_X.Value 44 . x . 
should_equal "With_X:(With_X.Value 44)" + With_Y.Value 44 . x . should_equal "With_Y:With_Y(44)" + Any.x . to_text . should_equal "Any.type.x[Any_Types.enso:6-26]" + My_Type.x . should_equal "Any:My_Type" + With_X.x . to_text . should_equal "Any:With_X" + With_X.y . to_text . should_equal "With_X.type.y[Any_Types.enso:12-32]" + With_Y.x . to_text . should_equal "Any:With_Y" + With_Y.y . to_text . should_equal "With_Y.type.y[Any_Types.enso:18-38]" + With_X.to_text . to_text . should_equal "With_X" + With_Y.to_text . to_text . should_equal "With_Y" + Any.x self=With_X . should_equal "Any:With_X" + Any.x self=With_Y . should_equal "Any:With_Y" + Any.x (My_Type.Value 22) . should_equal "Any:(My_Type.Value 22)" + Any.x (With_X.Value 22) . should_equal "Any:(With_X.Value 22)" + Any.x (With_Y.Value 22) . should_equal "Any:With_Y(22)" + Date.to_display_text . should_equal "Date" + +main = Test_Suite.run_main spec diff --git a/test/Tests/src/Semantic/Definitions/Any_Types.enso b/test/Tests/src/Semantic/Definitions/Any_Types.enso new file mode 100644 index 000000000000..16f3ba8d9344 --- /dev/null +++ b/test/Tests/src/Semantic/Definitions/Any_Types.enso @@ -0,0 +1,20 @@ +from Standard.Base import all + +type My_Type + Value a + +Any.x self = "Any:" + self.to_text + +type With_X + Value b + + x self = "With_X:" + self.to_text + y self = "With_X:" + self.to_text + +type With_Y + Value b + + x self = "With_Y:" + self.to_text + y self = "With_Y:" + self.to_text + + to_text self = "With_Y("+self.b.to_text+")" diff --git a/test/Tests/src/Semantic/Names/Definitions.enso b/test/Tests/src/Semantic/Definitions/Names.enso similarity index 100% rename from test/Tests/src/Semantic/Names/Definitions.enso rename to test/Tests/src/Semantic/Definitions/Names.enso diff --git a/test/Tests/src/Semantic/Meta_Spec.enso b/test/Tests/src/Semantic/Meta_Spec.enso index 2779a0bfa608..b6e8c8f44d27 100644 --- a/test/Tests/src/Semantic/Meta_Spec.enso +++ b/test/Tests/src/Semantic/Meta_Spec.enso @@ -370,4 
+370,5 @@ spec = (.is_nothing) . is_a Function . should_equal True Meta.type_of (_.is_nothing) . should_equal Function Meta.type_of (.is_nothing) . should_equal Function + main = Test_Suite.run_main spec diff --git a/test/Tests/src/Semantic/Names_Spec.enso b/test/Tests/src/Semantic/Names_Spec.enso index 524aec7a1b39..bd512318838d 100644 --- a/test/Tests/src/Semantic/Names_Spec.enso +++ b/test/Tests/src/Semantic/Names_Spec.enso @@ -1,13 +1,13 @@ from Standard.Base import all -from project.Semantic.Names.Definitions import another_method, another_constant, method_with_local_vars, Bar_Data, Bar -import project.Semantic.Names.Definitions +from project.Semantic.Definitions.Names import another_method, another_constant, method_with_local_vars, Bar_Data, Bar +import project.Semantic.Definitions.Names from Standard.Test import Test import Standard.Test.Extensions -Definitions.Foo.my_method self = case self of - Definitions.Foo.Value x y z -> x * y * z +Names.Foo.my_method self = case self of + Names.Foo.Value x y z -> x * y * z get_foo module = module.Foo @@ -18,14 +18,14 @@ add_one (x = 0) = x + 1 spec = Test.group "Qualified Names" <| Test.specify "should allow to call constructors in a qualified manner" <| - Definitions.Foo.Value 1 2 3 . sum . should_equal 6 + Names.Foo.Value 1 2 3 . sum . should_equal 6 Test.specify "should allow pattern matching in a qualified manner" <| - v = Definitions.Foo.Value 1 2 3 + v = Names.Foo.Value 1 2 3 res = case v of - Definitions.Foo.Value a b c -> a + b + c + Names.Foo.Value a b c -> a + b + c res.should_equal 6 Test.specify "should allow defining methods on qualified names" <| - v = Definitions.Foo.Value 2 3 5 + v = Names.Foo.Value 2 3 5 v.my_method.should_equal 30 Test.group "Lowercase Methods" <| Test.specify "should allow calling methods without a target" <| @@ -38,8 +38,8 @@ spec = another_method 10 . should_equal 10 another_constant . 
should_equal 10 Test.specify "should allow calling methods with fully qualified module name" <| - (Definitions.another_method 10).should_equal 10 - v = Definitions.another_method + (Names.another_method 10).should_equal 10 + v = Names.another_method v 10 . should_equal 10 Test.specify "should be resolved correctly in the presence of variables with the same name" <| method_with_local_vars 1 . should_equal 13 From 8cd28fc8bdf0fe72218eabad60f58fa57c540900 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Fri, 28 Apr 2023 14:03:43 +0200 Subject: [PATCH 12/34] Welcome Pavel to engine team (#6462) --- .github/CODEOWNERS | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 09e1cda57ad0..f87f6add305c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -15,7 +15,7 @@ Cargo.toml /lib/rust/ensogl/ @MichaelMauderer @wdanilo @farmaazon @kazcw /lib/rust/parser/ @kazcw @wdanilo @jaroslavtulach /integration-test/ @MichaelMauderer @wdanilo @farmaazon @kazcw -/tools/build-performance/ @kazcw @mwu-tow @wdanilo +/tools/build-performance/ @kazcw @mwu-tow @wdanilo @Akirathan # Scala Libraries /lib/scala/ @4e6 @jaroslavtulach @hubertp @@ -28,9 +28,9 @@ Cargo.toml # Engine (old) # This section should be removed once the engine moves to /app/engine -/build.sbt @4e6 @jaroslavtulach @hubertp +/build.sbt @4e6 @jaroslavtulach @hubertp @Akirathan /distribution/ @4e6 @jdunkerley @radeusgd @GregoryTravis -/engine/ @4e6 @jaroslavtulach @hubertp +/engine/ @4e6 @jaroslavtulach @hubertp @Akirathan /project/ @4e6 @jaroslavtulach @hubertp /test/ @jdunkerley @radeusgd @GregoryTravis /tools/ @4e6 @jaroslavtulach @radeusgd From 0dc9f3742cc52ac091b0338672fe02039931e097 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Fri, 28 Apr 2023 14:35:11 +0200 Subject: [PATCH 13/34] Unbreak CI after outdated merge (#6466) --- test/Table_Tests/src/Database/SQLite_Spec.enso | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index ead9985511b1..2129dfb7b3c0 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -180,7 +180,7 @@ spec = Test.specify 'should not duplicate warnings' <| c = Database.connect (SQLite In_Memory) t0 = Table.new [["X", ["a", "bc", "def"]]] - t1 = c.upload_table "Tabela" t0 + t1 = t0.create_database_table c "Tabela" t2 = t1.cast "X" (Value_Type.Char size=1) Warning.get_all t2 . length . should_equal 1 From fbfdba2b4a152fc1a407a2a5c010eab71dc39f42 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Fri, 28 Apr 2023 16:29:30 +0200 Subject: [PATCH 14/34] Don't attempt to resolve hostname for git init (#6463) During initialization JGit may attempt to resolve hostname. On some systems this can take more than desired triggering timeouts. This change does two things: - sets the default committer for changes, lack of which probably triggered the check - sets the default hostname to `localhost` (we don't care), in case something else in JGit still wants to resolve hostname Closes #6447. # Important Notes I wasn't able to reproduce this so relying on @mwu-tow since apparently he can repro it reliably. 
--- .../vcsmanager/EmptyUserConfigReader.scala | 2 +- .../scala/org/enso/languageserver/vcsmanager/Git.scala | 4 +++- .../org/enso/languageserver/vcsmanager/GitSpec.scala | 2 +- .../languageserver/websocket/json/VcsManagerTest.scala | 9 +++++++-- 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/vcsmanager/EmptyUserConfigReader.scala b/engine/language-server/src/main/scala/org/enso/languageserver/vcsmanager/EmptyUserConfigReader.scala index 8a9703c2466e..b85f4265cd7c 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/vcsmanager/EmptyUserConfigReader.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/vcsmanager/EmptyUserConfigReader.scala @@ -18,7 +18,7 @@ final class EmptyUserConfigReader extends SystemReader { /** @inheritdoc */ override def getHostname: String = - proxy.getHostname + "localhost" /** @inheritdoc */ override def getenv(variable: String): String = { diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/vcsmanager/Git.scala b/engine/language-server/src/main/scala/org/enso/languageserver/vcsmanager/Git.scala index a9b2c0159860..6daeb4c06106 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/vcsmanager/Git.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/vcsmanager/Git.scala @@ -113,6 +113,7 @@ private class Git(ensoDataDirectory: Option[Path], asyncInit: Boolean) .setAll(true) .setMessage("Initial commit") .setAuthor(AuthorName, AuthorEmail) + .setCommitter(AuthorName, AuthorEmail) .setNoVerify(true) .call() () @@ -166,6 +167,7 @@ private class Git(ensoDataDirectory: Option[Path], asyncInit: Boolean) .commit() .setMessage(commitName) .setAuthor(AuthorName, AuthorEmail) + .setCommitter(AuthorName, AuthorEmail) .call() RepoCommit(revCommit.getName(), revCommit.getShortMessage()) }.mapError(errorHandling) @@ -345,7 +347,7 @@ private class Git(ensoDataDirectory: 
Option[Path], asyncInit: Boolean) object Git { private val HeadRef = "HEAD" private val AuthorName = "Enso VCS" - private val AuthorEmail = "vcs@enso.io" + private val AuthorEmail = "vcs@enso.org" private class RepoExists extends Exception diff --git a/engine/language-server/src/test/scala/org/enso/languageserver/vcsmanager/GitSpec.scala b/engine/language-server/src/test/scala/org/enso/languageserver/vcsmanager/GitSpec.scala index 08b9af3b16ef..294d6a39b598 100644 --- a/engine/language-server/src/test/scala/org/enso/languageserver/vcsmanager/GitSpec.scala +++ b/engine/language-server/src/test/scala/org/enso/languageserver/vcsmanager/GitSpec.scala @@ -444,7 +444,7 @@ class GitSpec .setAllowEmpty(true) .setAll(true) .setMessage("Initial commit") - .setAuthor("Enso VCS", "vcs@enso.io") + .setAuthor("Enso VCS", "vcs@enso.org") .call() } } diff --git a/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/VcsManagerTest.scala b/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/VcsManagerTest.scala index 4db4bb780913..164c90164f0b 100644 --- a/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/VcsManagerTest.scala +++ b/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/VcsManagerTest.scala @@ -1460,7 +1460,8 @@ class VcsManagerTest extends BaseServerTest with RetrySpec with FlakySpec { .setAllowEmpty(true) .setAll(true) .setMessage("Initial commit") - .setAuthor("Enso VCS", "vcs@enso.io") + .setAuthor("Enso VCS", "vcs@enso.org") + .setCommitter("Enso VCS", "vcs@enso.org") .call() test(client) } @@ -1491,7 +1492,11 @@ class VcsManagerTest extends BaseServerTest with RetrySpec with FlakySpec { def commit(root: File, msg: String): RevCommit = { val jgit = new JGit(repository(root.toPath)) - jgit.commit.setMessage(msg).setAuthor("Enso VCS", "vcs@enso.io").call() + jgit.commit + .setMessage(msg) + .setAuthor("Enso VCS", "vcs@enso.org") + .setCommitter("Enso VCS", 
"vcs@enso.org") + .call() } def add(root: File, paths: Path*): Boolean = { From 34e1c28d6b6b7904c237d1d8f107503e47535629 Mon Sep 17 00:00:00 2001 From: "Stijn (\"stain\") Seghers" Date: Fri, 28 Apr 2023 17:12:06 +0200 Subject: [PATCH 15/34] Hide profile button behind a feature flag (#6430) Closes #6413. --- app/gui/view/graph-editor/src/lib.rs | 4 +++- app/ide-desktop/lib/content-config/src/config.json | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/app/gui/view/graph-editor/src/lib.rs b/app/gui/view/graph-editor/src/lib.rs index d0fbbcc41c0a..c3c20e61c1d3 100644 --- a/app/gui/view/graph-editor/src/lib.rs +++ b/app/gui/view/graph-editor/src/lib.rs @@ -1891,7 +1891,9 @@ impl GraphEditorModel { self.breadcrumbs.set_x(x_offset); self.scene().add_child(&self.tooltip); - self.add_child(&self.profiling_button); + if ARGS.groups.feature_preview.options.profiling.value { + self.add_child(&self.profiling_button); + } self.add_child(&*self.add_node_button); self } diff --git a/app/ide-desktop/lib/content-config/src/config.json b/app/ide-desktop/lib/content-config/src/config.json index 891b9cd3f89d..23c042eec492 100644 --- a/app/ide-desktop/lib/content-config/src/config.json +++ b/app/ide-desktop/lib/content-config/src/config.json @@ -123,6 +123,11 @@ "newDashboard": { "value": false, "description": "Determines whether the new dashboard with cloud integration is enabled." + }, + "profiling": { + "value": false, + "description": "Enable the button to profile workflows.", + "primary": false } } }, From efe904cd9f0c44c112a254229d94522802c246e5 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Fri, 28 Apr 2023 17:32:13 +0200 Subject: [PATCH 16/34] Introducing @BuiltinMethod.needsFrame and InlineableNode (#6442) Fixes #6416 by introducing `InlineableNode`. 
It runs fast even on GraalVM CE, fixes ([forever broken](https://github.com/enso-org/enso/pull/6442#discussion_r1178782635)) `Debug.eval` with `<|` and [removes discouraged subclassing](https://github.com/enso-org/enso/pull/6442#discussion_r1178778968) of `DirectCallNode`. Introduces `@BuiltinMethod.needsFrame` - something that was requested by #6293. Just in this PR the attribute is optional - its implicit value continues to be derived from `VirtualFrame` presence/absence in the builtin method argument list. A lot of methods had to be modified to pass the `VirtualFrame` parameter along to propagate it where needed. --- build.sbt | 1 + .../dsl/test/InliningBuiltinsInNode.java | 1 - .../dsl/test/InliningBuiltinsNeedNotNode.java | 15 ++++ .../dsl/test/InliningBuiltinsNeedsNode.java | 12 +++ .../dsl/test/InliningBuiltinsOutNode.java | 1 - .../dsl/test/InliningBuiltinsTest.java | 83 ++++++++++++++++--- .../semantic/ArrayProxyBenchmarks.java | 8 +- .../semantic/CurriedFunctionBenchmarks.java | 2 +- .../benchmarks/semantic/EqualsBenchmarks.java | 2 +- .../semantic/IfVsCaseBenchmarks.java | 7 +- .../benchmarks/semantic/ListBenchmarks.java | 2 +- .../NestedPatternCompilationBenchmarks.java | 2 +- .../bench/benchmarks/semantic/SrcUtil.java | 5 ++ .../benchmarks/semantic/StringBenchmarks.java | 10 ++- .../semantic/TypePatternBenchmarks.java | 11 ++- .../benchmarks/semantic/VectorBenchmarks.java | 2 +- .../semantic/WarningBenchmarks.java | 2 +- .../enso/interpreter/node/InlineableNode.java | 53 ++++++++++++ .../interpreter/node/InlineableRootNode.java | 56 ------------- .../node/callable/ExecuteCallNode.java | 67 ++++++++++++--- .../callable/IndirectInvokeCallableNode.java | 2 +- .../callable/IndirectInvokeMethodNode.java | 3 +- .../node/callable/InvokeCallableNode.java | 6 +- .../node/callable/InvokeMethodNode.java | 2 +- .../callable/argument/ArgumentSorterNode.java | 10 ++- .../argument/IndirectArgumentSorterNode.java | 10 ++- 
.../callable/dispatch/CallOptimiserNode.java | 8 +- .../node/callable/dispatch/CurryNode.java | 10 +-- .../callable/dispatch/IndirectCurryNode.java | 10 ++- .../dispatch/IndirectInvokeFunctionNode.java | 1 + .../callable/dispatch/InvokeFunctionNode.java | 3 +- .../dispatch/LoopingCallOptimiserNode.java | 6 +- .../dispatch/SimpleCallOptimiserNode.java | 12 ++- .../node/callable/thunk/ForceNode.java | 2 +- .../callable/thunk/ThunkExecutorNode.java | 11 ++- .../expression/builtin/BuiltinRootNode.java | 68 +-------------- .../node/expression/builtin/bool/AndNode.java | 9 +- .../builtin/bool/IfThenElseNode.java | 16 +++- .../expression/builtin/bool/IfThenNode.java | 11 ++- .../node/expression/builtin/bool/OrNode.java | 9 +- .../builtin/error/CatchPanicNode.java | 2 +- .../builtin/function/ApplicationOperator.java | 3 +- .../builtin/meta/GetAnnotationNode.java | 4 +- .../builtin/ordering/SortVectorNode.java | 6 +- .../builtin/resource/BracketNode.java | 2 +- .../builtin/runtime/NoInlineNode.java | 10 ++- .../RuntimeWithDisabledContextNode.java | 9 +- .../RuntimeWithEnabledContextNode.java | 9 +- .../builtin/special/RunThreadNode.java | 8 +- .../builtin/state/RunStateNode.java | 15 +++- .../thread/WithInterruptHandlerNode.java | 12 ++- .../node/expression/debug/EvalNode.java | 4 +- .../org/enso/interpreter/runtime/Module.java | 1 + .../runtime/data/hash/HashMapGetNode.java | 16 ++-- .../interpreter/test/semantic/EvalTest.scala | 14 ++++ .../org/enso/interpreter/dsl/Builtin.java | 14 ++++ .../enso/interpreter/dsl/BuiltinMethod.java | 17 +++- .../interpreter/dsl/BuiltinsProcessor.java | 19 ++++- .../enso/interpreter/dsl/MethodProcessor.java | 33 +++++--- .../builtins/MethodNodeClassGenerator.java | 6 +- .../dsl/model/MethodDefinition.java | 8 +- 61 files changed, 492 insertions(+), 271 deletions(-) create mode 100644 engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsNeedNotNode.java create mode 100644 
engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsNeedsNode.java create mode 100644 engine/runtime/src/main/java/org/enso/interpreter/node/InlineableNode.java delete mode 100644 engine/runtime/src/main/java/org/enso/interpreter/node/InlineableRootNode.java diff --git a/build.sbt b/build.sbt index a9f11104fc83..63bb5f2cd625 100644 --- a/build.sbt +++ b/build.sbt @@ -247,6 +247,7 @@ lazy val enso = (project in file(".")) .settings(version := "0.1") .aggregate( `interpreter-dsl`, + `interpreter-dsl-test`, `json-rpc-server-test`, `json-rpc-server`, `language-server`, diff --git a/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsInNode.java b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsInNode.java index b37ca62a7d18..8b44106e8752 100644 --- a/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsInNode.java +++ b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsInNode.java @@ -9,5 +9,4 @@ final class InliningBuiltinsInNode extends Node { long execute(long a, long b) { return a + b; } - } diff --git a/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsNeedNotNode.java b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsNeedNotNode.java new file mode 100644 index 000000000000..00a9995eab94 --- /dev/null +++ b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsNeedNotNode.java @@ -0,0 +1,15 @@ +package org.enso.interpreter.dsl.test; + +import com.oracle.truffle.api.frame.VirtualFrame; +import com.oracle.truffle.api.nodes.Node; +import org.enso.interpreter.dsl.BuiltinMethod; +import static org.junit.Assert.assertNotNull; + +@BuiltinMethod(type = "InliningBuiltins", name = "need_not", inlineable = true) +final class InliningBuiltinsNeedNotNode extends Node { + + long 
execute(VirtualFrame frame, long a, long b) { + assertNotNull("Some frame is still provided", frame); + return a + b; + } +} diff --git a/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsNeedsNode.java b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsNeedsNode.java new file mode 100644 index 000000000000..751d4b3eb316 --- /dev/null +++ b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsNeedsNode.java @@ -0,0 +1,12 @@ +package org.enso.interpreter.dsl.test; + +import com.oracle.truffle.api.nodes.Node; +import org.enso.interpreter.dsl.BuiltinMethod; + +@BuiltinMethod(type = "InliningBuiltins", name = "needs", inlineable = false) +final class InliningBuiltinsNeedsNode extends Node { + + long execute(long a, long b) { + return a + b; + } +} diff --git a/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsOutNode.java b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsOutNode.java index 4d3f95006288..7c7133569463 100644 --- a/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsOutNode.java +++ b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsOutNode.java @@ -12,5 +12,4 @@ long execute(VirtualFrame frame, long a, long b) { Assert.assertNotNull("VirtualFrame is always provided", frame); return a + b; } - } diff --git a/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsTest.java b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsTest.java index 37f3d7f50030..beb7fe92b02c 100644 --- a/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsTest.java +++ b/engine/interpreter-dsl-test/src/test/java/org/enso/interpreter/dsl/test/InliningBuiltinsTest.java @@ -1,10 +1,13 @@ package 
org.enso.interpreter.dsl.test; -import org.enso.interpreter.node.InlineableRootNode; +import com.oracle.truffle.api.frame.VirtualFrame; +import com.oracle.truffle.api.nodes.DirectCallNode; +import com.oracle.truffle.api.nodes.RootNode; import org.enso.interpreter.runtime.callable.function.Function; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import org.junit.Test; +import org.enso.interpreter.node.InlineableNode; public class InliningBuiltinsTest { @@ -12,13 +15,15 @@ public class InliningBuiltinsTest { @Test public void executeWithoutVirtualFrame() { var fn = InliningBuiltinsInMethodGen.makeFunction(null); - if (fn.getCallTarget().getRootNode() instanceof InlineableRootNode root) { - var call = root.createDirectCallNode(); - var clazz = call.getClass().getSuperclass(); - assertEquals("InlinedCallNode", clazz.getSimpleName()); - assertEquals("BuiltinRootNode", clazz.getEnclosingClass().getSimpleName()); + if (fn.getCallTarget().getRootNode() instanceof InlineableNode.Root root) { + var call = root.createInlineableNode(); + var clazz = call.getClass(); + assertEquals("InlineableNode", clazz.getSuperclass().getSimpleName()); + assertEquals("org.enso.interpreter.node.InlineableNode$Root", clazz.getEnclosingClass().getInterfaces()[0].getName()); - var res = call.call(Function.ArgumentsHelper.buildArguments(null, null, new Object[] { null, 5L, 7L })); + var res = WithFrame.invoke((frame) -> { + return call.call(frame, Function.ArgumentsHelper.buildArguments(null, null, new Object[] { null, 5L, 7L })); + }); assertEquals(12L, res); } else { fail("It is inlineable: " + fn.getCallTarget().getRootNode()); @@ -29,15 +34,73 @@ public void executeWithoutVirtualFrame() { @Test public void executeWithVirtualFrame() { var fn = InliningBuiltinsOutMethodGen.makeFunction(null); - if (fn.getCallTarget().getRootNode() instanceof InlineableRootNode root) { - var call = root.createDirectCallNode(); + if (fn.getCallTarget().getRootNode() instanceof 
InlineableNode.Root root) { + fail("The node isn't inlineable: " + fn.getCallTarget().getRootNode()); + } else { + var call = DirectCallNode.create(fn.getCallTarget()); var clazz = call.getClass().getSuperclass(); assertEquals("com.oracle.truffle.api.nodes.DirectCallNode", clazz.getName()); - var res = call.call(Function.ArgumentsHelper.buildArguments(null, null, new Object[] { null, 3L, 9L })); + var res = WithFrame.invoke((frame) -> { + return call.call(Function.ArgumentsHelper.buildArguments(null, null, new Object[] { null, 3L, 9L })); + }); + assertEquals(12L, res); + } + } + + /** @see InliningBuiltinsNeedsNode#execute(long, long) */ + @Test + public void executeWhenNeedsVirtualFrame() { + var fn = InliningBuiltinsNeedsMethodGen.makeFunction(null); + if (fn.getCallTarget().getRootNode() instanceof InlineableNode.Root root) { + fail("The node isn't inlineable: " + fn.getCallTarget().getRootNode()); + } else { + var call = DirectCallNode.create(fn.getCallTarget()); + var clazz = call.getClass().getSuperclass(); + assertEquals("com.oracle.truffle.api.nodes.DirectCallNode", clazz.getName()); + + var res = WithFrame.invoke((frame) -> { + return call.call(Function.ArgumentsHelper.buildArguments(null, null, new Object[] { null, 3L, 9L })); + }); + assertEquals(12L, res); + } + } + + /** @see InliningBuiltinsNeedNotNode#execute(com.oracle.truffle.api.frame.VirtualFrame, long, long) */ + @Test + public void executeWhenNeedNotVirtualFrame() { + var fn = InliningBuiltinsNeedNotMethodGen.makeFunction(null); + if (fn.getCallTarget().getRootNode() instanceof InlineableNode.Root root) { + var call = root.createInlineableNode(); + var clazz = call.getClass(); + assertEquals("InlineableNode", clazz.getSuperclass().getSimpleName()); + assertEquals("org.enso.interpreter.node.InlineableNode$Root", clazz.getEnclosingClass().getInterfaces()[0].getName()); + + var res = WithFrame.invoke((frame) -> { + return call.call(frame, Function.ArgumentsHelper.buildArguments(null, null, new 
Object[] { null, 5L, 7L })); + }); assertEquals(12L, res); } else { fail("It is inlineable: " + fn.getCallTarget().getRootNode()); } } + + private static final class WithFrame extends RootNode { + private final java.util.function.Function fn; + + private WithFrame(java.util.function.Function fn) { + super(null); + this.fn = fn; + } + + @Override + public Object execute(VirtualFrame frame) { + return fn.apply(frame); + } + + @SuppressWarnings("unchecked") + static T invoke(java.util.function.Function fn, Object... args) { + return (T) new WithFrame<>(fn).getCallTarget().call(args); + } + } } diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/ArrayProxyBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/ArrayProxyBenchmarks.java index cd7c170894eb..a74fc1b8ff2a 100644 --- a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/ArrayProxyBenchmarks.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/ArrayProxyBenchmarks.java @@ -30,7 +30,7 @@ public class ArrayProxyBenchmarks { private final long length = 100000; @Setup - public void initializeBenchmark(BenchmarkParams params) { + public void initializeBenchmark(BenchmarkParams params) throws Exception { Engine eng = Engine.newBuilder() .allowExperimentalOptions(true) @@ -59,13 +59,15 @@ Array_Proxy.new n (i -> 3 + 5*i) make_delegating_vector n = Vector.from_polyglot_array (make_delegating_proxy n) """; - var module = ctx.eval("enso", code); + var benchmarkName = SrcUtil.findName(params); + var src = SrcUtil.source(benchmarkName, code); + var module = ctx.eval(src); this.self = module.invokeMember("get_associated_type"); Function getMethod = (name) -> module.invokeMember("get_method", self, name); String test_builder; - switch (params.getBenchmark().replaceFirst(".*\\.", "")) { + switch (benchmarkName) { case "sumOverVector": test_builder = "make_vector"; break; diff --git 
a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/CurriedFunctionBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/CurriedFunctionBenchmarks.java index 0ba34c00a9e6..bdb9c4824f2d 100644 --- a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/CurriedFunctionBenchmarks.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/CurriedFunctionBenchmarks.java @@ -42,7 +42,7 @@ public void initializeBenchmark(BenchmarkParams params) throws Exception { Paths.get("../../distribution/component").toFile().getAbsolutePath() ).build(); - var benchmarkName = params.getBenchmark().replaceFirst(".*\\.", ""); + var benchmarkName = SrcUtil.findName(params); var code = """ avg fn len = sum acc i = if i == len then acc else diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/EqualsBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/EqualsBenchmarks.java index 2021799ca46a..10eb026eb964 100644 --- a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/EqualsBenchmarks.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/EqualsBenchmarks.java @@ -68,7 +68,7 @@ public void initializeBenchmark(BenchmarkParams params) throws Exception { Paths.get("../../distribution/component").toFile().getAbsolutePath() ).build(); - var benchmarkName = params.getBenchmark().replaceFirst(".*\\.", ""); + var benchmarkName = SrcUtil.findName(params); var codeBuilder = new StringBuilder(""" import Standard.Base.Data.Range.Extensions diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/IfVsCaseBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/IfVsCaseBenchmarks.java index c1848d13b87b..ac938e7d825b 100644 --- 
a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/IfVsCaseBenchmarks.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/IfVsCaseBenchmarks.java @@ -133,11 +133,8 @@ public void initializeBench(BenchmarkParams params) throws IOException { """; - var file = File.createTempFile("if_case", ".enso"); - try (var w = new FileWriter(file)) { - w.write(code); - } - var src = Source.newBuilder("enso", file).build(); + var benchmarkName = SrcUtil.findName(params); + var src = SrcUtil.source(benchmarkName, code); Value module = ctx.eval(src); ifBench3 = Objects.requireNonNull(module.invokeMember(Module.EVAL_EXPRESSION, "if_bench_3")); caseBench3 = Objects.requireNonNull(module.invokeMember(Module.EVAL_EXPRESSION, "case_bench_3")); diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/ListBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/ListBenchmarks.java index ef8b748ab812..c9db1bc92550 100644 --- a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/ListBenchmarks.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/ListBenchmarks.java @@ -46,7 +46,7 @@ public void initializeBenchmark(BenchmarkParams params) throws Exception { Paths.get("../../distribution/component").toFile().getAbsolutePath() ).build(); - var benchmarkName = params.getBenchmark().replaceFirst(".*\\.", ""); + var benchmarkName = SrcUtil.findName(params); var code = """ from Standard.Base.Data.List.List import Cons, Nil import Standard.Base.IO diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/NestedPatternCompilationBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/NestedPatternCompilationBenchmarks.java index 7d2eca3b2e4a..0cb7b76ecb64 100644 --- 
a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/NestedPatternCompilationBenchmarks.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/NestedPatternCompilationBenchmarks.java @@ -46,7 +46,7 @@ public void initializeBenchmark(BenchmarkParams params) throws Exception { Paths.get("../../distribution/component").toFile().getAbsolutePath() ).build(); - benchmarkName = params.getBenchmark().replaceFirst(".*\\.", ""); + benchmarkName = SrcUtil.findName(params); code = """ type List Cons a b diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/SrcUtil.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/SrcUtil.java index 95f6d1c5805c..3b2e24446a26 100644 --- a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/SrcUtil.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/SrcUtil.java @@ -4,11 +4,16 @@ import java.io.FileWriter; import java.io.IOException; import org.graalvm.polyglot.Source; +import org.openjdk.jmh.infra.BenchmarkParams; final class SrcUtil { private SrcUtil() { } + static String findName(BenchmarkParams params) { + return params.getBenchmark().replaceFirst(".*\\.", ""); + } + static Source source(String benchmarkName, String code) throws IOException { var d = new File(new File(new File("."), "target"), "bench-data"); d.mkdirs(); diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/StringBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/StringBenchmarks.java index 385de3bfb5d5..77c2b3fa30a2 100644 --- a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/StringBenchmarks.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/StringBenchmarks.java @@ -32,7 +32,7 @@ public class StringBenchmarks { private Value allLength; @Setup 
- public void initializeBenchmark(BenchmarkParams params) { + public void initializeBenchmark(BenchmarkParams params) throws Exception { var ctx = Context.newBuilder() .allowExperimentalOptions(true) .allowIO(true) @@ -42,7 +42,8 @@ public void initializeBenchmark(BenchmarkParams params) { "enso.languageHomeOverride", Paths.get("../../distribution/component").toFile().getAbsolutePath() ).build(); - var module = ctx.eval("enso", """ + + var code =""" from Standard.Base import all all_length v = v.fold 0 (sum -> str -> sum + str.length) @@ -51,7 +52,10 @@ public void initializeBenchmark(BenchmarkParams params) { s = "Long string".repeat rep v = Vector.new len (_ -> s) v - """); + """; + var benchmarkName = SrcUtil.findName(params); + var src = SrcUtil.source(benchmarkName, code); + var module = ctx.eval(src); this.self = module.invokeMember("get_associated_type"); Function getMethod = (name) -> module.invokeMember("get_method", self, name); diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/TypePatternBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/TypePatternBenchmarks.java index 34244a328e7c..0dc4e2a38d23 100644 --- a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/TypePatternBenchmarks.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/TypePatternBenchmarks.java @@ -25,7 +25,7 @@ public class TypePatternBenchmarks { private Value self; @Setup - public void initializeBenchmark(BenchmarkParams params) { + public void initializeBenchmark(BenchmarkParams params) throws Exception { var ctx = Context.newBuilder() .allowExperimentalOptions(true) .allowIO(true) @@ -35,7 +35,7 @@ public void initializeBenchmark(BenchmarkParams params) { "enso.languageHomeOverride", Paths.get("../../distribution/component").toFile().getAbsolutePath() ).build(); - var module = ctx.eval("enso", """ + var code =""" from Standard.Base import 
Integer, Vector, Any, Decimal avg arr = @@ -60,14 +60,17 @@ public void initializeBenchmark(BenchmarkParams params) { match_dec = v -> case v of n : Decimal -> n + 1 - """); + """; + var benchmarkName = SrcUtil.findName(params); + var src = SrcUtil.source(benchmarkName, code); + var module = ctx.eval(src); this.self = module.invokeMember("get_associated_type"); Function getMethod = (name) -> module.invokeMember("get_method", self, name); var length = 100; this.vec = getMethod.apply("gen_vec").execute(self, length, 1.1); - switch (params.getBenchmark().replaceFirst(".*\\.", "")) { + switch (SrcUtil.findName(params)) { case "matchOverAny": this.patternMatch = getMethod.apply("match_any"); break; diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/VectorBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/VectorBenchmarks.java index 2213245e1857..ae62b8b0b163 100644 --- a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/VectorBenchmarks.java +++ b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/VectorBenchmarks.java @@ -44,7 +44,7 @@ public void initializeBenchmark(BenchmarkParams params) throws Exception { Paths.get("../../distribution/component").toFile().getAbsolutePath() ).build(); - var benchmarkName = params.getBenchmark().replaceFirst(".*\\.", ""); + var benchmarkName = SrcUtil.findName(params); var code = """ import Standard.Base.Data.Vector.Vector import Standard.Base.Data.Array_Proxy.Array_Proxy diff --git a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/WarningBenchmarks.java b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/WarningBenchmarks.java index da08d9e081b6..58e42fb2d6d1 100644 --- a/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/WarningBenchmarks.java +++ 
b/engine/runtime/src/bench/java/org/enso/interpreter/bench/benchmarks/semantic/WarningBenchmarks.java @@ -46,7 +46,7 @@ public class WarningBenchmarks extends TestBase { public void initializeBench(BenchmarkParams params) throws IOException { ctx = createDefaultContext(); - benchmarkName = params.getBenchmark().replaceFirst(".*\\.", ""); + benchmarkName = SrcUtil.findName(params); var code = """ from Standard.Base import all diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/InlineableNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/InlineableNode.java new file mode 100644 index 000000000000..3b77df82cf85 --- /dev/null +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/InlineableNode.java @@ -0,0 +1,53 @@ +package org.enso.interpreter.node; + +import com.oracle.truffle.api.frame.VirtualFrame; +import com.oracle.truffle.api.nodes.DirectCallNode; +import com.oracle.truffle.api.nodes.Node; +import com.oracle.truffle.api.nodes.RootNode; +import org.enso.interpreter.node.callable.ExecuteCallNode; + +/** + * More effective {@link DirectCallNode} alternative. Supports more aggressive inlining needed by + * {@link ExecuteCallNode}. + */ +public abstract class InlineableNode extends Node { + /** + * Invokes the computation represented by the node. + * + * @param frame current frame of the caller + * @param arguments arguments for the functionality + * @return result of the computation + */ + public abstract Object call(VirtualFrame frame, Object[] arguments); + + /** + * Special interface that allows various {@link RootNode} subclasses to provide more effective + * implementation of {@link DirectCallNode} alternative. Used by for example by {@code + * BuiltinRootNode}. + */ + public interface Root { + /** + * Provides access to {@link RootNode}. Usually the object shall inherit from {link RootNode} as + * well as implement the {@link InlineableNode} interface. This method thus usually returns + * {@code this}. 
+ * + * @return {@code this} types as {link RootNode} + */ + public RootNode getRootNode(); + + /** + * Name of the {@link RootNode}. + * + * @return root node name + */ + public String getName(); + + /** + * Override to provide more effective implementation of {@link DirectCallNode} alternative. + * Suited more for Enso aggressive inlining. + * + * @return a node to call the associated {@link RootNode} - may return {@code null} + */ + public InlineableNode createInlineableNode(); + } +} diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/InlineableRootNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/InlineableRootNode.java deleted file mode 100644 index d660aad64bfe..000000000000 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/InlineableRootNode.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.enso.interpreter.node; - -import com.oracle.truffle.api.CallTarget; -import com.oracle.truffle.api.RootCallTarget; -import com.oracle.truffle.api.nodes.DirectCallNode; -import com.oracle.truffle.api.nodes.RootNode; - -/** - * Special interface that allows various {@link RootNode} subclasses to provide - * more effective implementation of {@link DirectCallNode}. Used by for example - * by {@code BuiltinRootNode}. - */ -public interface InlineableRootNode { - /** - * Provides access to {@link RootNode}. Usually the object shall inherit from - * {link RootNode} as well as implement the {@link InlineableRootNode} - * interface. This method thus usually returns {@code this}. - * - * @return {@code this} types as {link RootNode} - */ - public RootNode getRootNode(); - - /** - * Name of the {@link RootNode}. - * - * @return root node name - */ - public String getName(); - - /** - * Override to provide more effective implementation of {@link DirectCallNode} - * suited more for Enso aggressive inlining. - * - * @return a node to {@link DirectCallNode#call(java.lang.Object...) 
call} the - * associated {@link RootNode} - may return {@code null} - */ - public DirectCallNode createDirectCallNode(); - - /** - * * Obtain a {@link DirectCallNode} for given {@link CallTarget}.Either - * delegates to {@link #createDirectCallNode} or uses regular - * {@link DirectCallNode#create(com.oracle.truffle.api.CallTarget)} method. - * Use for example by {@code ExecuteCallNode}. - * - * @param target call target with regular or - * {@link InlineableRootNode} {@link RootCallTarget#getRootNode()} - * @return instance of {@link DirectCallNode} to use to invoke the - * {@link RootNode#execute(com.oracle.truffle.api.frame.VirtualFrame)}. - */ - public static DirectCallNode create(RootCallTarget target) { - if (target.getRootNode() instanceof InlineableRootNode inRoot && inRoot.createDirectCallNode() instanceof DirectCallNode node) { - return node; - } - return DirectCallNode.create(target); - } -} diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/ExecuteCallNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/ExecuteCallNode.java index c734cbd3a5c1..6606f73a5e6a 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/ExecuteCallNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/ExecuteCallNode.java @@ -4,18 +4,21 @@ import com.oracle.truffle.api.dsl.Cached; import com.oracle.truffle.api.dsl.GenerateUncached; import com.oracle.truffle.api.dsl.Specialization; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.DirectCallNode; import com.oracle.truffle.api.nodes.IndirectCallNode; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.NodeInfo; -import org.enso.interpreter.node.InlineableRootNode; import org.enso.interpreter.runtime.callable.CallerInfo; import org.enso.interpreter.runtime.callable.function.Function; +import org.enso.interpreter.node.InlineableNode; /** - * This node is responsible for 
optimising function calls. - * - *

Where possible, it will make the call as a direct call, with potential for inlining. + * This node is responsible for optimising function calls. Where possible, it will handle the call via: + *

    + *
  • {@link InlineableNode} to force inlining
  • + *
  • {@link DirectCallNode} with potential for inlining
  • + *
*/ @NodeInfo(shortName = "ExecCall", description = "Optimises function calls") @GenerateUncached @@ -32,11 +35,39 @@ public static ExecuteCallNode build() { return ExecuteCallNodeGen.create(); } + /** + * Inlines the function if its root node implements {@link InlineableNode.Root}. + * + * @param frame current frame + * @param function the function to execute + * @param callerInfo the caller info to pass to the function + * @param state the current state value + * @param arguments the arguments passed to {@code function} in the expected positional order + * @param cachedTarget the cached call target for {@code function} + * @param callNode the cached call node for {@code cachedTarget} + * @return the result of executing {@code function} on {@code arguments} + */ + @Specialization(guards = { + "function.getCallTarget() == cachedTarget", + "callNode != null" + }) + protected Object callInlineable( + VirtualFrame frame, + Function function, + CallerInfo callerInfo, + Object state, + Object[] arguments, + @Cached("function.getCallTarget()") RootCallTarget cachedTarget, + @Cached("createInlineableNode(cachedTarget)") InlineableNode callNode) { + var args = Function.ArgumentsHelper.buildArguments(function, callerInfo, state, arguments); + return callNode.call(frame, args); + } + /** * Calls the function directly. * *

This specialisation comes into play where the call target for the provided function is - * already cached. THis means that the call can be made quickly. + * already cached. This means that the call can be made quickly. * * @param function the function to execute * @param callerInfo the caller info to pass to the function @@ -46,20 +77,29 @@ public static ExecuteCallNode build() { * @param callNode the cached call node for {@code cachedTarget} * @return the result of executing {@code function} on {@code arguments} */ - @Specialization(guards = "function.getCallTarget() == cachedTarget") + @Specialization(guards = { + "function.getCallTarget() == cachedTarget", + }) protected Object callDirect( Function function, CallerInfo callerInfo, Object state, Object[] arguments, @Cached("function.getCallTarget()") RootCallTarget cachedTarget, - @Cached("createCallNode(cachedTarget)") DirectCallNode callNode) { - return callNode.call( - Function.ArgumentsHelper.buildArguments(function, callerInfo, state, arguments)); + @Cached("createDirectCallNode(cachedTarget)") DirectCallNode callNode) { + var args = Function.ArgumentsHelper.buildArguments(function, callerInfo, state, arguments); + return callNode.call(args); + } + + static InlineableNode createInlineableNode(RootCallTarget t) { + if (t.getRootNode() instanceof InlineableNode.Root inlineNodeProvider) { + return inlineNodeProvider.createInlineableNode(); + } + return null; } - static DirectCallNode createCallNode(RootCallTarget t) { - return InlineableRootNode.create(t); + static DirectCallNode createDirectCallNode(RootCallTarget t) { + return DirectCallNode.create(t); } /** @@ -75,7 +115,7 @@ static DirectCallNode createCallNode(RootCallTarget t) { * @param callNode the cached call node for making indirect calls * @return the result of executing {@code function} on {@code arguments} */ - @Specialization(replaces = "callDirect") + @Specialization(replaces = { "callDirect", "callInlineable" }) protected Object callIndirect( 
Function function, CallerInfo callerInfo, @@ -90,6 +130,7 @@ protected Object callIndirect( /** * Executes the function call. * + * @param frame the caller's frame * @param function the function to execute * @param callerInfo the caller info to pass to the function * @param state the state value to pass to the function @@ -97,5 +138,5 @@ protected Object callIndirect( * @return the result of executing {@code function} on {@code arguments} */ public abstract Object executeCall( - Function function, CallerInfo callerInfo, Object state, Object[] arguments); + VirtualFrame frame, Function function, CallerInfo callerInfo, Object state, Object[] arguments); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeCallableNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeCallableNode.java index 8fd86f6487a7..969d41d4db74 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeCallableNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeCallableNode.java @@ -182,7 +182,7 @@ public Object invokeDynamicSymbol( if (canApplyThis) { Object self = arguments[thisArgumentPosition]; if (argumentsExecutionMode.shouldExecute()) { - self = thisExecutor.executeThunk(self, state, BaseNode.TailStatus.NOT_TAIL); + self = thisExecutor.executeThunk(callerFrame, self, state, BaseNode.TailStatus.NOT_TAIL); arguments[thisArgumentPosition] = self; } return invokeMethodNode.execute( diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeMethodNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeMethodNode.java index 79fafca1515c..5083da87b578 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeMethodNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/IndirectInvokeMethodNode.java @@ -178,7 +178,8 @@ Object 
doPolyglot( @Cached HostMethodCallNode hostMethodCallNode) { Object[] args = new Object[arguments.length - 1]; for (int i = 0; i < arguments.length - 1; i++) { - var r = argExecutor.executeThunk(arguments[i + 1], state, BaseNode.TailStatus.NOT_TAIL); + var r = + argExecutor.executeThunk(frame, arguments[i + 1], state, BaseNode.TailStatus.NOT_TAIL); if (r instanceof DataflowError) { return r; } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java index f12789694ad5..aa6c951380d5 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java @@ -215,8 +215,8 @@ public Object invokeConversion( lock.unlock(); } } - selfArgument = thisExecutor.executeThunk(selfArgument, state, TailStatus.NOT_TAIL); - thatArgument = thatExecutor.executeThunk(thatArgument, state, TailStatus.NOT_TAIL); + selfArgument = thisExecutor.executeThunk(callerFrame, selfArgument, state, TailStatus.NOT_TAIL); + thatArgument = thatExecutor.executeThunk(callerFrame, thatArgument, state, TailStatus.NOT_TAIL); arguments[thisArgumentPosition] = selfArgument; arguments[thatArgumentPosition] = thatArgument; @@ -248,7 +248,7 @@ public Object invokeDynamicSymbol( lock.unlock(); } } - selfArgument = thisExecutor.executeThunk(selfArgument, state, TailStatus.NOT_TAIL); + selfArgument = thisExecutor.executeThunk(callerFrame, selfArgument, state, TailStatus.NOT_TAIL); arguments[thisArgumentPosition] = selfArgument; } return invokeMethodNode.execute(callerFrame, state, symbol, selfArgument, arguments); diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java index db3ca78ae294..670882debfa3 100644 --- 
a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java @@ -336,7 +336,7 @@ Object doPolyglot( boolean anyWarnings = false; ArrayRope accumulatedWarnings = new ArrayRope<>(); for (int i = 0; i < argExecutors.length; i++) { - var r = argExecutors[i].executeThunk(arguments[i + 1], state, TailStatus.NOT_TAIL); + var r = argExecutors[i].executeThunk(frame, arguments[i + 1], state, TailStatus.NOT_TAIL); if (r instanceof DataflowError) { profiles[i].enter(); return r; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/argument/ArgumentSorterNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/argument/ArgumentSorterNode.java index cbad564b3a77..2b6fa645ee2e 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/argument/ArgumentSorterNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/argument/ArgumentSorterNode.java @@ -1,6 +1,7 @@ package org.enso.interpreter.node.callable.argument; import com.oracle.truffle.api.CompilerDirectives; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.nodes.NodeInfo; import org.enso.interpreter.node.BaseNode; @@ -66,7 +67,7 @@ private void initArgumentExecutors() { } @ExplodeLoop - private void executeArguments(Object[] arguments, State state) { + private void executeArguments(VirtualFrame frame, Object[] arguments, State state) { if (executors == null) { CompilerDirectives.transferToInterpreterAndInvalidate(); Lock lock = getLock(); @@ -81,7 +82,7 @@ private void executeArguments(Object[] arguments, State state) { } for (int i = 0; i < mapping.getArgumentShouldExecute().length; i++) { if (executors[i] != null) { - arguments[i] = executors[i].executeThunk(arguments[i], state, TailStatus.NOT_TAIL); + arguments[i] = 
executors[i].executeThunk(frame, arguments[i], state, TailStatus.NOT_TAIL); } } } @@ -94,9 +95,10 @@ private void executeArguments(Object[] arguments, State state) { * @param arguments the arguments to reorder * @return the provided {@code arguments} in the order expected by the cached {@link Function} */ - public MappedArguments execute(Function function, State state, Object[] arguments) { + public MappedArguments execute( + VirtualFrame frame, Function function, State state, Object[] arguments) { if (argumentsExecutionMode.shouldExecute()) { - executeArguments(arguments, state); + executeArguments(frame, arguments, state); } Object[] mappedAppliedArguments = prepareArguments( diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/argument/IndirectArgumentSorterNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/argument/IndirectArgumentSorterNode.java index 5f0e1ee2ebc3..dd8255172796 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/argument/IndirectArgumentSorterNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/argument/IndirectArgumentSorterNode.java @@ -3,6 +3,7 @@ import com.oracle.truffle.api.dsl.Cached; import com.oracle.truffle.api.dsl.GenerateUncached; import com.oracle.truffle.api.dsl.Specialization; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.NodeInfo; @@ -35,6 +36,7 @@ public static IndirectArgumentSorterNode build() { @ExplodeLoop private void executeArguments( + VirtualFrame frame, ArgumentMapping mapping, Object[] arguments, State state, @@ -42,7 +44,8 @@ private void executeArguments( for (int i = 0; i < mapping.getArgumentShouldExecute().length; i++) { if (mapping.getArgumentShouldExecute()[i]) { arguments[i] = - thunkExecutorNode.executeThunk(arguments[i], state, BaseNode.TailStatus.NOT_TAIL); + 
thunkExecutorNode.executeThunk( + frame, arguments[i], state, BaseNode.TailStatus.NOT_TAIL); } } } @@ -50,6 +53,7 @@ private void executeArguments( /** * Reorders and executes the provided arguments in a way suitable for the called function. * + * @param frame current frame * @param preApplicationSchema the function schema before applying the arguments * @param mapping the pre-computed argument mapping for the function * @param argumentsExecutionMode whether arguments should be executed or not @@ -59,6 +63,7 @@ private void executeArguments( * @return the provided {@code arguments} in the order expected by the cached {@link Function} */ public abstract ArgumentSorterNode.MappedArguments execute( + VirtualFrame frame, FunctionSchema preApplicationSchema, ArgumentMapping mapping, InvokeCallableNode.ArgumentsExecutionMode argumentsExecutionMode, @@ -68,6 +73,7 @@ public abstract ArgumentSorterNode.MappedArguments execute( @Specialization ArgumentSorterNode.MappedArguments doExecute( + VirtualFrame frame, FunctionSchema preApplicationSchema, ArgumentMapping mapping, InvokeCallableNode.ArgumentsExecutionMode argumentsExecutionMode, @@ -77,7 +83,7 @@ ArgumentSorterNode.MappedArguments doExecute( @Cached ThunkExecutorNode thunkExecutorNode) { FunctionSchema postApplicationSchema = mapping.getPostApplicationSchema(); if (argumentsExecutionMode.shouldExecute()) { - executeArguments(mapping, arguments, state, thunkExecutorNode); + executeArguments(frame, mapping, arguments, state, thunkExecutorNode); } Object[] mappedAppliedArguments = ArgumentSorterNode.prepareArguments( diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/CallOptimiserNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/CallOptimiserNode.java index ada785524bb6..3d0422ea134a 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/CallOptimiserNode.java +++ 
b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/CallOptimiserNode.java @@ -1,5 +1,6 @@ package org.enso.interpreter.node.callable.dispatch; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.NodeInfo; import org.enso.interpreter.runtime.callable.CallerInfo; @@ -27,6 +28,7 @@ public static CallOptimiserNode build() { /** * Calls the provided {@code callable} using the provided {@code arguments}. * + * @param frame frame of the caller * @param callable the callable to execute * @param callerInfo the caller info to pass to the function * @param state the state to pass to the function @@ -34,5 +36,9 @@ public static CallOptimiserNode build() { * @return the result of executing {@code callable} using {@code arguments} */ public abstract Object executeDispatch( - Function callable, CallerInfo callerInfo, State state, Object[] arguments); + VirtualFrame frame, + Function callable, + CallerInfo callerInfo, + State state, + Object[] arguments); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/CurryNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/CurryNode.java index 4a2b022bb5ee..8803831db1c9 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/CurryNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/CurryNode.java @@ -105,7 +105,7 @@ public Object execute( Object[] oversaturatedArguments) { if (appliesFully) { if (!postApplicationSchema.hasOversaturatedArgs()) { - var value = doCall(function, callerInfo, state, arguments); + var value = doCall(frame, function, callerInfo, state, arguments); if (defaultsExecutionMode.isExecute() && (value instanceof Function || (value instanceof AtomConstructor cons && cons.getConstructorFunction().getSchema().isFullyApplied()))) { @@ -133,7 +133,7 @@ public Object execute( return value; } } 
else { - var evaluatedVal = loopingCall.executeDispatch(function, callerInfo, state, arguments); + var evaluatedVal = loopingCall.executeDispatch(frame, function, callerInfo, state, arguments); return this.oversaturatedCallableNode.execute( evaluatedVal, frame, state, oversaturatedArguments); @@ -150,11 +150,11 @@ public Object execute( } private Object doCall( - Function function, CallerInfo callerInfo, State state, Object[] arguments) { + VirtualFrame frame, Function function, CallerInfo callerInfo, State state, Object[] arguments) { return switch (getTailStatus()) { - case TAIL_DIRECT -> directCall.executeCall(function, callerInfo, state, arguments); + case TAIL_DIRECT -> directCall.executeCall(frame, function, callerInfo, state, arguments); case TAIL_LOOP -> throw new TailCallException(function, callerInfo, arguments); - default -> loopingCall.executeDispatch(function, callerInfo, state, arguments); + default -> loopingCall.executeDispatch(frame, function, callerInfo, state, arguments); }; } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/IndirectCurryNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/IndirectCurryNode.java index 839e92e50ce7..489139633460 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/IndirectCurryNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/IndirectCurryNode.java @@ -4,6 +4,7 @@ import com.oracle.truffle.api.dsl.GenerateUncached; import com.oracle.truffle.api.dsl.Specialization; import com.oracle.truffle.api.frame.MaterializedFrame; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.NodeInfo; import org.enso.interpreter.node.BaseNode; @@ -74,7 +75,7 @@ Object doCurry( if (appliesFully) { if (!postApplicationSchema.hasOversaturatedArgs()) { var value = - doCall(function, callerInfo, state, arguments, isTail, 
directCall, loopingCall); + doCall(frame, function, callerInfo, state, arguments, isTail, directCall, loopingCall); if (defaultsExecutionMode.isExecute() && (value instanceof Function || (value instanceof AtomConstructor cons && cons.getConstructorFunction().getSchema().isFullyApplied()))) { @@ -91,7 +92,7 @@ Object doCurry( return value; } } else { - var evaluatedVal = loopingCall.executeDispatch(function, callerInfo, state, arguments); + var evaluatedVal = loopingCall.executeDispatch(frame, function, callerInfo, state, arguments); return oversaturatedCallableNode.execute( evaluatedVal, @@ -114,6 +115,7 @@ Object doCurry( } private Object doCall( + VirtualFrame frame, Function function, CallerInfo callerInfo, State state, @@ -123,11 +125,11 @@ private Object doCall( CallOptimiserNode loopingCall) { switch (isTail) { case TAIL_DIRECT: - return directCall.executeCall(function, callerInfo, state, arguments); + return directCall.executeCall(frame, function, callerInfo, state, arguments); case TAIL_LOOP: throw new TailCallException(function, callerInfo, arguments); default: - return loopingCall.executeDispatch(function, callerInfo, state, arguments); + return loopingCall.executeDispatch(frame, function, callerInfo, state, arguments); } } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/IndirectInvokeFunctionNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/IndirectInvokeFunctionNode.java index d3fb301ac277..c72642a09657 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/IndirectInvokeFunctionNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/IndirectInvokeFunctionNode.java @@ -66,6 +66,7 @@ Object invokeUncached( ArgumentSorterNode.MappedArguments mappedArguments = mappingNode.execute( + callerFrame, function.getSchema(), argumentMapping, argumentsExecutionMode, diff --git 
a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/InvokeFunctionNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/InvokeFunctionNode.java index 151c32cccbee..cd5acd0f17a0 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/InvokeFunctionNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/InvokeFunctionNode.java @@ -82,7 +82,7 @@ Object invokeCached( "build(argumentMapping, getDefaultsExecutionMode(), getArgumentsExecutionMode(), getTailStatus())") CurryNode curryNode) { ArgumentSorterNode.MappedArguments mappedArguments = - mappingNode.execute(function, state, arguments); + mappingNode.execute(callerFrame, function, state, arguments); CallerInfo callerInfo = null; if (cachedSchema.getCallerFrameAccess().shouldFrameBePassed()) { callerInfo = captureCallerInfoNode.execute(callerFrame.materialize()); @@ -121,6 +121,7 @@ Object invokeUncached( ArgumentSorterNode.MappedArguments mappedArguments = mappingNode.execute( + callerFrame, function.getSchema(), argumentMapping, getArgumentsExecutionMode(), diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/LoopingCallOptimiserNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/LoopingCallOptimiserNode.java index 4fd7ccb5bc51..b78a94eb3a63 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/LoopingCallOptimiserNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/LoopingCallOptimiserNode.java @@ -7,6 +7,7 @@ import com.oracle.truffle.api.dsl.Specialization; import com.oracle.truffle.api.frame.FrameDescriptor; import com.oracle.truffle.api.frame.FrameSlotKind; +import com.oracle.truffle.api.frame.MaterializedFrame; import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.LoopNode; import com.oracle.truffle.api.nodes.Node; @@ -72,6 +73,7 
@@ public Object dispatch( @Specialization(replaces = "dispatch") @CompilerDirectives.TruffleBoundary public Object uncachedDispatch( + MaterializedFrame frame, Function function, CallerInfo callerInfo, State state, @@ -79,7 +81,7 @@ public Object uncachedDispatch( @Cached ExecuteCallNode executeCallNode) { while (true) { try { - return executeCallNode.executeCall(function, callerInfo, state, arguments); + return executeCallNode.executeCall(frame, function, callerInfo, state, arguments); } catch (TailCallException e) { function = e.getFunction(); callerInfo = e.getCallerInfo(); @@ -211,7 +213,7 @@ public boolean executeRepeating(VirtualFrame frame) { Object[] arguments = getNextArgs(frame); CallerInfo callerInfo = getCallerInfo(frame); frame.setObject( - resultSlotIdx, dispatchNode.executeCall(function, callerInfo, state, arguments)); + resultSlotIdx, dispatchNode.executeCall(frame, function, callerInfo, state, arguments)); return false; } catch (TailCallException e) { setNextCall(frame, e.getFunction(), e.getCallerInfo(), e.getArguments()); diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/SimpleCallOptimiserNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/SimpleCallOptimiserNode.java index 97f0ca21d0c7..877ff2bced9d 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/SimpleCallOptimiserNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/dispatch/SimpleCallOptimiserNode.java @@ -1,6 +1,7 @@ package org.enso.interpreter.node.callable.dispatch; import com.oracle.truffle.api.CompilerDirectives; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.NodeInfo; import org.enso.interpreter.node.callable.ExecuteCallNode; import org.enso.interpreter.node.callable.ExecuteCallNodeGen; @@ -44,9 +45,13 @@ public static SimpleCallOptimiserNode build() { */ @Override public Object executeDispatch( - Function 
function, CallerInfo callerInfo, State state, Object[] arguments) { + VirtualFrame frame, + Function function, + CallerInfo callerInfo, + State state, + Object[] arguments) { try { - return executeCallNode.executeCall(function, callerInfo, state, arguments); + return executeCallNode.executeCall(frame, function, callerInfo, state, arguments); } catch (TailCallException e) { if (next == null) { CompilerDirectives.transferToInterpreterAndInvalidate(); @@ -60,7 +65,8 @@ public Object executeDispatch( lock.unlock(); } } - return next.executeDispatch(e.getFunction(), e.getCallerInfo(), state, e.getArguments()); + return next.executeDispatch( + frame, e.getFunction(), e.getCallerInfo(), state, e.getArguments()); } } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/thunk/ForceNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/thunk/ForceNode.java index b39319df53af..e51285a56aff 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/thunk/ForceNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/thunk/ForceNode.java @@ -30,6 +30,6 @@ public static ForceNode build(ExpressionNode target) { Object passToExecutorNode( VirtualFrame frame, Object thunk, @Cached("build()") ThunkExecutorNode thunkExecutorNode) { State state = Function.ArgumentsHelper.getState(frame.getArguments()); - return thunkExecutorNode.executeThunk(thunk, state, getTailStatus()); + return thunkExecutorNode.executeThunk(frame, thunk, state, getTailStatus()); } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/thunk/ThunkExecutorNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/thunk/ThunkExecutorNode.java index 8552a3b3fe27..114e9fda306b 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/thunk/ThunkExecutorNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/thunk/ThunkExecutorNode.java @@ -2,6 +2,7 
@@ import com.oracle.truffle.api.CompilerAsserts; import com.oracle.truffle.api.dsl.*; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.DirectCallNode; import com.oracle.truffle.api.nodes.IndirectCallNode; import com.oracle.truffle.api.nodes.Node; @@ -35,12 +36,14 @@ public static ThunkExecutorNode build() { /** * Forces the thunk to its resulting value. * + * @param frame the current frame * @param thunk the thunk to force * @param state the state to pass to the thunk * @param isTail is the execution happening in a tail-call position * @return the return value of this thunk */ - public abstract Object executeThunk(Object thunk, State state, BaseNode.TailStatus isTail); + public abstract Object executeThunk( + VirtualFrame frame, Object thunk, State state, BaseNode.TailStatus isTail); boolean sameCallTarget(DirectCallNode callNode, Function function) { return function.getCallTarget() == callNode.getCallTarget(); @@ -50,6 +53,7 @@ boolean sameCallTarget(DirectCallNode callNode, Function function) { guards = {"function.isThunk()", "sameCallTarget(callNode, function)"}, limit = Constants.CacheSizes.THUNK_EXECUTOR_NODE) Object doCached( + VirtualFrame frame, Function function, State state, BaseNode.TailStatus isTail, @@ -63,13 +67,14 @@ Object doCached( return callNode.call(Function.ArgumentsHelper.buildArguments(function, state)); } catch (TailCallException e) { return loopingCallOptimiserNode.executeDispatch( - e.getFunction(), e.getCallerInfo(), state, e.getArguments()); + frame, e.getFunction(), e.getCallerInfo(), state, e.getArguments()); } } } @Specialization(replaces = "doCached", guards = "function.isThunk()") Object doUncached( + VirtualFrame frame, Function function, State state, BaseNode.TailStatus isTail, @@ -84,7 +89,7 @@ Object doUncached( function.getCallTarget(), Function.ArgumentsHelper.buildArguments(function, state)); } catch (TailCallException e) { return loopingCallOptimiserNode.executeDispatch( - 
e.getFunction(), e.getCallerInfo(), state, e.getArguments()); + frame, e.getFunction(), e.getCallerInfo(), state, e.getArguments()); } } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/BuiltinRootNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/BuiltinRootNode.java index 52d8b83d01c5..04d674f1fe34 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/BuiltinRootNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/BuiltinRootNode.java @@ -1,18 +1,14 @@ package org.enso.interpreter.node.expression.builtin; -import com.oracle.truffle.api.CallTarget; import org.enso.interpreter.EnsoLanguage; import com.oracle.truffle.api.frame.VirtualFrame; -import com.oracle.truffle.api.nodes.DirectCallNode; -import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.NodeInfo; import com.oracle.truffle.api.nodes.RootNode; -import org.enso.interpreter.node.InlineableRootNode; /** Root node for use by all the builtin functions. */ @NodeInfo(shortName = "BuiltinRoot", description = "Root node for builtin functions.") -public abstract class BuiltinRootNode extends RootNode implements InlineableRootNode { +public abstract class BuiltinRootNode extends RootNode { protected BuiltinRootNode(EnsoLanguage language) { super(language); } @@ -33,66 +29,4 @@ protected BuiltinRootNode(EnsoLanguage language) { */ @Override public abstract String getName(); - - /** - * Factory method creating a {@link DirectCallNode} to invoke this builtin.Defaults to standard - * {@link DirectCallNode#create(com.oracle.truffle.api.CallTarget)} implementation. Subclasses may - * override this with the help of {@link InlinedCallNode}. 
- * - * @return new node to use to call this builtin - */ - public DirectCallNode createDirectCallNode() { - var callNode = DirectCallNode.create(cloneUninitialized().getCallTarget()); - callNode.forceInlining(); - return callNode; - } - - /** - * Helper class allowing better implementation of {@link #createDirectCallNode}. Subclass, pass in - * {@code extra} and {@code body} and override {@code call} method to do what has to be done. - * - * @param extra data to keep in the node - * @param node to delegate to from {@link #call(java.lang.Object...)} method - */ - protected abstract static class InlinedCallNode extends DirectCallNode { - protected final E extra; - @Child protected N body; - - protected InlinedCallNode(E extra, N body) { - super(null); - this.extra = extra; - this.body = body; - } - - @Override - public abstract Object call(Object... arguments); - - @Override - public final boolean isInlinable() { - return true; - } - - @Override - public final boolean isInliningForced() { - return true; - } - - @Override - public final void forceInlining() {} - - @Override - public final boolean isCallTargetCloningAllowed() { - return false; - } - - @Override - public final boolean cloneCallTarget() { - return false; - } - - @Override - public final CallTarget getClonedCallTarget() { - return getRootNode().getCallTarget(); - } - } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/AndNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/AndNode.java index 99313580807a..ca26e8a21e9d 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/AndNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/AndNode.java @@ -2,6 +2,7 @@ import com.oracle.truffle.api.dsl.Cached; import com.oracle.truffle.api.dsl.Specialization; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import 
com.oracle.truffle.api.profiles.ConditionProfile; import org.enso.interpreter.dsl.BuiltinMethod; @@ -13,7 +14,8 @@ @BuiltinMethod( type = "Boolean", name = "&&", - description = "Computes the logical conjunction of two booleans") + description = "Computes the logical conjunction of two booleans", + inlineable = true) public abstract class AndNode extends Node { private final ConditionProfile conditionProfile = ConditionProfile.createCountingProfile(); @@ -22,10 +24,11 @@ public static AndNode build() { return AndNodeGen.create(); } - abstract Object execute(State state, boolean self, @Suspend Object that); + abstract Object execute(VirtualFrame frame, State state, boolean self, @Suspend Object that); @Specialization Object executeBool( + VirtualFrame frame, State state, boolean self, Object that, @@ -33,6 +36,6 @@ Object executeBool( if (conditionProfile.profile(!self)) { return false; } - return rhsThunkExecutorNode.executeThunk(that, state, BaseNode.TailStatus.TAIL_DIRECT); + return rhsThunkExecutorNode.executeThunk(frame, that, state, BaseNode.TailStatus.TAIL_DIRECT); } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/IfThenElseNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/IfThenElseNode.java index 6f29ff4edddb..95151c9901bd 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/IfThenElseNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/IfThenElseNode.java @@ -1,5 +1,6 @@ package org.enso.interpreter.node.expression.builtin.bool; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.profiles.ConditionProfile; import org.enso.interpreter.dsl.BuiltinMethod; @@ -11,18 +12,25 @@ @BuiltinMethod( type = "Boolean", name = "if_then_else", - description = "Performs the standard if-then-else control flow operation.") + description = 
"Performs the standard if-then-else control flow operation.", + inlineable = true) public final class IfThenElseNode extends Node { private @Child ThunkExecutorNode leftThunkExecutorNode = ThunkExecutorNode.build(); private @Child ThunkExecutorNode rightThunkExecutorNode = ThunkExecutorNode.build(); private final ConditionProfile condProfile = ConditionProfile.createCountingProfile(); public Object execute( - State state, boolean self, @Suspend Object if_true, @Suspend Object if_false) { + VirtualFrame frame, + State state, + boolean self, + @Suspend Object if_true, + @Suspend Object if_false) { if (condProfile.profile(self)) { - return leftThunkExecutorNode.executeThunk(if_true, state, BaseNode.TailStatus.TAIL_DIRECT); + return leftThunkExecutorNode.executeThunk( + frame, if_true, state, BaseNode.TailStatus.TAIL_DIRECT); } else { - return rightThunkExecutorNode.executeThunk(if_false, state, BaseNode.TailStatus.TAIL_DIRECT); + return rightThunkExecutorNode.executeThunk( + frame, if_false, state, BaseNode.TailStatus.TAIL_DIRECT); } } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/IfThenNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/IfThenNode.java index e88986973fe7..868dde221304 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/IfThenNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/IfThenNode.java @@ -1,6 +1,7 @@ package org.enso.interpreter.node.expression.builtin.bool; import com.oracle.truffle.api.dsl.Specialization; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.profiles.ConditionProfile; import org.enso.interpreter.dsl.BuiltinMethod; @@ -13,7 +14,8 @@ @BuiltinMethod( type = "Boolean", name = "if_then", - description = "Performs the standard if-then control flow operation.") + description = "Performs the 
standard if-then control flow operation.", + inlineable = true) public abstract class IfThenNode extends Node { private @Child ThunkExecutorNode leftThunkExecutorNode = ThunkExecutorNode.build(); private final ConditionProfile condProfile = ConditionProfile.createCountingProfile(); @@ -22,12 +24,13 @@ static IfThenNode build() { return IfThenNodeGen.create(); } - abstract Object execute(State state, boolean self, @Suspend Object if_true); + abstract Object execute(VirtualFrame frame, State state, boolean self, @Suspend Object if_true); @Specialization - Object doExecute(State state, boolean self, Object if_true) { + Object doExecute(VirtualFrame frame, State state, boolean self, Object if_true) { if (condProfile.profile(self)) { - return leftThunkExecutorNode.executeThunk(if_true, state, BaseNode.TailStatus.TAIL_DIRECT); + return leftThunkExecutorNode.executeThunk( + frame, if_true, state, BaseNode.TailStatus.TAIL_DIRECT); } else { return EnsoContext.get(this).getNothing(); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/OrNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/OrNode.java index ba6b77c533e9..01d459af3f9b 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/OrNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/bool/OrNode.java @@ -2,6 +2,7 @@ import com.oracle.truffle.api.dsl.Cached; import com.oracle.truffle.api.dsl.Specialization; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.profiles.ConditionProfile; import org.enso.interpreter.dsl.BuiltinMethod; @@ -13,7 +14,8 @@ @BuiltinMethod( type = "Boolean", name = "||", - description = "Computes the logical disjunction of two booleans") + description = "Computes the logical disjunction of two booleans", + inlineable = true) public abstract class OrNode extends Node { 
private final ConditionProfile conditionProfile = ConditionProfile.createCountingProfile(); @@ -22,10 +24,11 @@ public static OrNode build() { return OrNodeGen.create(); } - abstract Object execute(State state, boolean self, @Suspend Object that); + abstract Object execute(VirtualFrame frame, State state, boolean self, @Suspend Object that); @Specialization Object executeBool( + VirtualFrame frame, State state, boolean self, Object that, @@ -33,6 +36,6 @@ Object executeBool( if (conditionProfile.profile(self)) { return true; } - return rhsThunkExecutorNode.executeThunk(that, state, BaseNode.TailStatus.TAIL_DIRECT); + return rhsThunkExecutorNode.executeThunk(frame, that, state, BaseNode.TailStatus.TAIL_DIRECT); } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/CatchPanicNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/CatchPanicNode.java index ac7a37d8e042..28822e1bbfd5 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/CatchPanicNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/CatchPanicNode.java @@ -60,7 +60,7 @@ Object doExecute( @Cached BranchProfile otherExceptionBranchProfile, @CachedLibrary(limit = "3") InteropLibrary interop) { try { - return thunkExecutorNode.executeThunk(action, state, BaseNode.TailStatus.TAIL_DIRECT); + return thunkExecutorNode.executeThunk(frame, action, state, BaseNode.TailStatus.TAIL_DIRECT); } catch (PanicException e) { panicBranchProfile.enter(); Object payload = e.getPayload(); diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/function/ApplicationOperator.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/function/ApplicationOperator.java index ad5d39967321..4b67ea9bde92 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/function/ApplicationOperator.java 
+++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/function/ApplicationOperator.java @@ -12,7 +12,8 @@ @BuiltinMethod( type = "Function", name = "<|", - description = "Takes a function and an argument and applies the function to the argument.") + description = "Takes a function and an argument and applies the function to the argument.", + inlineable = true) public class ApplicationOperator extends Node { private @Child InvokeCallableNode invokeCallableNode; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/GetAnnotationNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/GetAnnotationNode.java index edea86d0de32..7b6c2459e3b9 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/GetAnnotationNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/meta/GetAnnotationNode.java @@ -48,7 +48,7 @@ Object doExecute( if (annotation != null) { Function thunk = Function.thunk(annotation.getExpression().getCallTarget(), frame.materialize()); - return thunkExecutorNode.executeThunk(thunk, state, getTailStatus()); + return thunkExecutorNode.executeThunk(frame, thunk, state, getTailStatus()); } } AtomConstructor constructor = getAtomConstructor(targetType, methodName); @@ -59,7 +59,7 @@ Object doExecute( if (annotation != null) { Function thunk = Function.thunk(annotation.getExpression().getCallTarget(), frame.materialize()); - return thunkExecutorNode.executeThunk(thunk, state, getTailStatus()); + return thunkExecutorNode.executeThunk(frame, thunk, state, getTailStatus()); } } return EnsoContext.get(this).getNothing(); diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/ordering/SortVectorNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/ordering/SortVectorNode.java index d23ee524e04c..49dfbe35c76b 100644 --- 
a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/ordering/SortVectorNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/ordering/SortVectorNode.java @@ -806,8 +806,8 @@ public int compare(Object x, Object y) { Object yConverted; if (hasCustomOnFunc) { // onFunc cannot have `self` argument, we assume it has just one argument. - xConverted = callNode.executeDispatch(onFunc.get(x), null, state, new Object[]{x}); - yConverted = callNode.executeDispatch(onFunc.get(y), null, state, new Object[]{y}); + xConverted = callNode.executeDispatch(null, onFunc.get(x), null, state, new Object[]{x}); + yConverted = callNode.executeDispatch(null, onFunc.get(y), null, state, new Object[]{y}); } else { xConverted = x; yConverted = y; @@ -818,7 +818,7 @@ public int compare(Object x, Object y) { } else { args = new Object[] {xConverted, yConverted}; } - Object res = callNode.executeDispatch(compareFunc.get(xConverted), null, state, args); + Object res = callNode.executeDispatch(null, compareFunc.get(xConverted), null, state, args); if (res == less) { return ascending ? 
-1 : 1; } else if (res == equal) { diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/resource/BracketNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/resource/BracketNode.java index a6e48e4ba3b2..d0f49b22da00 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/resource/BracketNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/resource/BracketNode.java @@ -65,7 +65,7 @@ Object doBracket( Object action, @Cached BranchProfile initializationFailedWithDataflowErrorProfile) { Object resource = - invokeConstructorNode.executeThunk(constructor, state, BaseNode.TailStatus.NOT_TAIL); + invokeConstructorNode.executeThunk(frame, constructor, state, BaseNode.TailStatus.NOT_TAIL); if (TypesGen.isDataflowError(resource)) { initializationFailedWithDataflowErrorProfile.enter(); return resource; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/NoInlineNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/NoInlineNode.java index 6f96e8040e83..ddbe2c5ca864 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/NoInlineNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/NoInlineNode.java @@ -1,6 +1,8 @@ package org.enso.interpreter.node.expression.builtin.runtime; import com.oracle.truffle.api.CompilerDirectives; +import com.oracle.truffle.api.frame.MaterializedFrame; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import org.enso.interpreter.dsl.BuiltinMethod; import org.enso.interpreter.dsl.Suspend; @@ -16,8 +18,12 @@ public class NoInlineNode extends Node { private @Child ThunkExecutorNode thunkExecutorNode = ThunkExecutorNode.build(); + Object execute(VirtualFrame frame, State state, @Suspend Object action) { + return 
executeImpl(frame.materialize(), state, action); + } + @CompilerDirectives.TruffleBoundary - Object execute(State state, @Suspend Object action) { - return thunkExecutorNode.executeThunk(action, state, BaseNode.TailStatus.NOT_TAIL); + private Object executeImpl(MaterializedFrame frame, State state, @Suspend Object action) { + return thunkExecutorNode.executeThunk(frame, action, state, BaseNode.TailStatus.NOT_TAIL); } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/RuntimeWithDisabledContextNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/RuntimeWithDisabledContextNode.java index fe01515010f1..51601b3196dc 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/RuntimeWithDisabledContextNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/RuntimeWithDisabledContextNode.java @@ -1,5 +1,6 @@ package org.enso.interpreter.node.expression.builtin.runtime; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import org.enso.interpreter.dsl.BuiltinMethod; import org.enso.interpreter.dsl.Suspend; @@ -13,14 +14,16 @@ type = "Runtime", name = "with_disabled_context_builtin", description = "Disallows context in the specified scope.", - autoRegister = false) + autoRegister = false, + inlineable = true) public class RuntimeWithDisabledContextNode extends Node { private @Child ThunkExecutorNode thunkExecutorNode = ThunkExecutorNode.build(); private @Child ExpectStringNode expectStringNode = ExpectStringNode.build(); - Object execute(State state, Atom context, Object env_name, @Suspend Object action) { + Object execute( + VirtualFrame frame, State state, Atom context, Object env_name, @Suspend Object action) { String envName = expectStringNode.execute(env_name); return thunkExecutorNode.executeThunk( - action, state.withContextDisabledIn(context, envName), 
BaseNode.TailStatus.NOT_TAIL); + frame, action, state.withContextDisabledIn(context, envName), BaseNode.TailStatus.NOT_TAIL); } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/RuntimeWithEnabledContextNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/RuntimeWithEnabledContextNode.java index 1d28351c93a1..aaac3a094080 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/RuntimeWithEnabledContextNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/RuntimeWithEnabledContextNode.java @@ -1,5 +1,6 @@ package org.enso.interpreter.node.expression.builtin.runtime; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import org.enso.interpreter.dsl.BuiltinMethod; import org.enso.interpreter.dsl.Suspend; @@ -13,14 +14,16 @@ type = "Runtime", name = "with_enabled_context_builtin", description = "Allows context in the specified scope.", - autoRegister = false) + autoRegister = false, + inlineable = true) public class RuntimeWithEnabledContextNode extends Node { private @Child ThunkExecutorNode thunkExecutorNode = ThunkExecutorNode.build(); private @Child ExpectStringNode expectStringNode = ExpectStringNode.build(); - Object execute(State state, Atom context, Object env_name, @Suspend Object action) { + Object execute( + VirtualFrame frame, State state, Atom context, Object env_name, @Suspend Object action) { String envName = expectStringNode.execute(env_name); return thunkExecutorNode.executeThunk( - action, state.withContextEnabledIn(context, envName), BaseNode.TailStatus.NOT_TAIL); + frame, action, state.withContextEnabledIn(context, envName), BaseNode.TailStatus.NOT_TAIL); } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/special/RunThreadNode.java 
b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/special/RunThreadNode.java index bd7480eca5c7..fc107706282c 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/special/RunThreadNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/special/RunThreadNode.java @@ -2,6 +2,8 @@ import com.oracle.truffle.api.CompilerDirectives; import com.oracle.truffle.api.dsl.Specialization; +import com.oracle.truffle.api.frame.MaterializedFrame; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import org.enso.interpreter.dsl.BuiltinMethod; import org.enso.interpreter.dsl.Suspend; @@ -16,11 +18,11 @@ static RunThreadNode build() { return RunThreadNodeGen.create(); } - abstract Thread execute(State state, @Suspend Object self); + abstract Thread execute(VirtualFrame frame, State state, @Suspend Object self); @CompilerDirectives.TruffleBoundary @Specialization - Thread doExecute(State state, Object self) { + Thread doExecute(MaterializedFrame frame, State state, Object self) { EnsoContext ctx = EnsoContext.get(this); Thread thread = ctx.getEnvironment() @@ -29,7 +31,7 @@ Thread doExecute(State state, Object self) { Object p = ctx.getThreadManager().enter(); try { ThunkExecutorNodeGen.getUncached() - .executeThunk(self, state, BaseNode.TailStatus.NOT_TAIL); + .executeThunk(frame, self, state, BaseNode.TailStatus.NOT_TAIL); } finally { ctx.getThreadManager().leave(p); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/state/RunStateNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/state/RunStateNode.java index 8904085b0dbc..61854b3e3dee 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/state/RunStateNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/state/RunStateNode.java @@ -3,6 +3,7 @@ import 
com.oracle.truffle.api.dsl.Bind; import com.oracle.truffle.api.dsl.ReportPolymorphism; import com.oracle.truffle.api.dsl.Specialization; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.library.CachedLibrary; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.object.DynamicObjectLibrary; @@ -16,7 +17,8 @@ type = "State", name = "run", description = "Runs a stateful computation in a local state environment.", - autoRegister = false) + autoRegister = false, + inlineable = true) @ReportPolymorphism public abstract class RunStateNode extends Node { static RunStateNode build() { @@ -25,10 +27,12 @@ static RunStateNode build() { private @Child ThunkExecutorNode thunkExecutorNode = ThunkExecutorNode.build(); - abstract Object execute(State state, Object key, Object local_state, @Suspend Object computation); + abstract Object execute( + VirtualFrame frame, State state, Object key, Object local_state, @Suspend Object computation); @Specialization(guards = "objects.containsKey(data, key)") Object doExisting( + VirtualFrame frame, State state, Object key, Object local, @@ -38,7 +42,8 @@ Object doExisting( var old = objects.getOrDefault(data, key, null); objects.put(data, key, local); try { - return thunkExecutorNode.executeThunk(computation, state, BaseNode.TailStatus.NOT_TAIL); + return thunkExecutorNode.executeThunk( + frame, computation, state, BaseNode.TailStatus.NOT_TAIL); } finally { objects.put(state.getContainer(), key, old); } @@ -46,6 +51,7 @@ Object doExisting( @Specialization(guards = "!objects.containsKey(data, key)") Object doFresh( + VirtualFrame frame, State state, Object key, Object local, @@ -54,7 +60,8 @@ Object doFresh( @CachedLibrary(limit = "10") DynamicObjectLibrary objects) { objects.put(data, key, local); try { - return thunkExecutorNode.executeThunk(computation, state, BaseNode.TailStatus.NOT_TAIL); + return thunkExecutorNode.executeThunk( + frame, computation, state, BaseNode.TailStatus.NOT_TAIL); 
} finally { objects.removeKey(data, key); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/thread/WithInterruptHandlerNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/thread/WithInterruptHandlerNode.java index e98c45129426..8b18ab402c03 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/thread/WithInterruptHandlerNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/thread/WithInterruptHandlerNode.java @@ -1,5 +1,6 @@ package org.enso.interpreter.node.expression.builtin.thread; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.Node; import org.enso.interpreter.dsl.BuiltinMethod; import org.enso.interpreter.dsl.Suspend; @@ -12,16 +13,19 @@ type = "Thread", name = "with_interrupt_handler", description = "Runs a computation with a handler for thread interrupts.", - autoRegister = false) + autoRegister = false, + inlineable = true) public class WithInterruptHandlerNode extends Node { private @Child ThunkExecutorNode actExecutorNode = ThunkExecutorNode.build(); private @Child ThunkExecutorNode handlerExecutorNode = ThunkExecutorNode.build(); - Object execute(State state, @Suspend Object action, @Suspend Object interrupt_handler) { + Object execute( + VirtualFrame frame, State state, @Suspend Object action, @Suspend Object interrupt_handler) { try { - return actExecutorNode.executeThunk(action, state, BaseNode.TailStatus.NOT_TAIL); + return actExecutorNode.executeThunk(frame, action, state, BaseNode.TailStatus.NOT_TAIL); } catch (ThreadInterruptedException e) { - handlerExecutorNode.executeThunk(interrupt_handler, state, BaseNode.TailStatus.NOT_TAIL); + handlerExecutorNode.executeThunk( + frame, interrupt_handler, state, BaseNode.TailStatus.NOT_TAIL); throw e; } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/debug/EvalNode.java 
b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/debug/EvalNode.java index 40b8076a6aba..50d151a20241 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/debug/EvalNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/debug/EvalNode.java @@ -101,7 +101,7 @@ Object doCached( RootCallTarget cachedCallTarget, @Cached("build()") ThunkExecutorNode thunkExecutorNode) { Function thunk = Function.thunk(cachedCallTarget, callerInfo.getFrame()); - return thunkExecutorNode.executeThunk(thunk, state, getTailStatus()); + return thunkExecutorNode.executeThunk(callerInfo.getFrame(), thunk, state, getTailStatus()); } @Specialization @@ -117,6 +117,6 @@ Object doUncached( callerInfo.getModuleScope(), toJavaStringNode.execute(expression)); Function thunk = Function.thunk(callTarget, callerInfo.getFrame()); - return thunkExecutorNode.executeThunk(thunk, state, getTailStatus()); + return thunkExecutorNode.executeThunk(callerInfo.getFrame(), thunk, state, getTailStatus()); } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/Module.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/Module.java index 1c2d94019d15..18667b30a151 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/Module.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/Module.java @@ -626,6 +626,7 @@ private static Object evalExpression( .orElseThrow(); CallerInfo callerInfo = new CallerInfo(null, LocalScope.root(), scope); return callOptimiserNode.executeDispatch( + null, eval.getFunction(), callerInfo, context.emptyState(), diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapGetNode.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapGetNode.java index ccf2bef3bee3..8004337561c5 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapGetNode.java +++ 
b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapGetNode.java @@ -4,6 +4,7 @@ import com.oracle.truffle.api.dsl.Fallback; import com.oracle.truffle.api.dsl.GenerateUncached; import com.oracle.truffle.api.dsl.Specialization; +import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.interop.InteropLibrary; import com.oracle.truffle.api.interop.UnknownKeyException; import com.oracle.truffle.api.interop.UnsupportedMessageException; @@ -21,7 +22,8 @@ description = """ Gets a value from the map on the specified key, or the given default. """, - autoRegister = false + autoRegister = false, + inlineable = true ) @GenerateUncached public abstract class HashMapGetNode extends Node { @@ -30,10 +32,12 @@ public static HashMapGetNode build() { return HashMapGetNodeGen.create(); } - public abstract Object execute(State state, Object self, Object key, @Suspend Object defaultValue); + public abstract Object execute(VirtualFrame frame, State state, Object self, Object key, @Suspend Object defaultValue); @Specialization(guards = "interop.hasHashEntries(self)", limit = "3") - Object hashMapGet(State state, Object self, Object key, Object defaultValue, + Object hashMapGet( + VirtualFrame frame, + State state, Object self, Object key, Object defaultValue, @CachedLibrary("self") InteropLibrary interop, @Cached("build()") ThunkExecutorNode thunkExecutorNode) { if (interop.isHashEntryReadable(self, key)) { @@ -43,13 +47,13 @@ Object hashMapGet(State state, Object self, Object key, Object defaultValue, throw new IllegalStateException(e); } } else { - return thunkExecutorNode.executeThunk(defaultValue, state, TailStatus.NOT_TAIL); + return thunkExecutorNode.executeThunk(frame, defaultValue, state, TailStatus.NOT_TAIL); } } @Fallback - Object fallback(State state, Object self, Object key, Object defaultValue, + Object fallback(VirtualFrame frame, State state, Object self, Object key, Object defaultValue, @Cached("build()") ThunkExecutorNode 
thunkExecutorNode) { - return thunkExecutorNode.executeThunk(defaultValue, state, TailStatus.NOT_TAIL); + return thunkExecutorNode.executeThunk(frame, defaultValue, state, TailStatus.NOT_TAIL); } } diff --git a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/EvalTest.scala b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/EvalTest.scala index 07a568a09505..6828ec06277c 100644 --- a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/EvalTest.scala +++ b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/EvalTest.scala @@ -36,6 +36,20 @@ class EvalTest extends InterpreterTest { consumeOut shouldEqual List("Hello World!") } + "have access to the caller scope with <|" in { + val code = + s"""import Standard.Base.Runtime.Debug + |import Standard.Base.IO + | + |main = + | x = "Hello World!" + | Debug.eval <| $rawTQ + | IO.println x + |""".stripMargin + eval(code) + consumeOut shouldEqual List("Hello World!") + } + "have access to the caller module scope" in { val code = s"""import Standard.Base.Runtime.Debug diff --git a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/Builtin.java b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/Builtin.java index 81a91a84e2ae..5f07bb8e274b 100644 --- a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/Builtin.java +++ b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/Builtin.java @@ -160,6 +160,20 @@ * type. Auto-registered methods do not have to be declared explicitly. */ boolean autoRegister() default true; + + /** + * Allow aggressive inlining or not. This argument doesn't need to be specified. If it is + * missing, its effective value is derived from the arguments of the annotated method. + * When the annotated method requires {@link VirtualFrame} as one of its arguments the value of + * unspecified {@link #inlineable()} is {@code false}. 
When no {@link VirtualFrame} is needed, + * the value is assumed to be {@code true}. + * + * @return explicitly specify whether the builtin can be inline and use caller's {@link + * VirtualFrame} + * @see BuiltinMethod + * @see BuiltinMethod#inlineable() + */ + boolean inlineable() default true; } /** diff --git a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/BuiltinMethod.java b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/BuiltinMethod.java index c5163ae96f92..67ee13b0bc4e 100644 --- a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/BuiltinMethod.java +++ b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/BuiltinMethod.java @@ -8,9 +8,9 @@ /** * An annotation denoting a node that should be wrapped for standard library export. A subclass of * {@code BuiltinRootNode} is generated with implementation of {@code - * InlineableRootNode#createDirectCallNode()} that either delegates to regular {@link - * DirectCallNode} (when the {@code execute} method requires {@code VirtualFrame} as one of its - * arguments) or provides a special implementation, if no {@code VirtualFrame} is needed. + * InlineableNode.Root.createInlineableNode()} that either delegates to regular {@link + * DirectCallNode} or provides a special and faster implementation depending on implicit or explicit + * value of {@link #inlineable()} attribute. */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.SOURCE) @@ -29,4 +29,15 @@ /** @return whether a method should be registered automatically with a type */ boolean autoRegister() default true; + + /** + * Allow aggressive inlining or not. This argument doesn't need to be specified. If it is missing, + * its effective value is derived from the arguments of the annotated method. When the + * {@code execute} method requires {@code VirtualFrame} as one of its arguments the value of + * unspecified {@link #inlineable()} is {@code false}. 
When no {@code VirtualFrame} is needed, the + * value is assumed to be {@code true}. + * + * @return explicitly specify whether the builtin can share {@link VirtualFrame} with its caller. + */ + boolean inlineable() default true; } diff --git a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/BuiltinsProcessor.java b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/BuiltinsProcessor.java index 8743a09ab7db..424c76018fed 100644 --- a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/BuiltinsProcessor.java +++ b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/BuiltinsProcessor.java @@ -149,6 +149,7 @@ public void handleMethodElement(Element element, RoundEnvironment roundEnv) thro PackageElement pkgElement = (PackageElement) ownerTpeElement.getEnclosingElement(); Builtin.Method annotation = element.getAnnotation(Builtin.Method.class); + Boolean needsFrame = checkNeedsFrame(element); boolean isConstructor = method.getKind() == ElementKind.CONSTRUCTOR; if (annotation.expandVarargs() != 0) { @@ -183,7 +184,7 @@ public void handleMethodElement(Element element, RoundEnvironment roundEnv) thro methodName, annotation.description(), method.getSimpleName().toString(), - annotation.autoRegister()); + annotation.autoRegister(), needsFrame); } catch (IOException ioe) { throw new RuntimeException(ioe); } @@ -237,7 +238,7 @@ public void handleMethodElement(Element element, RoundEnvironment roundEnv) thro builtinMethodName, annotation.description(), method.getSimpleName().toString(), - annotation.autoRegister()); + annotation.autoRegister(), needsFrame); } } else { MethodNodeClassGenerator classGenerator = @@ -248,7 +249,7 @@ public void handleMethodElement(Element element, RoundEnvironment roundEnv) thro builtinMethodName, annotation.description(), method.getSimpleName().toString(), - annotation.autoRegister()); + annotation.autoRegister(), needsFrame); } } } else { @@ -256,6 +257,18 @@ public void 
handleMethodElement(Element element, RoundEnvironment roundEnv) thro } } + static Boolean checkNeedsFrame(Element element) { + for (var m : element.getAnnotationMirrors()) { + for (var entry : m.getElementValues().entrySet()) { + var name = entry.getKey().getSimpleName().toString(); + if (name.equals("inlineable")) { + return !(Boolean) entry.getValue().getValue(); + } + } + } + return null; + } + /** * Count the number of @Specialization or @Fallback annotations for a given method name. * diff --git a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/MethodProcessor.java b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/MethodProcessor.java index 7e09c3b7c474..cc1c8b6856c4 100644 --- a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/MethodProcessor.java +++ b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/MethodProcessor.java @@ -54,7 +54,8 @@ public boolean handleProcess(Set annotations, RoundEnviro for (Element elt : annotatedElements) { if (elt.getKind() == ElementKind.CLASS) { try { - handleTypeELement((TypeElement) elt, roundEnv); + var needsFrame = BuiltinsProcessor.checkNeedsFrame(elt); + handleTypeELement((TypeElement) elt, roundEnv, needsFrame); } catch (IOException e) { processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, e.getMessage()); } @@ -71,7 +72,7 @@ public boolean handleProcess(Set annotations, RoundEnviro return true; } - private void handleTypeELement(TypeElement element, RoundEnvironment roundEnv) + private void handleTypeELement(TypeElement element, RoundEnvironment roundEnv, Boolean needsFrame) throws IOException { ExecutableElement executeMethod = element.getEnclosedElements().stream() @@ -94,7 +95,7 @@ private void handleTypeELement(TypeElement element, RoundEnvironment roundEnv) String pkgName = processingEnv.getElementUtils().getPackageOf(element).getQualifiedName().toString(); - MethodDefinition def = new MethodDefinition(pkgName, element, executeMethod); + 
MethodDefinition def = new MethodDefinition(pkgName, element, executeMethod, needsFrame); if (!def.validate(processingEnv)) { return; } @@ -127,6 +128,7 @@ private void handleTypeELement(TypeElement element, RoundEnvironment roundEnv) "com.oracle.truffle.api.profiles.ConditionProfile", "java.nio.file.OpenOption", "org.enso.interpreter.EnsoLanguage", + "org.enso.interpreter.node.InlineableNode", "org.enso.interpreter.node.expression.builtin.BuiltinRootNode", "org.enso.interpreter.runtime.callable.argument.ArgumentDefinition", "org.enso.interpreter.runtime.callable.function.Function", @@ -157,7 +159,11 @@ private void generateCode(MethodDefinition methodDefinition) throws IOException out.println("@NodeInfo("); out.println(" shortName = \"" + methodDefinition.getDeclaredName() + "\","); out.println(" description = \"\"\"\n" + methodDefinition.getDescription() + "\"\"\")"); - out.println("public class " + methodDefinition.getClassName() + " extends BuiltinRootNode {"); + if (methodDefinition.needsFrame()) { + out.println("public class " + methodDefinition.getClassName() + " extends BuiltinRootNode {"); + } else { + out.println("public class " + methodDefinition.getClassName() + " extends BuiltinRootNode implements InlineableNode.Root {"); + } out.println(" private @Child " + methodDefinition.getOriginalClassName() + " bodyNode;"); out.println(" private static final class Internals {"); out.println(" Internals(boolean s) {"); @@ -231,13 +237,16 @@ private void generateCode(MethodDefinition methodDefinition) throws IOException out.println(); if (!methodDefinition.needsFrame()) { out.println(" @Override"); - out.println(" public final InlinedCallNode createDirectCallNode() {"); - out.println(" var n = " + methodDefinition.getConstructorExpression() + ";"); - out.println(" return new InlinedCallNode<>(new Internals(internals.staticOfInstanceMethod), n) {"); - out.println(" public Object call(Object[] args) {"); - out.println(" return handleExecute(extra, body, args);"); + 
out.println(" public final InlineableNode createInlineableNode() {"); + out.println(" class Inlineable extends InlineableNode {"); + out.println(" private final Internals extra = new Internals(internals.staticOfInstanceMethod);"); + out.println(" private @Child " + methodDefinition.getOriginalClassName() + " body = " + methodDefinition.getConstructorExpression() + ";"); + out.println(" @Override"); + out.println(" public Object call(VirtualFrame frame, Object[] args) {"); + out.println(" return handleExecute(frame, extra, body, args);"); out.println(" }"); - out.println(" };"); + out.println(" }"); + out.println(" return new Inlineable();"); out.println(" }"); } @@ -246,9 +255,9 @@ private void generateCode(MethodDefinition methodDefinition) throws IOException if (methodDefinition.needsFrame()) { out.println(" var args = frame.getArguments();"); } else { - out.println(" return handleExecute(this.internals, bodyNode, frame.getArguments());"); + out.println(" return handleExecute(frame, this.internals, bodyNode, frame.getArguments());"); out.println(" }"); - out.println(" private static Object handleExecute(Internals internals, " + methodDefinition.getOriginalClassName() + " bodyNode, Object[] args) {"); + out.println(" private static Object handleExecute(VirtualFrame frame, Internals internals, " + methodDefinition.getOriginalClassName() + " bodyNode, Object[] args) {"); } out.println(" var prefix = internals.staticOfInstanceMethod ? 
1 : 0;"); out.println(" State state = Function.ArgumentsHelper.getState(args);"); diff --git a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/builtins/MethodNodeClassGenerator.java b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/builtins/MethodNodeClassGenerator.java index 1713abb68076..ced89aacd9b8 100644 --- a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/builtins/MethodNodeClassGenerator.java +++ b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/builtins/MethodNodeClassGenerator.java @@ -39,7 +39,8 @@ public void generate( String methodName, String description, String ownerMethodName, - boolean isAutoRegister) + boolean isAutoRegister, + Boolean needsFrame) throws IOException { JavaFileObject gen = processingEnv.getFiler().createSourceFile(builtinNode.jvmFriendlyFullyQualifiedName()); @@ -58,6 +59,9 @@ public void generate( if (!isAutoRegister) { moduleOwnerInfo = ", autoRegister = " + isAutoRegister; } + if (needsFrame != null) { + moduleOwnerInfo = moduleOwnerInfo + ", needsFrame = " + needsFrame; + } out.println( "@BuiltinMethod(type = \"" + ensoTypeName diff --git a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/model/MethodDefinition.java b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/model/MethodDefinition.java index cdc618db0038..c9835cd4eca9 100644 --- a/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/model/MethodDefinition.java +++ b/lib/scala/interpreter-dsl/src/main/java/org/enso/interpreter/dsl/model/MethodDefinition.java @@ -34,8 +34,11 @@ public class MethodDefinition { * @param packageName the name of the package this method is declared in. * @param element the element (class) declaring this method. * @param execute the element (method) containing the logic. 
+ * @param needsFrame optionally specify if we need own frame, if {@code null} the value is derived + * from presence/absence of {@code VirtualFrame} argument */ - public MethodDefinition(String packageName, TypeElement element, ExecutableElement execute) { + public MethodDefinition( + String packageName, TypeElement element, ExecutableElement execute, Boolean needsFrame) { this.annotation = element.getAnnotation(BuiltinMethod.class); this.element = element; this.executeMethod = execute; @@ -46,7 +49,8 @@ public MethodDefinition(String packageName, TypeElement element, ExecutableEleme this.arguments = initArguments(execute); this.imports = initImports(); this.needsCallerInfo = arguments.stream().anyMatch(ArgumentDefinition::isCallerInfo); - this.needsFrame = arguments.stream().anyMatch(ArgumentDefinition::isFrame); + this.needsFrame = + needsFrame != null ? needsFrame : arguments.stream().anyMatch(ArgumentDefinition::isFrame); this.constructorExpression = initConstructor(element); } From cdd00658007879705747e9ab928145e152b4f25e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Wa=C5=9Bko?= Date: Sat, 29 Apr 2023 02:13:29 +0200 Subject: [PATCH 17/34] Change defaults for `Connection.tables` and ensure that `Connection.query` recognizes all available tables (#6443) Closes #6398 --- .../0.0.0-dev/src/Connection/Connection.enso | 47 ++++++++++++++++--- .../Database/0.0.0-dev/src/Data/Dialect.enso | 6 +++ .../src/Extensions/Upload_Table.enso | 10 ++-- .../Postgres/Postgres_Connection.enso | 34 +++++++++----- .../Internal/Postgres/Postgres_Dialect.enso | 5 ++ .../Internal/Redshift/Redshift_Dialect.enso | 6 +++ .../Internal/SQLite/SQLite_Connection.enso | 30 ++++++++---- .../src/Internal/SQLite/SQLite_Dialect.enso | 6 +++ .../Table_Tests/src/Database/Common_Spec.enso | 7 +-- .../src/Database/Postgres_Spec.enso | 14 +++++- .../Table_Tests/src/Database/SQLite_Spec.enso | 14 +++++- .../Table_Tests/src/Database/Upload_Spec.enso | 3 +- 
.../src/Widgets/Database_Widgets_Spec.enso | 2 +- 13 files changed, 142 insertions(+), 42 deletions(-) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso index a7223eb7d09a..3fe5e9dec279 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso @@ -1,7 +1,12 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State import Standard.Base.Runtime.Managed_Resource.Managed_Resource +from Standard.Base.Metadata.Widget import Single_Choice +from Standard.Base.Metadata.Choice import Option +import Standard.Base.Metadata.Display + import Standard.Table.Data.Table.Table as Materialized_Table import project.Data.SQL_Query.SQL_Query @@ -95,18 +100,27 @@ type Connection self.jdbc_connection.with_metadata metadata-> read_column metadata.getTableTypes "TABLE_TYPE" - ## PRIVATE - Returns a materialized Table of all the matching views and tables. + ## Returns a materialized Table of all the matching views and tables. + + ? Temporary Tables + + Note that the temporary tables may be created in a different schema + than the current one, so take this into account when filtering by schema. Arguments: - name_like: The table name pattern to search for. Supports SQL wildcards (`%`, `_`). Defaults to `Nothing` which means all tables are selected. - database: The database name to search in (default is current database). - - schema: The schema name to search in (defaults to current schema). - - types: The table types to search for. The list of possible values can be obtained using the `table_types` method. + - schema: The schema name to search in (defaults to `Nothing`, meaning all + schemas are searched). + - types: The table types to search for. 
The list of possible values can + be obtained using the `table_types` method. Defaults to a set of most + commonly used table types, ignoring internal system tables or indices. - all_fields: Return all the fields in the metadata table. + @types make_table_types_selector + @schema make_schema_selector tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table - tables self name_like=Nothing database=self.database schema=self.schema types=Nothing all_fields=False = + tables self name_like=Nothing database=self.database schema=Nothing types=self.dialect.default_table_types all_fields=False = types_array = if types.is_nothing then Nothing else types.to_array name_map = Map.from_vector [["TABLE_CAT", "Database"], ["TABLE_SCHEM", "Schema"], ["TABLE_NAME", "Name"], ["TABLE_TYPE", "Type"], ["REMARKS", "Description"], ["TYPE_CAT", "Type Database"], ["TYPE_SCHEM", "Type Schema"], ["TYPE_NAME", "Type Name"]] self.jdbc_connection.with_metadata metadata-> @@ -133,11 +147,12 @@ type Connection raised. - If provided with a `Table_Name` or a text short-hand and the table is not found, a `Table_Not_Found` error is raised. + @query make_table_name_selector query : Text | SQL_Query -> Text -> Table ! Table_Not_Found | SQL_Error query self query alias="" = case query of _ : Text -> result = self.query alias=alias <| - if self.tables.at 'Name' . to_vector . contains query then (SQL_Query.Table_Name query) else + if (all_known_table_names self).contains query then (SQL_Query.Table_Name query) else SQL_Query.Raw_SQL query result.catch SQL_Error sql_error-> case self.dialect.is_probably_a_query query of @@ -167,6 +182,7 @@ type Connection - query: name of the table or sql statement to query. If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. - limit: the maximum number of rows to return. 
+ @query make_table_name_selector read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table read self query limit=Nothing = self.query query . read max_rows=limit @@ -218,3 +234,22 @@ type Connection drop_table : Text -> Nothing drop_table self table_name = self.execute_update (self.dialect.generate_sql (Query.Drop_Table table_name)) + +## PRIVATE +make_table_types_selector connection = + Single_Choice values=(connection.table_types.map t-> Option t t.pretty) + +## PRIVATE +make_schema_selector connection = + schemas_without_nothing = connection.schemas.filter Filter_Condition.Not_Nothing + Single_Choice values=(schemas_without_nothing.map t-> Option t t.pretty)+[Option "any schema" "Nothing"] + +## PRIVATE +all_known_table_names connection = + tables = connection.tables name_like=Nothing database=connection.database schema=Nothing types=Nothing all_fields=False + tables.at "Name" . to_vector + +## PRIVATE +make_table_name_selector connection = + tables_to_display = connection.tables.at "Name" . to_vector + Single_Choice display=Display.Always values=(tables_to_display.map t-> Option t t.pretty) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso index 7f278986aa4a..00bd7fe3cb53 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso @@ -187,6 +187,12 @@ type Dialect _ = operation Unimplemented.throw "This is an interface only." + ## PRIVATE + The default table types to use when listing tables. + default_table_types : Vector Text + default_table_types self = + Unimplemented.throw "This is an interface only." + ## PRIVATE The dialect of SQLite databases. 
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso index cb200039c909..90423669731f 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso @@ -67,11 +67,15 @@ In_Memory_Table.create_database_table self connection table_name=Nothing primary sql_type = type_mapping.value_type_to_sql value_type on_problems sql_type_text = type_mapping.sql_type_to_text sql_type Pair.new name sql_type_text - create_statement = connection.dialect.generate_sql <| + create_table_statement = connection.dialect.generate_sql <| Query.Create_Table effective_table_name column_descriptors checked_primary_key temporary - upload_status = create_statement.if_not_error <| connection.jdbc_connection.run_within_transaction <| - Panic.rethrow <| connection.execute_update create_statement + ## `create_query.if_not_error` is used to ensure that if there are any + dataflow errors up to this point, we want to propagate them and not + continue. Otherwise, they could 'leak' to `Panic.rethrow` and be wrongly + raised as panics. 
+ upload_status = create_table_statement.if_not_error <| connection.jdbc_connection.run_within_transaction <| + Panic.rethrow <| connection.execute_update create_table_statement if structure_only.not then column_names = column_descriptors.map .first insert_template = make_batched_insert_template connection effective_table_name column_names diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso index fdd2cf4fe84a..3c21c341e277 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso @@ -13,12 +13,13 @@ import project.Data.SQL_Query.SQL_Query import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type import project.Data.Table.Table as Database_Table +import project.Internal.IR.Query.Query import project.Internal.JDBC_Connection import project.Internal.SQL_Type_Reference.SQL_Type_Reference -from project.Internal.Result_Set import read_column - +from project.Connection.Connection import make_table_types_selector, make_schema_selector, make_table_name_selector from project.Errors import SQL_Error +from project.Internal.Result_Set import read_column type Postgres_Connection @@ -81,17 +82,27 @@ type Postgres_Connection table_types : Vector Text table_types self = self.connection.table_types - ## Returns a materialised Table of all the matching views and tables. + ## Returns a materialized Table of all the matching views and tables. + + ? Temporary Tables + + Note that the temporary tables may be created in a different schema + than the current one, so take this into account when filtering by schema. Arguments: - - name_like: The table name pattern to search for. Support SQL wildcards (`%`, `_`). + - name_like: The table name pattern to search for. 
Supports SQL wildcards (`%`, `_`). Defaults to `Nothing` which + means all tables are selected. - database: The database name to search in (default is current database). - - schema: The schema name to search in (defaults to current schema). - - types: The table types to search for. The list of values can be obtained using the `table_types` method. + - schema: The schema name to search in (defaults to `Nothing`, meaning all + schemas are searched). + - types: The table types to search for. The list of possible values can + be obtained using the `table_types` method. Defaults to a set of most + commonly used table types, ignoring internal system tables or indices. - all_fields: Return all the fields in the metadata table. - @types (self-> Single_Choice values=(self.table_types.map t-> Option t t.pretty)) + @types make_table_types_selector + @schema make_schema_selector tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table - tables self name_like=Nothing database=self.database schema=self.schema types=Nothing all_fields=False = + tables self name_like=Nothing database=self.database schema=Nothing types=self.dialect.default_table_types all_fields=False = self.connection.tables name_like database schema types all_fields ## Set up a query returning a Table object, which can be used to work with data within the database or load it into memory. @@ -100,7 +111,7 @@ type Postgres_Connection - query: name of the table or sql statement to query. If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. - alias: optionally specify a friendly alias for the query. - @query (self-> Single_Choice display=Display.Always values=(self.tables.at "Name" . to_vector . 
map t-> Option t t.pretty)) + @query make_table_name_selector query : Text | SQL_Query -> Text -> Database_Table query self query alias="" = self.connection.query query alias @@ -110,7 +121,7 @@ type Postgres_Connection - query: name of the table or sql statement to query. If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. - limit: the maximum number of rows to return. - @query (self-> Single_Choice display=Display.Always values=(self.tables.at "Name" . to_vector . map t-> Option t t.pretty)) + @query make_table_name_selector read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table read self query limit=Nothing = self.connection.read query limit @@ -127,7 +138,6 @@ type Postgres_Connection execute_update self query = self.connection.execute_update query - ## PRIVATE Access the dialect. dialect self = self.connection.dialect @@ -148,7 +158,7 @@ type Postgres_Connection Arguments: - url: The URL to connect to. - properties: A vector of properties for the connection. - - make_new: a function that returns a new connection. + - make_new: a function that returns a new connection. create : Text -> Vector -> (Text -> Text -> Postgres_Connection) -> Postgres_Connection create url properties make_new = jdbc_connection = JDBC_Connection.create url properties diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso index 3d6f5fcc0c17..9579eb6463c9 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso @@ -199,6 +199,11 @@ type Postgres_Dialect is_supported self operation = self.internal_generator_dialect.is_supported operation + ## PRIVATE + The default table types to use when listing tables. 
+ default_table_types : Vector Text + default_table_types self = + ["TABLE", "VIEW", "TEMPORARY TABLE", "TEMPORARY VIEW", "MATERIALIZED VIEW", "FOREIGN TABLE", "PARTITIONED TABLE"] ## PRIVATE make_internal_generator_dialect = diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso index c673f7e61cbc..80b1230c8f3d 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso @@ -146,3 +146,9 @@ type Redshift_Dialect is_supported : Text -> Boolean is_supported self operation = self.internal_generator_dialect.is_supported operation + + ## PRIVATE + The default table types to use when listing tables. + default_table_types : Vector Text + default_table_types self = + ["TABLE", "VIEW", "TEMPORARY TABLE", "TEMPORARY VIEW", "MATERIALIZED VIEW", "FOREIGN TABLE", "PARTITIONED TABLE"] diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso index 8f1c796e8603..c10f17d79939 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso @@ -9,13 +9,15 @@ import Standard.Table.Data.Table.Table as Materialized_Table import project.Connection.Connection.Connection import project.Data.SQL_Query.SQL_Query +import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type import project.Data.Dialect import project.Data.Table.Table as Database_Table +import project.Internal.IR.Query.Query import project.Internal.JDBC_Connection import project.Internal.SQL_Type_Reference.SQL_Type_Reference -import project.Data.SQL_Statement.SQL_Statement 
+from project.Connection.Connection import make_table_types_selector, make_schema_selector, make_table_name_selector from project.Errors import SQL_Error type SQLite_Connection @@ -74,17 +76,27 @@ type SQLite_Connection table_types : Vector Text table_types self = self.connection.table_types - ## Returns a materialised Table of all the matching views and tables. + ## Returns a materialized Table of all the matching views and tables. + + ? Temporary Tables + + Note that the temporary tables may be created in a different schema + than the current one, so take this into account when filtering by schema. Arguments: - - name_like: The table name pattern to search for. Support SQL wildcards (`%`, `_`). + - name_like: The table name pattern to search for. Supports SQL wildcards (`%`, `_`). Defaults to `Nothing` which + means all tables are selected. - database: The database name to search in (default is current database). - - schema: The schema name to search in (defaults to current schema). - - types: The table types to search for. The list of values can be obtained using the `table_types` method. + - schema: The schema name to search in (defaults to `Nothing`, meaning all + schemas are searched). + - types: The table types to search for. The list of possible values can + be obtained using the `table_types` method. Defaults to a set of most + commonly used table types, ignoring internal system tables or indices. - all_fields: Return all the fields in the metadata table. 
- @types (self-> Single_Choice values=(self.table_types.map t-> Option t t.pretty)) + @types make_table_types_selector + @schema make_schema_selector tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table - tables self name_like=Nothing database=self.database schema=self.schema types=Nothing all_fields=False = + tables self name_like=Nothing database=self.database schema=Nothing types=self.dialect.default_table_types all_fields=False = self.connection.tables name_like database schema types all_fields ## Set up a query returning a Table object, which can be used to work with data within the database or load it into memory. @@ -93,7 +105,7 @@ type SQLite_Connection - query: name of the table or sql statement to query. If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. - alias: optionally specify a friendly alias for the query. - @query (self-> Single_Choice display=Display.Always values=(self.tables.at "Name" . to_vector . map t-> Option t t.pretty)) + @query make_table_name_selector query : Text | SQL_Query -> Text -> Database_Table query self query alias="" = self.connection.query query alias @@ -103,7 +115,7 @@ type SQLite_Connection - query: name of the table or sql statement to query. If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. - limit: the maximum number of rows to return. - @query (self-> Single_Choice display=Display.Always values=(self.tables.at "Name" . to_vector . 
map t-> Option t t.pretty)) + @query make_table_name_selector read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table read self query limit=Nothing = self.connection.read query limit diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso index a0e00b421b4d..aa8fd0d67058 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso @@ -207,6 +207,12 @@ type SQLite_Dialect is_supported self operation = self.internal_generator_dialect.is_supported operation + ## PRIVATE + The default table types to use when listing tables. + default_table_types : Vector Text + default_table_types self = + ["TABLE", "VIEW", "GLOBAL TEMPORARY"] + ## PRIVATE make_internal_generator_dialect = diff --git a/test/Table_Tests/src/Database/Common_Spec.enso b/test/Table_Tests/src/Database/Common_Spec.enso index ba785af9822d..1876e71823dd 100644 --- a/test/Table_Tests/src/Database/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common_Spec.enso @@ -30,10 +30,7 @@ spec prefix connection = run_tests prefix connection upload run_tests prefix connection upload = - ## We have to upload the table as non-temporary, because otherwise it will - not be visible in the list of tables and make `Table.query` confused. - TODO [RW] remove temporary=False once #6398 is done. 
- t1 = upload "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) temporary=False + t1 = upload "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]]) Test.group prefix+"Basic Table Access" <| Test.specify "should allow to materialize tables and columns into local memory" <| df = t1.read @@ -99,7 +96,7 @@ run_tests prefix connection upload = Also, the table name cannot be too long as Postgres truncates at 63 chars (and we append 37 chars of uniqueness suffix) and the test logic will break then. - t4 = upload 'aSELECT "A",\'B\' FROM t;--' (Table.new [["X", ["a", "B"]], ["Y", [2, 5]]]) temporary=False + t4 = upload 'aSELECT "A",\'B\' FROM t;--' (Table.new [["X", ["a", "B"]], ["Y", [2, 5]]]) t5 = connection.query t4.name m5 = t5.read m5.column_names . should_equal ["X", "Y"] diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso index c0c8ff9ab60e..b9f959e7b3c3 100644 --- a/test/Table_Tests/src/Database/Postgres_Spec.enso +++ b/test/Table_Tests/src/Database/Postgres_Spec.enso @@ -56,6 +56,9 @@ postgres_specific_spec connection db_name setup = vinfo = Name_Generator.random_name "TestView" connection.execute_update 'CREATE VIEW "'+vinfo+'" AS SELECT "A" FROM "'+tinfo+'";' + temporary_table = Name_Generator.random_name "TemporaryTable" + (Table.new [["X", [1, 2, 3]]]).create_database_table connection temporary_table temporary=True + Test.specify "should be able to list table types" <| table_types = connection.table_types table_types.length . should_not_equal 0 @@ -66,8 +69,11 @@ postgres_specific_spec connection db_name setup = tables = connection.tables tables.row_count . should_not_equal 0 tables.columns.map .name . should_equal ["Database", "Schema", "Name", "Type", "Description"] - tables.at "Name" . to_vector . contains tinfo . should_be_true - tables.at "Name" . to_vector . contains vinfo . should_be_true + + table_names = tables.at "Name" . 
to_vector + table_names.should_contain tinfo + table_names.should_contain vinfo + table_names.should_contain temporary_table Test.specify "should be able to filter tables by name" <| tables = connection.tables tinfo @@ -77,7 +83,11 @@ postgres_specific_spec connection db_name setup = tables.at "Schema" . to_vector . at 0 . should_equal "public" tables.at "Name" . to_vector . at 0 . should_equal tinfo tables.at "Type" . to_vector . at 0 . should_equal "TABLE" + connection.tables "TestT_ble%" . row_count . should_equal 1 + connection.tables "Temporary%ble%" . row_count . should_equal 1 + connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["TEMPORARY TABLE"] + connection.tables "N_nexistent%" . row_count . should_equal 0 Test.specify "should be able to filter tables by type" <| tables = connection.tables types=["VIEW"] diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 2129dfb7b3c0..241ca1f8a88a 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -40,6 +40,9 @@ sqlite_specific_spec prefix connection = vinfo = Name_Generator.random_name "TestView" connection.execute_update 'CREATE VIEW "'+vinfo+'" AS SELECT "A" FROM "'+tinfo+'";' + temporary_table = Name_Generator.random_name "TemporaryTable" + (Table.new [["X", [1, 2, 3]]]).create_database_table connection temporary_table temporary=True + Test.specify "should be able to list table types" <| table_types = connection.table_types table_types.length . should_not_equal 0 @@ -50,8 +53,11 @@ sqlite_specific_spec prefix connection = tables = connection.tables tables.row_count . should_not_equal 0 tables.columns.map .name . should_equal ["Database", "Schema", "Name", "Type", "Description"] - tables.at "Name" . to_vector . contains tinfo . should_be_true - tables.at "Name" . to_vector . contains vinfo . should_be_true + + table_names = tables.at "Name" . 
to_vector + table_names.should_contain tinfo + table_names.should_contain vinfo + table_names.should_contain temporary_table Test.specify "should be able to filter tables by name" <| tables = connection.tables tinfo @@ -60,7 +66,11 @@ sqlite_specific_spec prefix connection = tables.at "Schema" . to_vector . at 0 . should_equal Nothing tables.at "Name" . to_vector . at 0 . should_equal tinfo tables.at "Type" . to_vector . at 0 . should_equal "TABLE" + connection.tables "TestT_ble%" . row_count . should_equal 1 + connection.tables "Temporary%ble%" . row_count . should_equal 1 + connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["GLOBAL TEMPORARY"] + connection.tables "N_nexistent%" . row_count . should_equal 0 Test.specify "should be able to filter tables by type" <| tables = connection.tables types=["VIEW"] diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index 4c323d4fc210..db41b1e38d78 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -41,8 +41,7 @@ spec make_new_connection prefix persistent_connector=True = connection.tables.at "Name" . to_vector . should_contain db_table.name connection.query db_table.name . at "X" . to_vector . should_equal [1, 2, 3] - postgres_pending = if prefix.contains "Postgre" then "TODO: See issue https://github.com/enso-org/enso/issues/6398" - Test.specify "should include the temporary table in the tables directory" pending=postgres_pending <| + Test.specify "should include the temporary table in the tables directory" <| db_table = in_memory_table.create_database_table connection (Name_Generator.random_name "temporary_table_1") temporary=True db_table.at "X" . to_vector . should_equal [1, 2, 3] connection.tables.at "Name" . to_vector . 
should_contain db_table.name diff --git a/test/Visualization_Tests/src/Widgets/Database_Widgets_Spec.enso b/test/Visualization_Tests/src/Widgets/Database_Widgets_Spec.enso index 4c52332a93ad..762fb6ae7134 100644 --- a/test/Visualization_Tests/src/Widgets/Database_Widgets_Spec.enso +++ b/test/Visualization_Tests/src/Widgets/Database_Widgets_Spec.enso @@ -25,7 +25,7 @@ spec = Test.group "Widgets for In-Database Connection with table name sets" <| Test.specify "works for `query` and `read`" <| - choices = ['sqlite_schema', 'a_table', 'another', 'mock_table'] . map n-> Choice.Option n n.pretty + choices = ['a_table', 'another', 'mock_table'] . map n-> Choice.Option n n.pretty expect = [["query", Widget.Single_Choice choices Nothing Display.Always]] . to_json Widgets.get_widget_json connection "query" ["query"] . should_equal expect Widgets.get_widget_json connection "read" ["query"] . should_equal expect From 6b0c682b08b7745368cec3474cc774c51a5b48f5 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Sat, 29 Apr 2023 09:39:18 +0100 Subject: [PATCH 18/34] Add Execution Context control to Text.write (#6459) - Adjusted `Context.is_enabled` to support default argument (moved built in so can have defaults). - Made `environment` case-insensitive. - Bug fix for play button. - Short hand to execute within an enabled context. - Forbid file writing if the Output context is disabled with a `Forbidden_Operation` error. - Add temporary file support via `File.create_temporary_file` which is deleted on exit of JVM. - Execution Context first pass in `Text.write`. - Added dry run warning. - Writes to a temporary file if disabled. - Created a `DryRunFileManager` which will create and manage the temporary files. - Added `format` dropdown to `File.read` and `Data.read`. - Renamed `JSON_File` to `JSON_Format` to be consistent. (still to unit test). 
--- CHANGELOG.md | 3 + .../engine-protocol/src/language_server.rs | 2 +- .../lib/Standard/Base/0.0.0-dev/src/Data.enso | 5 +- .../0.0.0-dev/src/Data/Text/Encoding.enso | 11 ++ .../0.0.0-dev/src/Data/Text/Extensions.enso | 2 + .../Base/0.0.0-dev/src/Errors/Common.enso | 13 ++ .../lib/Standard/Base/0.0.0-dev/src/Main.enso | 2 +- .../Standard/Base/0.0.0-dev/src/Runtime.enso | 21 ++- .../Base/0.0.0-dev/src/System/File.enso | 131 +++++++++++------- .../src/System/File/Write_Extensions.enso | 41 +++++- .../0.0.0-dev/src/System/File_Format.enso | 32 ++++- .../Standard/Examples/0.0.0-dev/src/Main.enso | 10 +- .../0.0.0-dev/src/Data/Table_Conversions.enso | 4 +- .../Test/0.0.0-dev/src/Test_Reporter.enso | 7 +- .../builtin/runtime/ContextIsEnabledNode.java | 2 +- .../interpreter/runtime/data/EnsoFile.java | 4 +- .../java/org/enso/base/DryRunFileManager.java | 42 ++++++ .../enso/base/file_format/JSONFormatSPI.java | 2 +- .../Table_Tests/src/Database/SQLite_Spec.enso | 7 +- test/Table_Tests/src/IO/Csv_Spec.enso | 5 +- .../src/IO/Delimited_Read_Spec.enso | 7 +- .../src/IO/Delimited_Write_Spec.enso | 3 +- test/Table_Tests/src/IO/Excel_Spec.enso | 5 +- test/Table_Tests/src/IO/Formats_Spec.enso | 3 +- test/Tests/src/System/File_Read_Spec.enso | 4 +- test/Tests/src/System/File_Spec.enso | 20 +-- .../System/Reporting_Stream_Decoder_Spec.enso | 3 +- .../System/Reporting_Stream_Encoder_Spec.enso | 3 +- test/Visualization_Tests/src/Table_Spec.enso | 3 +- .../Standard/Base/0.0.0-dev/src/Runtime.enso | 7 +- 30 files changed, 295 insertions(+), 109 deletions(-) create mode 100644 std-bits/base/src/main/java/org/enso/base/DryRunFileManager.java diff --git a/CHANGELOG.md b/CHANGELOG.md index c5bc858c6b5e..0e64c5e290ff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -398,6 +398,8 @@ `Time_Of_Day`, `Time_Zone`, and `URI` to `Text`.][6404] - [Implemented `create_database_table` allowing upload of in-memory tables.][6429] +- [Added execution context control to writing files and dry run 
capabilities to + `Text.write`.][6459] [debug-shortcuts]: https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -602,6 +604,7 @@ [6404]: https://github.com/enso-org/enso/pull/6404 [6347]: https://github.com/enso-org/enso/pull/6347 [6429]: https://github.com/enso-org/enso/pull/6429 +[6459]: https://github.com/enso-org/enso/pull/6459 #### Enso Compiler diff --git a/app/gui/controller/engine-protocol/src/language_server.rs b/app/gui/controller/engine-protocol/src/language_server.rs index 5ebe1784818c..9ae91a906ab3 100644 --- a/app/gui/controller/engine-protocol/src/language_server.rs +++ b/app/gui/controller/engine-protocol/src/language_server.rs @@ -157,7 +157,7 @@ trait API { /// Restart the program execution. #[MethodInput=RecomputeInput, rpc_name="executionContext/recompute"] - fn recompute(&self, context_id: ContextId, invalidated_expressions: InvalidatedExpressions, mode: Option) -> (); + fn recompute(&self, context_id: ContextId, invalidated_expressions: InvalidatedExpressions, execution_environment: Option) -> (); /// Obtain the full suggestions database. #[MethodInput=GetSuggestionsDatabaseInput, rpc_name="search/getSuggestionsDatabase"] diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso index 976093aadcb9..9ff73ff38a73 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso @@ -16,10 +16,9 @@ import project.Network.HTTP.Request.Request import project.Network.HTTP.Request_Error import project.Nothing.Nothing import project.System.File.File -import project.System.File_Format.Auto_Detect -import project.System.File_Format.File_Format from project.Data.Boolean import Boolean, True, False +from project.System.File_Format import Auto_Detect, File_Format, format_widget ## ALIAS Load, Open Reads a file into Enso. 
@@ -56,6 +55,7 @@ from project.Data.Boolean import Boolean, True, False import Standard.Examples example_xls_to_table = Data.read Examples.xls (Excel (Worksheet 'Dates')) +@format format_widget read : Text | File -> File_Format -> Problem_Behavior -> Any ! File_Error read path format=Auto_Detect (on_problems=Problem_Behavior.Report_Warning) = File.new path . read format on_problems @@ -81,6 +81,7 @@ read path format=Auto_Detect (on_problems=Problem_Behavior.Report_Warning) = import Standard.Examples example_read = Data.read_text Examples.csv_path +@encoding Encoding.default_widget read_text : (Text | File) -> Encoding -> Problem_Behavior -> Text read_text path (encoding=Encoding.utf_8) (on_problems=Problem_Behavior.Report_Warning) = File.new path . read_text encoding on_problems diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso index 21749c2334ad..b2478bcc5dc9 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso @@ -9,8 +9,19 @@ polyglot java import java.nio.charset.Charset polyglot java import java.nio.charset.UnsupportedCharsetException polyglot java import org.enso.base.Text_Utils +from project.Metadata.Widget import Single_Choice +from project.Metadata.Choice import Option +import project.Metadata.Display + ## Represents a character encoding. type Encoding + ## PRIVATE + Gets the default drop down option for this encoding. 
+ default_widget : Single_Choice + default_widget = + values = [Option "UTF-8" "Encoding.utf_8", Option "ASCII" "Encoding.ascii", Option "UTF-16LE" "Encoding.utf_16_le", Option "UTF-16BE" "Encoding.utf_16_be", Option "UTF-32LE" "Encoding.utf_32_le", Option "UTF-32BE" "Encoding.utf_32_be", Option "Windows-1250" "Encoding.windows_1250", Option "Windows-1251" "Encoding.windows_1251", Option "Windows-1252" "Encoding.windows_1252", Option "Windows-1253" "Encoding.windows_1253", Option "Windows-1254" "Encoding.windows_1254", Option "Windows-1255" "Encoding.windows_1255", Option "Windows-1256" "Encoding.windows_1256", Option "Windows-1257" "Encoding.windows_1257", Option "Windows-1258" "Encoding.windows_1258"] + Single_Choice values=values display=Display.When_Modified + ## PRIVATE ADVANCED Get all available character sets from Java as Encodings. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso index d1c7918dc9fb..c36c4ba34b72 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso @@ -644,6 +644,7 @@ Text.is_whitespace self = Get the ASCII bytes of the text "Hello". "Hello".bytes (Encoding.ascii) +@encoding Encoding.default_widget Text.bytes : Encoding -> Problem_Behavior -> Vector Integer Text.bytes self encoding on_problems=Problem_Behavior.Report_Warning = result = Encoding_Utils.get_bytes self (encoding . to_java_charset) @@ -664,6 +665,7 @@ Text.bytes self encoding on_problems=Problem_Behavior.Report_Warning = Get the ASCII bytes of the text "Hello". "Hello".bytes (Encoding.ascii) +@encoding Encoding.default_widget Text.from_bytes : Vector Integer -> Encoding -> Problem_Behavior -> Text Text.from_bytes bytes encoding on_problems=Problem_Behavior.Report_Error = result = Encoding_Utils.from_bytes bytes.to_array (encoding . 
to_java_charset) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso index f509acd7b384..6f0fb3e0c146 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso @@ -336,3 +336,16 @@ type Forbidden_Operation Convert the Forbidden_Operation error to a human-readable format. to_display_text : Text to_display_text self = "Forbidden operation: "+self.operation+"." + +type Dry_Run_Operation + ## PRIVATE + A warning that the operation has only been performed in a test mode. + + Arguments: + - message: The message to be displayed. + Warning message + + ## PRIVATE + Convert the Dry_Run_Operation to a human-readable format. + to_display_text : Text + to_display_text self = self.message diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso index 3d3749e1893d..6f978b0b6fc3 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso @@ -63,7 +63,7 @@ export project.Warning.Warning from project.Data.Boolean export Boolean, True, False from project.Function export all from project.Data.Numbers export Number, Integer, Decimal -from project.System.File_Format export File_Format, Plain_Text_Format, Plain_Text, Bytes, Infer, Auto_Detect, JSON_File +from project.System.File_Format export File_Format, Plain_Text_Format, Plain_Text, Bytes, Infer, Auto_Detect, JSON_Format import project.Data import project.Data.Filter_Condition.Filter_Condition diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso index 40ecb4a4dae8..f505f80f77bb 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso @@ -1,6 +1,8 @@ import 
project.Any.Any import project.Data.Array.Array import project.Data.Boolean.Boolean +import project.Data.Text.Case.Case +import project.Data.Text.Extensions import project.Data.Text.Text import project.Data.Vector.Vector import project.Errors.Common.Forbidden_Operation @@ -159,7 +161,18 @@ type Context - environment: Name of the execution environment. - context: The context to enable. is_enabled : Text -> Boolean - is_enabled self environment=Runtime.current_execution_environment = @Builtin_Method "Context.is_enabled" + is_enabled self environment=Runtime.current_execution_environment = + self.is_enabled_builtin (environment.to_case Case.Lower) + + ## PRIVATE + is_enabled_builtin : Text -> Boolean + is_enabled_builtin self environment = @Builtin_Method "Context.is_enabled_builtin" + + ## PRIVATE + Run an action with the Context enabled. + with_enabled : Function -> Any + with_enabled self ~action = + with_enabled_context self Runtime.current_execution_environment action ## PRIVATE @@ -179,7 +192,8 @@ current_execution_environment = @Builtin_Method "Runtime.current_execution_envir - context: The context to enable. - action: Action to be performed with the context enabled. with_enabled_context : Context -> Text -> Function -> Any -with_enabled_context context environment=Runtime.current_execution_environment ~action = with_enabled_context_builtin context environment action +with_enabled_context context environment=Runtime.current_execution_environment ~action = + with_enabled_context_builtin context (environment.to_case Case.Lower) action ## PRIVATE ADVANCED @@ -205,7 +219,8 @@ with_enabled_context_builtin context environment ~action = @Builtin_Method "Runt - context: The context to disable. - action: Action to be performed with the context disabled. 
with_disabled_context : Context -> Text -> Function -> Any -with_disabled_context context environment=Runtime.current_execution_environment ~action = with_disabled_context_builtin context environment action +with_disabled_context context environment=Runtime.current_execution_environment ~action = + with_disabled_context_builtin context (environment.to_case Case.Lower) action ## PRIVATE ADVANCED diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso index 4a542edd7a60..b3447407e838 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso @@ -11,6 +11,7 @@ import project.Data.Text.Text import project.Data.Time.Time_Of_Day.Time_Of_Day import project.Data.Vector.Vector import project.Error.Error +import project.Errors.Common.Forbidden_Operation import project.Errors.Encoding_Error.Encoding_Error import project.Errors.File_Error.File_Error import project.Errors.Illegal_Argument.Illegal_Argument @@ -18,21 +19,23 @@ import project.Errors.Problem_Behavior.Problem_Behavior import project.Meta import project.Nothing.Nothing import project.Panic.Panic +import project.Runtime.Context import project.Runtime.Managed_Resource.Managed_Resource import project.System.File.File_Access.File_Access -import project.System.File_Format.Auto_Detect -import project.System.File_Format.File_Format import project.System.File.File_Permissions.File_Permissions from project.Data.Boolean import Boolean, True, False +from project.System.File_Format import Auto_Detect, File_Format, format_widget import project.Metadata.Widget from project.Metadata.Choice import Option import project.Metadata.Display +polyglot java import org.enso.base.DryRunFileManager polyglot java import org.enso.base.Encoding_Utils polyglot java import org.enso.base.encoding.ReportingStreamDecoder polyglot java import org.enso.base.encoding.ReportingStreamEncoder 
+polyglot java import java.io.File as Java_File polyglot java import java.io.InputStream as Java_Input_Stream polyglot java import java.io.OutputStream as Java_Output_Stream polyglot java import java.nio.file.FileSystems @@ -64,6 +67,28 @@ type File _ : File -> path _ -> Error.throw (Illegal_Argument.Error "new file should be either a File or a Text") + ## Creates a temporary file which will be deleted when Enso exits. + create_temporary_file : Text -> Text -> File + create_temporary_file prefix="temp" suffix=".tmp" = + java_file = Java_File.createTempFile prefix suffix + java_file.deleteOnExit + File.new java_file.getAbsolutePath + + ## PRIVATE + Create a dry run temporary file which will be deleted when Enso exits. + + For an absolute path the same temporary file is returned. + If this file is a temporary file that was generated by + `create_dry_run_file` on another file, it is returned as-is. + create_dry_run_file : Boolean -> File ! File_Error + create_dry_run_file self copy_original=False = + temp_path = DryRunFileManager.getTemporaryFile self.absolute.path + if temp_path.is_nothing then Error.throw (File_Error.IO_Error "Unable to create a temporary file.") else + temp = File.new temp_path + if self.exists && copy_original then + self.copy_to temp replace_existing=True + temp + ## ALIAS Current Directory Returns the current working directory (CWD) of the current program. @@ -116,7 +141,27 @@ type File file.with_output_stream [File_Access.Create, File_Access.Write] action with_output_stream : Vector File_Access -> (Output_Stream -> Any ! File_Error) -> Any ! File_Error with_output_stream self open_options action = - Managed_Resource.bracket (self.new_output_stream open_options) (_.close) action + new_output_stream : File -> Vector File_Access -> Output_Stream ! File_Error + new_output_stream file open_options = + opts = open_options . map (_.to_java) . 
to_array + stream = File_Error.handle_java_exceptions file <| + file.output_stream_builtin opts + ## We re-wrap the File Not Found error to return the parent directory + instead of the file itself - because the file that is being written + may not exist and it will not be an error, it is the parent directory + that does not exist is what prevents the write operation from + succeeding. + ## Until #5792 properly fixes catch, we cannot catch + `File_Error.Not_Found` specifically, so instead we catch all + `File_Error`s and match the needed one. + stream_2 = stream.catch File_Error error-> case error of + File_Error.Not_Found file_path -> Error.throw (File_Error.Not_Found file_path.parent) + _ -> stream + resource = Managed_Resource.register stream_2 close_stream + Output_Stream.Value file resource + + if Context.Output.is_enabled.not then Error.throw (Forbidden_Operation.Error "File writing is forbidden as the Output context is disabled") else + Managed_Resource.bracket (new_output_stream self open_options) (_.close) action ## PRIVATE Creates a new output stream for this file. Recommended to use @@ -125,8 +170,8 @@ type File Arguments: - options: A vector of `File_Access` objects determining how to open the stream. These options set the access properties of the stream. - output_stream : Vector File_Access -> Output_Stream - output_stream self options = @Builtin_Method "File.output_stream" + output_stream_builtin : Vector File_Access -> Output_Stream + output_stream_builtin self options = @Builtin_Method "File.output_stream_builtin" ## PRIVATE Creates a new input stream for this file. Recommended to use @@ -196,6 +241,7 @@ type File import Standard.Examples example_xls_to_table = Examples.xls.read (Excel (Worksheet 'Dates')) + @format format_widget read : File_Format -> Problem_Behavior -> Any ! 
File_Error read self format=Auto_Detect (on_problems=Problem_Behavior.Report_Warning) = format.read self on_problems @@ -231,6 +277,7 @@ type File import Standard.Examples example_read = Examples.csv.read + @encoding Encoding.default_widget read_text : Encoding -> Problem_Behavior -> Text ! File_Error read_text self (encoding=Encoding.utf_8) (on_problems=Problem_Behavior.Report_Warning) = file = File.new self @@ -366,7 +413,15 @@ type File example_is_directory = (Examples.data_dir / "my_directory") . create_directory create_directory : Nothing - create_directory self = @Builtin_Method "File.create_directory" + create_directory self = + if Context.Output.is_enabled.not then Error.throw (Forbidden_Operation.Error "Directory creation is forbidden as the Output context is disabled") else + self.create_directory_builtin + + + ## PRIVATE + Creates the directory represented by this file if it did not exist. + create_directory_builtin : Nothing + create_directory_builtin self = @Builtin_Method "File.create_directory_builtin" ## Checks whether the file exists and is a regular file. @@ -491,7 +546,8 @@ type File file.delete delete : Nothing ! File_Error delete self = - File_Error.handle_java_exceptions self self.delete_builtin + if Context.Output.is_enabled.not then Error.throw (Forbidden_Operation.Error "File deleting is forbidden as the Output context is disabled") else + File_Error.handle_java_exceptions self self.delete_builtin ## Moves the file to the specified destination. @@ -501,11 +557,12 @@ type File destination file already exists. Defaults to `False`. copy_to : File -> Boolean -> Nothing ! 
File_Error copy_to self destination replace_existing=False = - File_Error.handle_java_exceptions self <| case replace_existing of - True -> - copy_options = [StandardCopyOption.REPLACE_EXISTING].to_array - self.copy_builtin destination copy_options - False -> self.copy_builtin destination Array.empty + if Context.Output.is_enabled.not then Error.throw (Forbidden_Operation.Error "File copying is forbidden as the Output context is disabled") else + File_Error.handle_java_exceptions self <| case replace_existing of + True -> + copy_options = [StandardCopyOption.REPLACE_EXISTING].to_array + self.copy_builtin destination copy_options + False -> self.copy_builtin destination Array.empty ## Moves the file to the specified destination. @@ -515,11 +572,12 @@ type File destination file already exists. Defaults to `False`. move_to : File -> Boolean -> Nothing ! File_Error move_to self destination replace_existing=False = - File_Error.handle_java_exceptions self <| case replace_existing of - True -> - copy_options = [StandardCopyOption.REPLACE_EXISTING].to_array - self.move_builtin destination copy_options - False -> self.move_builtin destination Array.empty + if Context.Output.is_enabled.not then Error.throw (Forbidden_Operation.Error "File moving is forbidden as the Output context is disabled") else + File_Error.handle_java_exceptions self <| case replace_existing of + True -> + copy_options = [StandardCopyOption.REPLACE_EXISTING].to_array + self.move_builtin destination copy_options + False -> self.move_builtin destination Array.empty ## Deletes the file if it exists on disk. @@ -553,35 +611,6 @@ type File resource = Managed_Resource.register stream close_stream Input_Stream.Value self resource - ## PRIVATE - ADVANCED - Returns a new output stream for this file. - - Arguments: - - open_options: A vector of `File_Access` objects determining how to open - the stream. These options set the access properties of the stream. 
- - The returned stream should be closed as soon as it is not used anymore. - The `with_output_stream` method should be preferred whenever possible. - new_output_stream : Vector File_Access -> Output_Stream ! File_Error - new_output_stream self open_options = - opts = open_options . map (_.to_java) . to_array - stream = File_Error.handle_java_exceptions self <| - self.output_stream opts - ## We re-wrap the File Not Found error to return the parent directory - instead of the file itself - because the file that is being written - may not exist and it will not be an error, it is the parent directory - that does not exist is what prevents the write operation from - succeeding. - ## Until #5792 properly fixes catch, we cannot catch - `File_Error.Not_Found` specifically, so instead we catch all - `File_Error`s and match the needed one. - stream_2 = stream.catch File_Error error-> case error of - File_Error.Not_Found file_path -> Error.throw (File_Error.Not_Found file_path.parent) - _ -> stream - resource = Managed_Resource.register stream_2 close_stream - Output_Stream.Value self resource - ## PRIVATE Reads last `n` bytes from the file (or less if the file is too small) and returns a vector of bytes. @@ -711,9 +740,9 @@ type Output_Stream example_write_bytes = file = Examples.scratch_file - out_stream = file.new_output_stream [File_Access.Create, File_Access.Write] - out_stream.write_bytes "hello".utf_8 - out_stream.close + file.with_output_stream [File_Access.Create, File_Access.Write] out_stream-> + out_stream.write_bytes "hello".utf_8 + out_stream.close write_bytes : Vector File_Access -> Nothing ! File_Error write_bytes self contents = self.stream_resource . 
If the Output context is not enabled (such as in "Design" mode),
This temporary file will + be automatically deleted on exit of the Enso process. + + This allows for building the workflow without affecting the real files. +@encoding Encoding.default_widget Text.write : (File|Text) -> Encoding -> Existing_File_Behavior -> Problem_Behavior -> File ! Encoding_Error | Illegal_Argument | File_Error Text.write self path encoding=Encoding.utf_8 on_existing_file=Existing_File_Behavior.Backup on_problems=Problem_Behavior.Report_Warning = bytes = self.bytes encoding on_problems - file = File.new path - r = on_existing_file.write file stream-> - bytes.if_not_error <| - stream.write_bytes bytes - r.if_not_error file + + actual = File.new path + + is_enabled = Context.Output.is_enabled + + effective_existing_behaviour = if is_enabled then on_existing_file else + case on_existing_file of + Existing_File_Behavior.Backup -> Existing_File_Behavior.Overwrite + Existing_File_Behavior.Error -> if actual.exists then Error.throw (File_Error.Already_Exists actual) else Existing_File_Behavior.Overwrite + _ -> on_existing_file + + file = if is_enabled then actual else actual.create_dry_run_file copy_original=on_existing_file==Existing_File_Behavior.Append + + Context.Output.with_enabled <| + r = effective_existing_behaviour.write file stream-> + bytes.if_not_error <| + stream.write_bytes bytes + r.if_not_error <| + if is_enabled then file else + warning = Dry_Run_Operation.Warning "Only a dry run has occurred, with data written to a temporary file." + Warning.attach warning file ## Writes (or appends) the Vector of bytes into the specified file. The behavior specified in the `existing_file` parameter will be used if the file exists. 
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso index 5cbface4a5f6..5c70c16ed923 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso @@ -9,6 +9,7 @@ import project.Errors.File_Error.File_Error import project.Errors.Problem_Behavior.Problem_Behavior import project.Errors.Unimplemented.Unimplemented import project.Function.Function +import project.Meta import project.Network.HTTP.Response.Response import project.Network.URI.URI import project.Nothing.Nothing @@ -17,6 +18,10 @@ import project.System.File.File from project.Data.Boolean import Boolean, True, False from project.Data.Json import Invalid_JSON +from project.Metadata.Widget import Single_Choice +from project.Metadata.Choice import Option +import project.Metadata.Display + polyglot java import org.enso.base.file_format.FileFormatSPI ## PRIVATE @@ -35,6 +40,21 @@ get_format callback = @Tail_Call reader (idx + 1) reader 0 +## PRIVATE +format_widget : Single_Choice +format_widget = + all_types = [Auto_Detect] + format_types + make_ctor type_obj = + type_name = Meta.get_qualified_type_name type_obj + + ## Temporary work around to work out if need to add the constructor name + is_singleton_type = type_obj==JSON_Format || (type_name.ends_with "_Format" . not) + if is_singleton_type then type_name else + simple_name = Meta.get_simple_type_name type_obj + "(" + type_name + "." + (simple_name.replace "_Format" "") + ")" + make_name type_obj = type_obj.to_text.replace "_Format" "" . 
replace "_" " " + Single_Choice display=Display.Always values=(all_types.map n->(Option (make_name n) (make_ctor n))) + type Auto_Detect ## PRIVATE Implements the `File.read` for this `File_Format` @@ -127,23 +147,23 @@ type Bytes read self file _ = file.read_bytes -type JSON_File +type JSON_Format ## PRIVATE If the File_Format supports reading from the file, return a configured instance. - for_file : File -> JSON_File | Nothing + for_file : File -> JSON_Format | Nothing for_file file = case file.extension of - ".json" -> JSON_File - ".geojson" -> JSON_File + ".json" -> JSON_Format + ".geojson" -> JSON_Format _ -> Nothing ## PRIVATE If the File_Format supports reading from the web response, return a configured instance. - for_web : Text -> URI -> JSON_File | Nothing + for_web : Text -> URI -> JSON_Format | Nothing for_web content_type _ = first = content_type.split ';' . first . trim case first of - "application/json" -> JSON_File + "application/json" -> JSON_Format _ -> Nothing ## PRIVATE diff --git a/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso index b2447c02368f..850b7e614052 100644 --- a/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso @@ -4,6 +4,7 @@ import Standard.Base.Data.Text.Regex.Match.Match import Standard.Base.Errors.Common.No_Such_Method import Standard.Base.Network.HTTP.Response.Response import Standard.Base.Network.HTTP.Response_Body.Response_Body +import Standard.Base.Runtime.Context from Standard.Table import Table, Column @@ -40,7 +41,7 @@ xls = url = "https://enso-data-samples.s3.us-west-1.amazonaws.com/spreadsheet.xls" file = enso_project.data / 'spreadsheet.xls' if file.exists.not then - HTTP.fetch url . to_file file + Context.Output.with_enabled <| HTTP.fetch url . to_file file file ## An example XLSX file for experimenting with Table and its APIs. 
@@ -55,14 +56,15 @@ xlsx = url = "https://enso-data-samples.s3.us-west-1.amazonaws.com/spreadsheet.xlsx" file = enso_project.data / 'spreadsheet.xlsx' if file.exists.not then - HTTP.fetch url . to_file file + Context.Output.with_enabled <| HTTP.fetch url . to_file file file ## A file that is used for writing temporary data as part of tests. scratch_file : File scratch_file = file = enso_project.data / "scratch_file" - if file.exists then file.delete else Nothing + if file.exists.not then Nothing else + Context.Output.with_enabled <| file.delete file ## An example duration for experimenting with duration APIs. @@ -172,7 +174,7 @@ image_file = url = "https://upload.wikimedia.org/wikipedia/commons/thumb/e/e9/Hue_alpha_falloff.png/320px-Hue_alpha_falloff.png" file = enso_project.data / "image.png" if file.exists.not then - HTTP.fetch url . to_file file + Context.Output.with_enabled <| HTTP.fetch url . to_file file file ## A PNG image. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table_Conversions.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table_Conversions.enso index 97769b430be9..8f8b7e67b26b 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table_Conversions.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table_Conversions.enso @@ -128,8 +128,8 @@ Text.parse_to_table self pattern="." case_sensitivity=Case_Sensitivity.Sensitive - match_columns: How to match columns between the table and the file. Not used for JSON. - on_problems: What to do if there are problems reading the file. 
-JSON_File.write_table : File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File -JSON_File.write_table self file table on_existing_file match_columns on_problems = +JSON_Format.write_table : File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File +JSON_Format.write_table self file table on_existing_file match_columns on_problems = _ = [match_columns, on_problems] if file.exists.not then table.to_json.write file else case on_existing_file of diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Test_Reporter.enso b/distribution/lib/Standard/Test/0.0.0-dev/src/Test_Reporter.enso index 3bc26b25ab1e..580f0e13d692 100644 --- a/distribution/lib/Standard/Test/0.0.0-dev/src/Test_Reporter.enso +++ b/distribution/lib/Standard/Test/0.0.0-dev/src/Test_Reporter.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Runtime.Context import project.Suite_Config.Suite_Config import project.Test.Test @@ -18,8 +19,10 @@ wrap_junit_testsuites config builder ~action = if config.should_output_junit then builder.append '\n' - config.output_path.parent.create_directory - builder.toString.write config.output_path + + Context.Output.with_enabled <| + config.output_path.parent.create_directory + builder.toString.write config.output_path result diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/ContextIsEnabledNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/ContextIsEnabledNode.java index b5d50efa8e3a..15bd5e09d427 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/ContextIsEnabledNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/runtime/ContextIsEnabledNode.java @@ -11,7 +11,7 @@ @BuiltinMethod( type = "Context", - name = "is_enabled", + name = "is_enabled_builtin", description = "Check if the context is enabled in the provided execution 
environment.") public class ContextIsEnabledNode extends Node { private @Child ExpectStringNode expectStringNode = ExpectStringNode.build(); diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/EnsoFile.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/EnsoFile.java index 0841799aa5ab..3d73e1e2eb2f 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/EnsoFile.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/EnsoFile.java @@ -42,7 +42,7 @@ public EnsoFile(TruffleFile truffleFile) { this.truffleFile = truffleFile; } - @Builtin.Method + @Builtin.Method(name = "output_stream_builtin") @Builtin.WrapException(from = IOException.class) @Builtin.ReturningGuestObject @CompilerDirectives.TruffleBoundary @@ -155,7 +155,7 @@ public boolean isDirectory() { return this.truffleFile.isDirectory(); } - @Builtin.Method(name = "create_directory") + @Builtin.Method(name = "create_directory_builtin") @CompilerDirectives.TruffleBoundary public void createDirectories() { try { diff --git a/std-bits/base/src/main/java/org/enso/base/DryRunFileManager.java b/std-bits/base/src/main/java/org/enso/base/DryRunFileManager.java new file mode 100644 index 000000000000..f6136568a8df --- /dev/null +++ b/std-bits/base/src/main/java/org/enso/base/DryRunFileManager.java @@ -0,0 +1,42 @@ +package org.enso.base; + +import java.io.File; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class DryRunFileManager { + static final Map files = new HashMap<>(); + + /** + * Creates a temporary file for the given path. If the path is already a dry run temporary file, + * the same path will be returned. + * + * @param path the path to the file to make a temporary file of. + * @return the path to the temporary file. 
+ */ + public static String getTemporaryFile(String path) { + return files.computeIfAbsent( + path, + k -> { + if (files.containsValue(k)) { + // Existing temporary file so return this. + return k; + } + + var filename = new File(k).getName(); + var lastDot = filename.lastIndexOf('.'); + var prefix = lastDot == -1 ? filename : filename.substring(0, lastDot); + prefix = prefix + "_ensodryrun"; + var extension = lastDot == -1 ? "" : filename.substring(lastDot); + + try { + var temp = File.createTempFile(prefix, extension); + temp.deleteOnExit(); + return temp.getAbsolutePath(); + } catch (IOException e) { + return null; + } + }); + } +} diff --git a/std-bits/base/src/main/java/org/enso/base/file_format/JSONFormatSPI.java b/std-bits/base/src/main/java/org/enso/base/file_format/JSONFormatSPI.java index 91313fb5bde9..4266b2bd42fc 100644 --- a/std-bits/base/src/main/java/org/enso/base/file_format/JSONFormatSPI.java +++ b/std-bits/base/src/main/java/org/enso/base/file_format/JSONFormatSPI.java @@ -9,6 +9,6 @@ protected String getModuleName() { @Override protected String getTypeName() { - return "JSON_File"; + return "JSON_Format"; } } diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 241ca1f8a88a..07c47eee01be 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -1,5 +1,6 @@ from Standard.Base import all import Standard.Base.Runtime.Ref.Ref +import Standard.Base.Runtime.Context import Standard.Table.Data.Type.Value_Type.Bits from Standard.Table import Table, Value_Type @@ -154,11 +155,11 @@ sqlite_spec connection prefix = spec = enso_project.data.create_directory file = enso_project.data / "sqlite_test.db" - file.delete_if_exists + Context.Output.with_enabled <| file.delete_if_exists in_file_prefix = "[SQLite File] " sqlite_spec (Database.connect (SQLite file)) in_file_prefix Upload_Spec.spec (_ -> Database.connect (SQLite file)) in_file_prefix - 
file.delete + Context.Output.with_enabled <| file.delete in_memory_prefix = "[SQLite In-Memory] " sqlite_spec (Database.connect (SQLite In_Memory)) in_memory_prefix @@ -166,7 +167,7 @@ spec = SQLite_Type_Mapping_Spec.spec - Test.group "SQLite_Format should allow connecting to SQLite files" <| + Test.group "SQLite_Format should allow connecting to SQLite files" <| Context.Output.with_enabled <| file.delete_if_exists connection = Database.connect (SQLite file) diff --git a/test/Table_Tests/src/IO/Csv_Spec.enso b/test/Table_Tests/src/IO/Csv_Spec.enso index 024b3865a719..50b0392ad6e8 100644 --- a/test/Table_Tests/src/IO/Csv_Spec.enso +++ b/test/Table_Tests/src/IO/Csv_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Runtime.Context from Standard.Table import Table, Column, Delimited import Standard.Table.Main as Table_Module @@ -91,7 +92,7 @@ spec = res.should_equal expected - Test.specify 'should write CSV to a file' <| + Test.specify 'should write CSV to a file' <| Context.Output.with_enabled <| varied_column = (enso_project.data / "varied_column.csv") . read out = enso_project.data / "transient" / "out.csv" out.delete_if_exists @@ -109,7 +110,7 @@ spec = out.read_text.should_equal exp out.delete_if_exists - Test.group "Integration" <| + Test.group "Integration" <| Context.Output.with_enabled <| Test.specify "should be able to round-trip a table with all kinds of weird characters to CSV and back" <| names = ['Śłąęźż");DROP TABLE Students;--', 'This;Name;;Is""Strange', 'Marcin,,', '\'', 'a\n\nb', 'a\tc', Nothing, Nothing, Nothing, '42', '💁👌🎍😍', '', 'null?\0?', 'FFFD', '\uFFFD', '\r\n', 'a\r\nb\n\rc\rd\ne', 'what about these # ?? 
// /* hmm */ is it included?', 'and the rare \v vertical tab?'] d = Date_Time.new 2015 10 29 23 55 49 diff --git a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso index d5906c33664d..a51697a18805 100644 --- a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso @@ -2,6 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Encoding_Error.Encoding_Error import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Runtime.Context from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Delimited import Standard.Table.Data.Table_Conversions @@ -102,7 +103,7 @@ spec = r2.should_fail_with File_Error r2.catch.should_be_a File_Error.IO_Error - Test.specify "should work with all kinds of line endings" <| + Test.specify "should work with all kinds of line endings" <| Context.Output.with_enabled <| path name = enso_project.data / 'transient' / name create_file name ending_style = lines = ['a,b,c', 'd,e,f', '1,2,3'] @@ -129,7 +130,7 @@ spec = ['crlf.csv', 'lf.csv', 'cr.csv', 'mixed.csv'].each (path >> .delete) - Test.specify "should allow to override line endings style" <| + Test.specify "should allow to override line endings style" <| Context.Output.with_enabled <| file = enso_project.data / "transient" / "lf.csv" lines = ['a,b,c', 'd,e,f', '1,2,3'] text = lines.join '\n' @@ -170,7 +171,7 @@ spec = table.at '🚀b' . to_vector . should_equal ['✨🚀🚧😍😃😍😎😙😉☺'] table.at 'ć😎' . to_vector . 
should_equal ['แมวมีสี่ขา'] - Test.specify "should report errors when encountering malformed characters" <| + Test.specify "should report errors when encountering malformed characters" <| Context.Output.with_enabled <| utf8_file = (enso_project.data / "transient" / "utf8_invalid.csv") utf8_bytes = [97, 44, 98, 44, 99, 10, -60, -123, 44, -17, -65, -65, 44, -61, 40, -61, 40, 10] utf8_bytes.write_bytes utf8_file diff --git a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso index 61ca23091b8c..1822c1f8b3a5 100644 --- a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso @@ -2,6 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Encoding_Error.Encoding_Error import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Runtime.Context from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Match_Columns, Delimited from Standard.Table.Errors import all @@ -24,7 +25,7 @@ join_lines lines trailing_newline=True = spec = line_ending_pairs = [[Line_Ending_Style.Unix, '\n'], [Line_Ending_Style.Windows, '\r\n'], [Line_Ending_Style.Mac_Legacy, '\r']] - Test.group "Delimited File Writing" <| + Test.group "Delimited File Writing" <| Context.Output.with_enabled <| Test.specify "should correctly write a simple table and return the written file object on success" <| table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]], ["D", ["a", 2, My_Type.Value 10]]] file = (enso_project.data / "transient" / "written.csv") diff --git a/test/Table_Tests/src/IO/Excel_Spec.enso b/test/Table_Tests/src/IO/Excel_Spec.enso index ea36ff0c278e..ac77439b0dfa 100644 --- a/test/Table_Tests/src/IO/Excel_Spec.enso +++ b/test/Table_Tests/src/IO/Excel_Spec.enso @@ -1,6 +1,7 @@ from Standard.Base import all import Standard.Base.Errors.File_Error.File_Error import 
Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Runtime.Context from Standard.Table import Table, Match_Columns, Excel, Excel_Range, Data_Formatter, Sheet_Names, Range_Names, Worksheet, Cell_Range, Delimited, Excel_Workbook @@ -67,7 +68,7 @@ spec_fmt header file read_method sheet_count=5 = t_3.at 'C' . to_vector . should_equal [43.2, 54] spec_write suffix test_sheet_name = - Test.group ("Write " + suffix + " Files") <| + Test.group ("Write " + suffix + " Files") <| Context.Output.with_enabled <| out = enso_project.data / ('out.' + suffix) out_bak = enso_project.data / ('out.' + suffix + '.bak') table = enso_project.data/'varied_column.csv' . read @@ -678,7 +679,7 @@ spec = r2.should_fail_with File_Error r2.catch.should_be_a File_Error.Corrupted_Format - Test.specify "should handle malformed XLS files gracefully" <| + Test.specify "should handle malformed XLS files gracefully" <| Context.Output.with_enabled <| bad_file = enso_project.data / "transient" / "malformed.xls" "not really an XLS file contents...".write bad_file on_existing_file=Existing_File_Behavior.Overwrite diff --git a/test/Table_Tests/src/IO/Formats_Spec.enso b/test/Table_Tests/src/IO/Formats_Spec.enso index af435ebc97c7..660b0380b456 100644 --- a/test/Table_Tests/src/IO/Formats_Spec.enso +++ b/test/Table_Tests/src/IO/Formats_Spec.enso @@ -1,5 +1,6 @@ from Standard.Base import all import Standard.Base.Errors.File_Error.File_Error +import Standard.Base.Runtime.Context from Standard.Table import all import Standard.Table.Errors.Invalid_JSON_Format @@ -10,7 +11,7 @@ import Standard.Test.Extensions import project.Util -spec = Test.group 'Various File Format support on Table' <| +spec = Test.group 'Various File Format support on Table' <| Context.Output.with_enabled <| t1 = Table.new [["X", [1, 2, 3]]] transient = enso_project.data / "transient" simple_empty = enso_project.data/'simple_empty.csv' . 
read diff --git a/test/Tests/src/System/File_Read_Spec.enso b/test/Tests/src/System/File_Read_Spec.enso index 6057e438a75d..71b7ec1f5b62 100644 --- a/test/Tests/src/System/File_Read_Spec.enso +++ b/test/Tests/src/System/File_Read_Spec.enso @@ -53,7 +53,7 @@ spec = Problems.expect_only_warning Encoding_Error <| windows_log.read (Plain_Text Encoding.ascii) - Test.group "JSON_File" <| + Test.group "JSON_Format" <| Test.specify "should be able to read a file as Json" <| f1 = enso_project.data / "sample.json" j1 = f1.read @@ -62,7 +62,7 @@ spec = j1.at "not" . should_equal Nothing f2 = enso_project.data / "sample-json.weird-extension" - j2 = f2.read JSON_File + j2 = f2.read JSON_Format j2.at 0 . at "foo" . should_equal "bar" j2.at 1 . should_equal 42 j2.at 2 . should_equal Nothing diff --git a/test/Tests/src/System/File_Spec.enso b/test/Tests/src/System/File_Spec.enso index 08a86d38eaf8..5e151ec092b0 100644 --- a/test/Tests/src/System/File_Spec.enso +++ b/test/Tests/src/System/File_Spec.enso @@ -3,6 +3,7 @@ import Standard.Base.Errors.Encoding_Error.Encoding_Error import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State +import Standard.Base.Runtime.Context polyglot java import org.enso.base_test_helpers.FileSystemHelper @@ -18,7 +19,7 @@ spec = windows_file = enso_project.data / "windows.txt" non_existent_file = File.new "does_not_exist.txt" - Test.group "File Operations" <| + Test.group "File Operations" <| Context.Output.with_enabled <| Test.specify "should allow creating a new file" <| path = sample_file.path File.new path @@ -132,13 +133,14 @@ spec = Test.specify "should allow to read last n bytes from a file" <| file = enso_project.data / "transient" / "bytes.txt" data = [1, 0, 0, 1, 2, 100, 20] - data.write_bytes file - file.read_last_bytes 0 . should_equal [] - file.read_last_bytes 1 . should_equal [20] - file.read_last_bytes 2 . 
should_equal [100, 20] - file.read_last_bytes 5 . should_equal [0, 1, 2, 100, 20] - file.read_last_bytes 1000 . should_equal data - file.delete + Context.Output.with_enabled <| + data.write_bytes file + file.read_last_bytes 0 . should_equal [] + file.read_last_bytes 1 . should_equal [20] + file.read_last_bytes 2 . should_equal [100, 20] + file.read_last_bytes 5 . should_equal [0, 1, 2, 100, 20] + file.read_last_bytes 1000 . should_equal data + file.delete Test.specify "should handle exceptions when reading a non-existent file" <| file = File.new "does_not_exist.txt" @@ -203,7 +205,7 @@ spec = contents_2 = Data.read_text file contents_2.should_start_with "Cupcake ipsum dolor sit amet." - Test.group "write operations" <| + Test.group "write operations" <| Context.Output.with_enabled <| data = [32, 127, -128, 0] data_2 = [10, 15, 20, 30] diff --git a/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso b/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso index eea119f7912a..fd40c0765728 100644 --- a/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso +++ b/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso @@ -1,5 +1,6 @@ from Standard.Base import all import Standard.Base.Errors.Encoding_Error.Encoding_Error +import Standard.Base.Runtime.Context polyglot java import java.nio.CharBuffer @@ -24,7 +25,7 @@ spec = f.delete f.exists.should_be_false - Test.specify "should work correctly when reading chunks of varying sizes" <| + Test.specify "should work correctly when reading chunks of varying sizes" <| Context.Output.with_enabled <| f = enso_project.data / "transient" / "varying_chunks.txt" fragment = 'Hello 😎🚀🚧!' contents = 1.up_to 1000 . map _->fragment . 
join '\n' diff --git a/test/Tests/src/System/Reporting_Stream_Encoder_Spec.enso b/test/Tests/src/System/Reporting_Stream_Encoder_Spec.enso index 42cee53b6e34..163a1362c878 100644 --- a/test/Tests/src/System/Reporting_Stream_Encoder_Spec.enso +++ b/test/Tests/src/System/Reporting_Stream_Encoder_Spec.enso @@ -1,6 +1,7 @@ from Standard.Base import all import Standard.Base.Errors.Encoding_Error.Encoding_Error import Standard.Base.Errors.Illegal_State.Illegal_State +import Standard.Base.Runtime.Context polyglot java import org.enso.base.Encoding_Utils polyglot java import java.nio.CharBuffer @@ -9,7 +10,7 @@ from Standard.Test import Test, Test_Suite, Problems import Standard.Test.Extensions spec = - Test.group "ReportingStreamEncoder" <| + Test.group "ReportingStreamEncoder" <| Context.Output.with_enabled <| Test.specify "should allow writing a file codepoint by codepoint" <| f = enso_project.data / "transient" / "char-by-char.txt" f.delete_if_exists diff --git a/test/Visualization_Tests/src/Table_Spec.enso b/test/Visualization_Tests/src/Table_Spec.enso index ccf9b2e1878e..8b69831b4c42 100644 --- a/test/Visualization_Tests/src/Table_Spec.enso +++ b/test/Visualization_Tests/src/Table_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Runtime.Context from Standard.Table import Table, Aggregate_Column, Value_Type @@ -99,7 +100,7 @@ visualization_spec connection = Visualization.prepare_visualization Value_Type.Char . should_equal (make_json Value_Type.Char) Visualization.prepare_visualization Value_Type.Unsupported_Data_Type . 
should_equal (make_json Value_Type.Unsupported_Data_Type) -spec = +spec = Context.Output.with_enabled <| enso_project.data.create_directory file = enso_project.data / "sqlite_test.db" file.delete_if_exists diff --git a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso index 8120f266f04a..208f0e0dcfda 100644 --- a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso +++ b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso @@ -23,7 +23,12 @@ type Context if self.is_enabled environment then action else Panic.throw (Forbidden_Operation.Error self.name) is_enabled : Text -> Boolean - is_enabled self environment="design" = @Builtin_Method "Context.is_enabled" + is_enabled self environment="design" = + self.is_enabled_builtin environment + + ## PRIVATE + is_enabled_builtin : Text -> Boolean + is_enabled_builtin self environment = @Builtin_Method "Context.is_enabled_builtin" current_execution_environment : Text current_execution_environment = @Builtin_Method "Runtime.current_execution_environment" From a83954571a47da986568a56e12f60d4634ca5c3d Mon Sep 17 00:00:00 2001 From: Kaz Wesley Date: Mon, 1 May 2023 07:29:59 -0700 Subject: [PATCH 19/34] Fix disappearing cached shapes (#6458) * Fix GL parameter * Don't leak GL Framebuffers * Resize: Don't rebuild screen-size-independent passes * Don't drop Texture while in use * Fix concurrency bug --- .../core/src/display/render/composer.rs | 20 ++++- .../ensogl/core/src/display/render/pass.rs | 11 ++- .../src/display/render/passes/cache_shapes.rs | 87 ++++++++++++------- .../core/src/system/gpu/data/texture/class.rs | 6 +- 4 files changed, 89 insertions(+), 35 deletions(-) diff --git a/lib/rust/ensogl/core/src/display/render/composer.rs b/lib/rust/ensogl/core/src/display/render/composer.rs index bdb95ff1f5f1..09134edeeffd 100644 --- a/lib/rust/ensogl/core/src/display/render/composer.rs +++ 
b/lib/rust/ensogl/core/src/display/render/composer.rs @@ -51,12 +51,28 @@ impl { self.init_passes(); } - /// Resize the composer and reinitialize all of its layers. + /// Resize the composer and reinitialize all of its screen-size-dependent layers. pub fn resize(&mut self, width: i32, height: i32, pixel_ratio: f32) { + if width == self.width && height == self.height && pixel_ratio == self.pixel_ratio { + // Some resize events are spurious; it is not necessary to reinitialize the passes if + // the size hasn't actually changed. + return; + } self.width = width; self.height = height; self.pixel_ratio = pixel_ratio; - self.init_passes(); + let ctx = &self.context; + let vars = &self.variables; + let defs = self.pipeline.passes_clone(); + let old_passes = self.passes.drain(..); + let passes = defs.into_iter().zip(old_passes).map(|(def, pass)| { + if def.is_screen_size_independent() { + pass + } else { + ComposerPass::new(ctx, vars, def, width, height, pixel_ratio) + } + }).collect_vec(); + self.passes = passes; } /// Initialize all pass definitions from the [`Pipeline`]. diff --git a/lib/rust/ensogl/core/src/display/render/pass.rs b/lib/rust/ensogl/core/src/display/render/pass.rs index 332a4cf8662a..62078713d785 100644 --- a/lib/rust/ensogl/core/src/display/render/pass.rs +++ b/lib/rust/ensogl/core/src/display/render/pass.rs @@ -20,6 +20,9 @@ use crate::system::gpu::data::texture::class::TextureOps; pub trait Definition: CloneBoxedForDefinition + Debug + 'static { fn initialize(&mut self, _instance: &Instance) {} fn run(&mut self, _instance: &Instance, update_status: UpdateStatus); + fn is_screen_size_independent(&self) -> bool { + false + } } clone_boxed!(Definition); @@ -170,7 +173,7 @@ impl OutputDefinition { /// Constructor of the RGBA u8 output with default texture parameters. It is the most popular /// option and you should use it to render colors with your passes. 
pub fn new_rgba(name: Name) -> Self { - let internal_format = texture::Rgba; + let internal_format = texture::Rgba8; let item_type = texture::item_type::u8; let texture_parameters = default(); OutputDefinition::new(name, internal_format, item_type, texture_parameters) @@ -217,3 +220,9 @@ impl Framebuffer { result } } + +impl Drop for Framebuffer { + fn drop(&mut self) { + self.context.delete_framebuffer(Some(&self.native)); + } +} diff --git a/lib/rust/ensogl/core/src/display/render/passes/cache_shapes.rs b/lib/rust/ensogl/core/src/display/render/passes/cache_shapes.rs index 537acbe2c98c..e854f68bbb5d 100644 --- a/lib/rust/ensogl/core/src/display/render/passes/cache_shapes.rs +++ b/lib/rust/ensogl/core/src/display/render/passes/cache_shapes.rs @@ -35,24 +35,31 @@ use crate::gui::component::AnyShapeView; #[derive(Clone, Derivative)] #[derivative(Debug)] pub struct CacheShapesPass { - scene: Scene, - framebuffer: Option, + scene: Scene, + framebuffer: Option, + texture: Option, #[derivative(Debug = "ignore")] - shapes_to_render: Vec>, + shapes_to_render: Vec>, /// Texture size in device pixels. texture_size_device: Vector2, - layer: Layer, + layer: Layer, + camera_ready: Rc>, + #[derivative(Debug = "ignore")] + display_object_update_handler: Option>, } impl CacheShapesPass { /// Constructor. 
pub fn new(scene: &Scene) -> Self { Self { - framebuffer: default(), - shapes_to_render: default(), - layer: Layer::new("Cached Shapes"), - scene: scene.clone_ref(), + framebuffer: default(), + texture: default(), + shapes_to_render: default(), + layer: Layer::new("Cached Shapes"), + scene: scene.clone_ref(), texture_size_device: default(), + camera_ready: default(), + display_object_update_handler: default(), } } } @@ -83,38 +90,60 @@ impl pass::Definition for CacheShapesPass { self.layer.camera().update(&self.scene); self.scene.display_object.update(&self.scene); self.layer.update(); + // The asynchronous update of the scene's display object initiated above will eventually + // set our layer's camera's transformation. Handle the camera update when all + // previously-initiated FRP events finish being processed. + let handle = frp::microtasks::next_microtask({ + let camera = self.layer.camera(); + let scene = self.scene.clone_ref(); + let camera_ready = Rc::clone(&self.camera_ready); + move || { + camera.update(&scene); + camera_ready.set(true); + } + }); + self.display_object_update_handler = Some(Rc::new(handle)); let output = pass::OutputDefinition::new_rgba("cached_shapes"); let texture = instance.new_texture(&output, self.texture_size_device.x, self.texture_size_device.y); self.framebuffer = Some(instance.new_framebuffer(&[&texture])); + self.texture = Some(texture); } fn run(&mut self, instance: &Instance, _update_status: UpdateStatus) { - let is_shader_compiled = - |shape: &mut Rc| shape.sprite().symbol.shader().program().is_some(); - let mut ready_to_render = self.shapes_to_render.drain_filter(is_shader_compiled).peekable(); - if ready_to_render.peek().is_some() { - if let Some(framebuffer) = self.framebuffer.as_ref() { - framebuffer.with_bound(|| { - instance.with_viewport( - self.texture_size_device.x, - self.texture_size_device.y, - || { - with_display_mode(DisplayModes::CachedShapesTexture, || { - with_context(|ctx| 
ctx.set_camera(&self.layer.camera())); - for shape in ready_to_render { - shape.sprite().symbol.render(); - } - }) - }, - ); - }); - } else { - reportable_error!("Impossible happened: The CacheShapesPass was run without initialized framebuffer."); + if self.camera_ready.get() { + let is_shader_compiled = |shape: &mut Rc| { + shape.sprite().symbol.shader().program().is_some() + }; + let mut ready_to_render = + self.shapes_to_render.drain_filter(is_shader_compiled).peekable(); + if ready_to_render.peek().is_some() { + if let Some(framebuffer) = self.framebuffer.as_ref() { + framebuffer.with_bound(|| { + instance.with_viewport( + self.texture_size_device.x, + self.texture_size_device.y, + || { + with_display_mode(DisplayModes::CachedShapesTexture, || { + with_context(|ctx| ctx.set_camera(&self.layer.camera())); + for shape in ready_to_render { + shape.sprite().symbol.render(); + } + }) + }, + ); + }); + } else { + reportable_error!("Impossible happened: The CacheShapesPass was run without initialized framebuffer."); + } } } } + + fn is_screen_size_independent(&self) -> bool { + true + } } fn with_display_mode(mode: DisplayModes, f: impl FnOnce() -> R) -> R { diff --git a/lib/rust/ensogl/core/src/system/gpu/data/texture/class.rs b/lib/rust/ensogl/core/src/system/gpu/data/texture/class.rs index 8c93b45fe987..2c76aaecf79d 100644 --- a/lib/rust/ensogl/core/src/system/gpu/data/texture/class.rs +++ b/lib/rust/ensogl/core/src/system/gpu/data/texture/class.rs @@ -58,9 +58,9 @@ impl Drop for TextureBindGuard { /// For more background see: https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/texParameter #[derive(Copy, Clone, Debug, Default)] pub struct Parameters { - /// Specifies the setting for the texture magnification filter (`Context::TEXTURE_MIN_FILTER`). + /// Specifies the setting for the texture minification filter (`Context::TEXTURE_MIN_FILTER`). 
pub min_filter: MinFilter, - /// Specifies the setting for the texture minification filter (`Context::TEXTURE_MAG_FILTER`). + /// Specifies the setting for the texture magnification filter (`Context::TEXTURE_MAG_FILTER`). pub mag_filter: MagFilter, /// Specifies the setting for the wrapping function for texture coordinate s /// (`Context::TEXTURE_WRAP_S`). @@ -75,7 +75,7 @@ impl Parameters { pub fn apply_parameters(self, context: &Context) { let target = Context::TEXTURE_2D; context.tex_parameteri(*target, *Context::TEXTURE_MIN_FILTER, *self.min_filter as i32); - context.tex_parameteri(*target, *Context::TEXTURE_MIN_FILTER, *self.mag_filter as i32); + context.tex_parameteri(*target, *Context::TEXTURE_MAG_FILTER, *self.mag_filter as i32); context.tex_parameteri(*target, *Context::TEXTURE_WRAP_S, *self.wrap_s as i32); context.tex_parameteri(*target, *Context::TEXTURE_WRAP_T, *self.wrap_t as i32); } From cd92d90f9ff70320096bb514194c50796dcfcba0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wojciech=20Dani=C5=82o?= Date: Mon, 1 May 2023 18:11:05 +0200 Subject: [PATCH 20/34] Proper handling of multiple list views. 
(#6461) --- .../ensogl/component/list-editor/src/lib.rs | 236 +++++++++--------- lib/rust/ensogl/component/slider/src/model.rs | 3 + lib/rust/ensogl/core/src/data/bounding_box.rs | 6 + lib/rust/ensogl/core/src/gui/cursor.rs | 234 +++++++++++++++-- .../ensogl/examples/list-editor/src/lib.rs | 47 ++-- lib/rust/frp/src/nodes.rs | 77 ++++++ 6 files changed, 444 insertions(+), 159 deletions(-) diff --git a/lib/rust/ensogl/component/list-editor/src/lib.rs b/lib/rust/ensogl/component/list-editor/src/lib.rs index 07c96aab0b4c..a9a90e515eef 100644 --- a/lib/rust/ensogl/component/list-editor/src/lib.rs +++ b/lib/rust/ensogl/component/list-editor/src/lib.rs @@ -77,16 +77,21 @@ use ensogl_core::display::world::*; use ensogl_core::prelude::*; use ensogl_core::control::io::mouse; +use ensogl_core::data::bounding_box::BoundingBox; +use ensogl_core::data::color; use ensogl_core::display; use ensogl_core::display::object::Event; use ensogl_core::display::object::ObjectOps; +use ensogl_core::display::shape::compound::rectangle::*; use ensogl_core::gui::cursor; use ensogl_core::gui::cursor::Cursor; -use ensogl_core::Animation; +use ensogl_core::gui::cursor::Trash; use ensogl_core::Easing; use item::Item; use placeholder::Placeholder; use placeholder::StrongPlaceholder; +use placeholder::WeakPlaceholder; + // ============== @@ -294,6 +299,10 @@ ensogl_core::define_endpoints_2! { /// Enable insertion points (plus icons) when moving mouse after the last list item. enable_last_insertion_point(bool), + + /// A flag controlling this FRP debug mode. If enabled, additional logs can might be printed + /// to console. + debug(bool), } Output { /// Fires whenever a new element was added to the list. 
@@ -321,11 +330,13 @@ pub struct ListEditor { #[derive(Debug)] pub struct Model { - cursor: Cursor, - items: VecIndexedBy, ItemOrPlaceholderIndex>, - root: display::object::Instance, - layout: display::object::Instance, - gap: f32, + cursor: Cursor, + items: VecIndexedBy, ItemOrPlaceholderIndex>, + root: display::object::Instance, + layout_with_icons: display::object::Instance, + layout: display::object::Instance, + gap: f32, + add_elem_icon: Rectangle, } impl Model { @@ -335,10 +346,19 @@ impl Model { let items = default(); let root = display::object::Instance::new(); let layout = display::object::Instance::new(); + let layout_with_icons = display::object::Instance::new(); let gap = default(); + layout_with_icons.use_auto_layout(); layout.use_auto_layout(); - root.add_child(&layout); - Self { cursor, items, root, layout, gap } + layout_with_icons.add_child(&layout); + root.add_child(&layout_with_icons); + let add_elem_icon = Rectangle().build(|t| { + t.set_corner_radius_max() + .set_size((24.0, 24.0)) + .set_color(color::Rgba::new(0.0, 0.0, 0.0, 0.2)); + }); + layout_with_icons.add_child(&add_elem_icon); + Self { cursor, items, root, layout, layout_with_icons, gap, add_elem_icon } } } @@ -370,11 +390,20 @@ impl ListEditor { let network = self.frp.network(); let model = &self.model; + let on_add_elem_icon_down = model.borrow().add_elem_icon.on_event::(); let on_down = model.borrow().layout.on_event_capturing::(); let on_up_source = scene.on_event::(); let on_move = scene.on_event::(); + let dragged_item_network: Rc>> = default(); + let on_resized = model.borrow().layout.on_resized.clone_ref(); + let drag_target = cursor::DragTarget::new(); frp::extend! 
{ network + + frp.private.output.request_new_item <+ on_add_elem_icon_down.map(f_!([model] { + Response::gui(model.borrow().len()) + })); + target <= on_down.map(|event| event.target()); on_up <- on_up_source.identity(); @@ -394,17 +423,48 @@ impl ListEditor { pos_diff <- any3(&pos_diff_on_move, &pos_diff_on_down, &pos_diff_on_up); eval frp.gap((t) model.borrow_mut().set_gap(*t)); + + // When an item is being dragged, we are connecting to it's `on_resized` endpoint to + // watch for size changes while dragging. We want to disconnect from it as soon as the + // drag ends, and thus we are storing a local FRP network here. + dragged_item_offset <- source::(); + dragged_item_size <- any(...); + eval_ cursor.frp.stop_drag([dragged_item_network] + *dragged_item_network.borrow_mut() = None + ); + eval_ cursor.frp.start_drag ([cursor, dragged_item_size, dragged_item_offset] { + if let Some(obj) = cursor.dragged_display_object() { + let subnet = frp::Network::new("dragged_item_network"); + frp::extend! { subnet + // Identity creates an explicit node in this network. 
+ dragged_item_size <+ obj.on_resized.identity(); + } + dragged_item_size.emit(obj.computed_size()); + dragged_item_offset.emit(obj.position().xy()); + *dragged_item_network.borrow_mut() = Some(subnet); + } + }); + + this_bbox <- on_resized.map(|t| BoundingBox::from_size(*t)); + dragged_item_bbox <- all_with3(&dragged_item_size, &dragged_item_offset, &pos_on_move, + |size, offset, pos| BoundingBox::from_position_and_size(*pos + *offset, *size) + ); + is_close <- all_with(&this_bbox, &dragged_item_bbox, |a, b| a.intersects(b)).on_change(); + dragged_item_bbox_center <- dragged_item_bbox.map(|bbox| bbox.center()); + cursor.frp.switch_drag_target <+ is_close.map(f!([drag_target] (t) (drag_target.clone(), *t))); } self.init_add_and_remove(); - let (is_dragging, drag_diff, no_drag) = - self.init_dragging(&on_up, &on_down, &target, &pos_diff); - let (is_trashing, trash_pointer_style) = self.init_trashing(&on_up, &drag_diff); - self.init_dropping(&on_up, &pos_on_move_down, &is_trashing); + let (is_dragging, _drag_diff, no_drag) = + self.init_dragging(cursor, &on_up, &on_up_cleaning_phase, &on_down, &target, &pos_diff); + frp::extend! { network + on_up_close <- on_up.gate(&is_close); + } + self.init_dropping(&on_up_close, &dragged_item_bbox_center, &is_close); let insert_pointer_style = self.init_insertion_points(&on_up, &pos_on_move, &is_dragging); frp::extend! { network - cursor.frp.set_style_override <+ all [insert_pointer_style, trash_pointer_style].fold(); + cursor.frp.set_style_override <+ insert_pointer_style; on_down_drag <- on_down.gate_not(&no_drag); // Do not pass events to children, as we don't know whether we are about to drag // them yet. 
@@ -412,6 +472,9 @@ impl ListEditor { _eval <- no_drag.on_true().map3(&on_down, &target, |_, event, target| { target.emit_event(event.payload.clone()); }); + + item_count_changed <- any_(&frp.on_item_added, &frp.on_item_removed); + eval_ item_count_changed (model.borrow().item_count_changed()); } self } @@ -453,7 +516,7 @@ impl ListEditor { enabled.and_option_from(|| model.item_or_placeholder_index_to_index(gap)) }) }) - ); + ).on_change(); index <= opt_index; enabled <- opt_index.is_some(); pointer_style <- enabled.then_constant(cursor::Style::plus()); @@ -491,13 +554,16 @@ impl ListEditor { /// Implementation of item dragging logic. See docs of this crate to learn more. fn init_dragging( &self, + cursor: &Cursor, on_up: &frp::Stream>, + on_up_cleaning_phase: &frp::Stream>, on_down: &frp::Stream>, target: &frp::Stream, pos_diff: &frp::Stream, ) -> (frp::Stream, frp::Stream, frp::Stream) { let model = &self.model; let on_up = on_up.clone_ref(); + let on_up_cleaning_phase = on_up_cleaning_phase.clone_ref(); let on_down = on_down.clone_ref(); let target = target.clone_ref(); let pos_diff = pos_diff.clone_ref(); @@ -517,16 +583,18 @@ impl ListEditor { init_drag <- all_with(&pos_diff, init_drag_threshold, |p, t| p.y.abs() > *t).on_true(); drag_disabled <- bool(&on_up, &init_no_drag).on_change(); init_drag_not_disabled <- init_drag.gate_not(&drag_disabled); - is_dragging <- bool(&on_up, &init_drag_not_disabled).on_change(); + is_dragging <- bool(&on_up_cleaning_phase, &init_drag_not_disabled).on_change(); drag_diff <- pos_diff.gate(&is_dragging); no_drag <- drag_disabled.gate_not(&is_dragging).on_change(); - status <- bool(&on_up, &drag_diff).on_change(); + status <- bool(&on_up_cleaning_phase, &drag_diff).on_change(); start <- status.on_true(); target_on_start <- target.sample(&start); let on_item_removed = &frp.private.output.on_item_removed; - eval target_on_start([model, on_item_removed] (t) { - if let Some((index, item)) = model.borrow_mut().start_item_drag(t) { + 
eval target_on_start([model, on_item_removed, cursor] (t) { + let indexed_item = model.borrow_mut().start_item_drag(t); + if let Some((index, item)) = indexed_item { + cursor.start_drag(item.clone_ref()); on_item_removed.emit(Response::gui((index, Rc::new(RefCell::new(Some(item)))))); } }); @@ -534,44 +602,16 @@ impl ListEditor { (status, drag_diff, no_drag) } - /// Implementation of item trashing logic. See docs of this crate to learn more. - fn init_trashing( - &self, - on_up: &frp::Stream>, - drag_diff: &frp::Stream, - ) -> (frp::Stream, frp::Stream>) { - let on_up = on_up.clone_ref(); - let drag_diff = drag_diff.clone_ref(); - let model = &self.model; - let layout = model.borrow().layout.clone_ref(); - let frp = &self.frp; - let network = self.frp.network(); - frp::extend! { network - required_offset <- all_with(&frp.thrashing_offset_ratio, &layout.on_resized, - |ratio, size| size.y * ratio - ); - status <- drag_diff.map2(&required_offset, |t, m| t.y.abs() >= *m).on_change(); - status_on_up <- on_up.constant(false); - status_cleaning_phase <- any(&status, &status_on_up).on_change(); - cursor_style <- status_cleaning_phase.then_constant(cursor::Style::trash()); - on <- status.on_true(); - perform <- on_up.gate(&status); - eval_ on (model.collapse_all_placeholders()); - eval_ perform (model.borrow_mut().trash_dragged_item()); - } - (status, cursor_style) - } - /// Implementation of dropping items logic, including showing empty placeholders when the item /// is dragged over a place where it could be dropped. fn init_dropping( &self, on_up: &frp::Stream>, pos_on_move: &frp::Stream, - is_trashing: &frp::Stream, + is_close: &frp::Stream, ) { let pos_on_move = pos_on_move.clone_ref(); - let is_trashing = is_trashing.clone_ref(); + let is_close = is_close.clone_ref(); let model = &self.model; let frp = &self.frp; @@ -579,20 +619,21 @@ impl ListEditor { let model_borrowed = model.borrow(); frp::extend! 
{ network + on_far <- is_close.on_false(); center_points <- model_borrowed.layout.on_resized.map(f_!(model.center_points())); - insert_index <- pos_on_move.map2(¢er_points, f!((p, c) model.insert_index(p.x, c))); + pos_close <- pos_on_move.sampled_gate(&is_close); + insert_index <- pos_close.map2(¢er_points, f!((p, c) model.insert_index(p.x, c))); insert_index <- insert_index.on_change(); - insert_index_on_drop <- insert_index.sample(on_up).gate_not(&is_trashing); - insert_index_not_trashing <- insert_index.gate_not(&is_trashing); + insert_index <- insert_index.sampled_gate(&is_close); - on_stop_trashing <- is_trashing.on_false(); - insert_index_on_stop_trashing <- insert_index.sample(&on_stop_trashing); - update_insert_index <- any(&insert_index_not_trashing, &insert_index_on_stop_trashing); - eval update_insert_index ((i) model.borrow_mut().add_insertion_point(*i)); + eval_ on_far (model.collapse_all_placeholders()); + eval insert_index ((i) model.borrow_mut().add_insertion_point_if_type_match(*i)); let on_item_added = &frp.private.output.on_item_added; + insert_index_on_drop <- insert_index.sample(on_up).gate(&is_close); eval insert_index_on_drop ([model, on_item_added] (index) - if let Some(index) = model.borrow_mut().place_dragged_item(*index) { + let index = model.borrow_mut().place_dragged_item(*index); + if let Some(index) = index { on_item_added.emit(Response::gui(index)); } ); @@ -864,7 +905,7 @@ impl Model { /// /// See docs of [`Self::start_item_drag_at`] for more information. 
fn start_item_drag(&mut self, target: &display::object::Instance) -> Option<(Index, T)> { - let objs = target.rev_parent_chain(); + let objs = target.rev_parent_chain().reversed(); let tarrget_index = objs.into_iter().find_map(|t| self.item_index_of(&t)); if let Some((index, index_or_placeholder_index)) = tarrget_index { self.start_item_drag_at(index_or_placeholder_index).map(|item| (index, item)) @@ -914,10 +955,7 @@ impl Model { /// ╰─────╯ ╰╌╌╌╌╯ ╰─────╯ ╰╌╌╌╌╯ ╰─────╯ ╰─────╯ ╰╌╌╌╌╌╌╌╌╌╌╌╌◀╌╯ ╰─────╯ /// ``` fn start_item_drag_at(&mut self, index: ItemOrPlaceholderIndex) -> Option { - self.replace_item_with_placeholder(index).map(|item| { - self.cursor.start_drag(item.clone_ref()); - item - }) + self.replace_item_with_placeholder(index) } fn replace_item_with_placeholder(&mut self, index: ItemOrPlaceholderIndex) -> Option { @@ -944,7 +982,7 @@ impl Model { /// Prepare place for the dragged item by creating or reusing a placeholder and growing it to /// the dragged object size. - fn add_insertion_point(&mut self, index: ItemOrPlaceholderIndex) { + fn add_insertion_point_if_type_match(&mut self, index: ItemOrPlaceholderIndex) { if let Some(item) = self.cursor.with_dragged_item_if_is::(|t| t.display_object().clone()) { @@ -952,13 +990,15 @@ impl Model { let item_size = item.computed_size().x + self.margin_at(index); let placeholder = self.get_merged_placeholder_at(index).unwrap_or_else(|| { let placeholder = StrongPlaceholder::new(); - self.items.insert(index, placeholder.clone().into()); + if index >= ItemOrPlaceholderIndex::from(self.items.len()) { + self.items.push(placeholder.clone().into()); + } else { + self.items.insert(index, placeholder.clone().into()); + } placeholder }); placeholder.set_target_size(item_size); self.reposition_items(); - } else { - warn!("Called function to find insertion point while no element is being dragged.") } } @@ -975,9 +1015,9 @@ impl Model { Item::new_from_placeholder(item.clone_ref(), placeholder).into(); } else { // This 
branch should never be reached, as when dragging an item we always create - // a placeholder for it (see the [`Self::add_insertion_point`] function). However, - // in case something breaks, we want it to still provide the user with the correct - // outcome. + // a placeholder for it (see the [`Self::add_insertion_point_if_type_match`] + // function). However, in case something breaks, we want it to still + // provide the user with the correct outcome. self.items.insert(index, Item::new(item.clone_ref()).into()); warn!("An element was inserted without a placeholder. This should not happen."); } @@ -1058,6 +1098,15 @@ impl Model { fn insert_index(&self, x: f32, center_points: &[f32]) -> ItemOrPlaceholderIndex { center_points.iter().position(|t| x < *t).unwrap_or(self.items.len()).into() } + + /// If the item count drops to 0, display a button to add new items. + fn item_count_changed(&self) { + if self.len() == 0 { + self.layout_with_icons.add_child(&self.add_elem_icon); + } else { + self.add_elem_icon.unset_parent(); + } + } } impl display::Object for ListEditor { @@ -1065,54 +1114,3 @@ impl display::Object for ListEditor { &self.root } } - - -// ============= -// === Trash === -// ============= - -mod trash { - use super::*; - ensogl_core::define_endpoints_2! {} - - #[derive(Debug, CloneRef, Derivative)] - #[derivative(Clone(bound = ""))] - pub struct Trash { - model: Rc>, - } - - #[derive(Debug)] - pub struct TrashModel { - _frp: Frp, - elem: T, - } - - impl Trash { - pub fn new(elem: T) -> Self { - let self_ref = Rc::new(RefCell::new(None)); - let _frp = Frp::new(); - let display_object = elem.display_object(); - let network = &_frp.network; - let scale_animation = Animation::::new_with_init(network, 1.0); - scale_animation.simulator.update_spring(|s| s * DEBUG_ANIMATION_SPRING_FACTOR); - frp::extend! 
{ network - eval scale_animation.value ((t) display_object.set_scale_xy(Vector2(*t,*t))); - eval_ scale_animation.on_end (self_ref.borrow_mut().take();); - } - scale_animation.target.emit(0.0); - - let model = TrashModel { _frp, elem }; - let model = Rc::new(model); - *self_ref.borrow_mut() = Some(model.clone()); - Self { model } - } - } - - impl display::Object for Trash { - fn display_object(&self) -> &display::object::Instance { - self.model.elem.display_object() - } - } -} -use crate::placeholder::WeakPlaceholder; -use trash::Trash; diff --git a/lib/rust/ensogl/component/slider/src/model.rs b/lib/rust/ensogl/component/slider/src/model.rs index f02dd82b7326..58eb468afa81 100644 --- a/lib/rust/ensogl/component/slider/src/model.rs +++ b/lib/rust/ensogl/component/slider/src/model.rs @@ -213,6 +213,9 @@ impl Model { self.background.set_x(size.x / 2.0); self.track.set_x(size.x / 2.0); self.value.set_x(size.x / 2.0); + self.background.set_y(size.y / 2.0); + self.track.set_y(size.y / 2.0); + self.value.set_y(size.y / 2.0); } /// Set the color of the slider track or thumb. diff --git a/lib/rust/ensogl/core/src/data/bounding_box.rs b/lib/rust/ensogl/core/src/data/bounding_box.rs index 25dc7f48f459..59a7875c0fa3 100644 --- a/lib/rust/ensogl/core/src/data/bounding_box.rs +++ b/lib/rust/ensogl/core/src/data/bounding_box.rs @@ -55,6 +55,12 @@ impl BoundingBox { Self::from_corners(position - size / 2.0, position + size / 2.0) } + /// Constructor of the bounding box with left bottom corner placed at the origin and the given + /// size. + pub fn from_size(size: Vector2) -> Self { + Self::from_corners(Vector2::zeros(), size) + } + /// Check whether the given `pos` lies within the bounding box. 
pub fn contains(&self, pos: Vector2) -> bool { self.contains_x(pos.x) && self.contains_y(pos.y) diff --git a/lib/rust/ensogl/core/src/gui/cursor.rs b/lib/rust/ensogl/core/src/gui/cursor.rs index 094c27485dae..25572a5ccfb6 100644 --- a/lib/rust/ensogl/core/src/gui/cursor.rs +++ b/lib/rust/ensogl/core/src/gui/cursor.rs @@ -5,6 +5,7 @@ use crate::gui::style::*; use crate::prelude::*; use crate::application::command::FrpNetworkProvider; +use crate::control::io::mouse; use crate::data::color; use crate::define_style; use crate::display; @@ -200,6 +201,7 @@ crate::define_endpoints_2! { Input { set_style_override (Option