From 5b8bc86cf91a3523a159df89b7b703d25bc7d777 Mon Sep 17 00:00:00 2001 From: Gregory Michael Travis Date: Tue, 29 Oct 2024 08:48:11 -0400 Subject: [PATCH 001/286] Clean up SQLite file after the SQLite file tests have run. (#11416) --- test/Table_Tests/src/Database/SQLite_Spec.enso | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index e658f5d91737..c71ae0ebb799 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -424,6 +424,9 @@ add_specs suite_builder = suite_builder.group "SQLite_Format should allow connecting to SQLite files" group_builder-> data = File_Connection.setup database_file + group_builder.teardown <| + data.teardown + group_builder.specify "should recognise a SQLite database file" <| Auto_Detect.get_reading_format data.file . should_be_a SQLite_Format From 15575b495a0b2039313d4ae411b567355ee396c2 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Tue, 29 Oct 2024 15:47:12 +0100 Subject: [PATCH 002/286] Skeletal PanicExceptionTest and more logging when AssertionError happens (#11393) --- .../builtin/error/PanicExceptionTest.java | 59 +++++++++++++++++++ .../interpreter/runtime/data/text/Text.java | 17 ++++++ .../runtime/error/PanicException.java | 15 ++++- 3 files changed, 90 insertions(+), 1 deletion(-) create mode 100644 engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/error/PanicExceptionTest.java diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/error/PanicExceptionTest.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/error/PanicExceptionTest.java new file mode 100644 index 000000000000..3492a973523e --- /dev/null +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/node/expression/builtin/error/PanicExceptionTest.java 
@@ -0,0 +1,59 @@ +package org.enso.interpreter.node.expression.builtin.error; + +import static org.junit.Assert.assertEquals; + +import com.oracle.truffle.api.interop.InteropLibrary; +import org.enso.interpreter.node.expression.builtin.interop.syntax.HostValueToEnsoNode; +import org.enso.interpreter.runtime.data.text.Text; +import org.enso.interpreter.runtime.error.PanicException; +import org.enso.test.utils.ContextUtils; +import org.enso.test.utils.TestRootNode; +import org.graalvm.polyglot.Context; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +public class PanicExceptionTest { + + private static final InteropLibrary interop = InteropLibrary.getUncached(); + + private static Context context; + private static CatchPanicNode catchPanicNode; + private static HostValueToEnsoNode hostValueToEnsoNode; + private static TestRootNode testRootNode; + + @BeforeClass + public static void initContextAndData() { + context = ContextUtils.createDefaultContext(); + ContextUtils.executeInContext( + context, + () -> { + catchPanicNode = CatchPanicNode.build(); + hostValueToEnsoNode = HostValueToEnsoNode.build(); + testRootNode = new TestRootNode(); + testRootNode.insertChildren(catchPanicNode, hostValueToEnsoNode); + return null; + }); + } + + @AfterClass + public static void disposeContext() { + context.close(); + context = null; + } + + @Test + public void panicExceptionMessageForAssertionError() throws Exception { + ContextUtils.executeInContext( + context, + () -> { + var text = Text.create("Some text for the exception"); + var thrown = new java.lang.AssertionError(text.toString()); + var ex = new PanicException(text, thrown, null); + assertEquals(text.toString(), ex.getMessage()); + var msg = InteropLibrary.getUncached().getExceptionMessage(ex); + assertEquals(text, msg); + return null; + }); + } +} diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/text/Text.java 
b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/text/Text.java index b9ba191c7c6a..5a882bf17e02 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/text/Text.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/text/Text.java @@ -285,4 +285,21 @@ private static String flattenIfNecessary(Text text) { } return result; } + + @Override + public int hashCode() { + int hash = 7 * toString().hashCode(); + return hash; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof Text other) { + return this.toString().equals(other.toString()); + } + return false; + } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/PanicException.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/PanicException.java index 91930a1e2dab..c85698eef44b 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/PanicException.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/error/PanicException.java @@ -23,6 +23,7 @@ import org.enso.interpreter.runtime.callable.argument.CallArgumentInfo; import org.enso.interpreter.runtime.data.EnsoObject; import org.enso.interpreter.runtime.data.Type; +import org.enso.interpreter.runtime.data.atom.Atom; import org.enso.interpreter.runtime.data.text.Text; import org.enso.interpreter.runtime.library.dispatch.TypesLibrary; import org.enso.interpreter.runtime.state.State; @@ -87,7 +88,19 @@ private String computeMessage() { var info = library.getExceptionMessage(this); msg = library.asString(info); } catch (StackOverflowError | AssertionError | UnsupportedMessageException e) { - logger().atError().log("Cannot compute message for " + payload, e); + var l = logger(); + l.atError().log("Cannot compute message for " + payload, e); + l.error("Exception location: " + getLocation()); + if (getLocation() != null) { + l.error(" location source: " + 
getLocation().getEncapsulatingSourceSection()); + l.error(" location class: " + getLocation().getClass().getName()); + l.error(" location string: " + getLocation()); + } + l.error(" payload class: " + payload.getClass().getName()); + if (payload instanceof Atom atom) { + l.error(" payload cons: " + atom.getConstructor()); + l.error(" payload type: " + atom.getConstructor().getType()); + } msg = TypeToDisplayTextNode.getUncached().execute(payload); } cacheMessage = msg; From 74220f243ae72f8d5c98b45a20bed2c79ce62378 Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Tue, 29 Oct 2024 18:33:53 +0300 Subject: [PATCH 003/286] Dependency tracking between nodes is too coarse grained (#11428) close #11237 Changelog: - update: implement special case for a line removal when calculating the changeset # Important Notes Note that the graph is still re-calculated when the node is re-added (by pressing `ctrl-z`). The reason is that the engine processes edits on the textual level and there is not enough information to do similar workarounds. The issue becomes irrelevant when we switch to the direct tree manipulation in Ydoc. 
https://github.com/user-attachments/assets/c85afde8-6386-44df-82b5-6fb0cca5205b --- .../instrument/ChangesetBuilder.scala | 88 ++++++++++++++++--- .../test/context/ChangesetBuilderTest.scala | 22 +++++ 2 files changed, 100 insertions(+), 10 deletions(-) diff --git a/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/ChangesetBuilder.scala b/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/ChangesetBuilder.scala index c872efd9fe71..8f6b45f1d9b3 100644 --- a/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/ChangesetBuilder.scala +++ b/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/ChangesetBuilder.scala @@ -190,10 +190,19 @@ final class ChangesetBuilder[A: TextEditor: IndexedSource]( val edit = edits.dequeue() val locationEdit = ChangesetBuilder.toLocationEdit(edit, source) var invalidatedSet = - ChangesetBuilder.invalidated(tree, locationEdit.location, true) + ChangesetBuilder.invalidated( + tree, + locationEdit.location, + locationEdit.isNodeRemoved, + true + ) if (invalidatedSet.isEmpty) { - invalidatedSet = - ChangesetBuilder.invalidated(tree, locationEdit.location, false) + invalidatedSet = ChangesetBuilder.invalidated( + tree, + locationEdit.location, + locationEdit.isNodeRemoved, + false + ) } val newTree = ChangesetBuilder.updateLocations(tree, locationEdit) val newSource = TextEditor[A].edit(source, edit) @@ -260,8 +269,13 @@ object ChangesetBuilder { * * @param location the location of the edit * @param length the length of the inserted text + * @param isNodeRemoved the flag indicating that the edit removes a node */ - private case class LocationEdit(location: Location, length: Int) { + private case class LocationEdit( + location: Location, + length: Int, + isNodeRemoved: Boolean + ) { /** The difference in length between the edited text and the inserted text. 
* Determines how much the rest of the text will be shifted after applying @@ -409,19 +423,50 @@ object ChangesetBuilder { /** Calculate the invalidated subset of the tree affected by the edit by * comparing the source locations. * + * The `isNodeRemoved` argument covers the case when the user removes a + * single line, for example: + * + * {{{ + * 0|main = + * | + * 1| x = 0 + * | ^^^^^^ + * 2| y = 1 + * |^^^^ + * 3| y + * }}} + * + * In this case, when removing the line (1) `x = 0`, the expression `y = 1` + * on the line (2) should not be affected by the edit because it causes + * invalidation of all the subsequent expressions in the body of `main` + * function. Instead, the algorithm detects that only the `x` variable name + * was changed and later finds all its usages through the `DataflowAnalysis` + * metadata. Also note that we ignore the right hand side of the `x = ...` + * binding because the removal of rhs expression does not affect other + * expressions in the `main` body, while the usage of a common symbol, i.e. + * `foo`: + * {{{ + * x = foo + * y = foo + * }}} + * will lead to the invalidation of the `y` expression as well (when looking + * for dynamic usages of the `foo` symbol) which is unwanted. 
+ * * @param tree the source tree * @param edit the location of the edit + * @param isNodeRemoved flag indicating that the edit removes a single node * @return the invalidated nodes of the tree */ private def invalidated( tree: Tree, edit: Location, + isNodeRemoved: Boolean, onlyLeafs: Boolean ): Tree = { val invalidated = mutable.TreeSet[ChangesetBuilder.Node]() tree.iterator.foreach { node => if (!onlyLeafs || node.leaf) { - if (intersect(edit, node)) { + if (intersect(edit, node, isNodeRemoved)) { invalidated += node tree -= node } @@ -438,12 +483,14 @@ object ChangesetBuilder { */ private def intersect( edit: Location, - node: ChangesetBuilder.Node + node: ChangesetBuilder.Node, + isNodeRemoved: Boolean ): Boolean = { - intersect(edit, node.location) + if (isNodeRemoved) intersectWhenNodeRemoved(edit, node.location) + else intersect(edit, node.location) } - /** Check if the node location intersects the edit location. + /** Check if the node location intersects or borders with the edit location. * * @param edit location of the edit * @param node location of the node @@ -456,7 +503,23 @@ object ChangesetBuilder { inside(edit.end, node) } - /** Check if the character position index is inside the location. + /** Check if the node location intersects the edit that removes the line. + * + * In this case we assume that the edit removes the binding + * `name = expression`, and we only interested in detecting the `name` part. + * + * @param edit location of the edit + * @param node location of the node + * @return true if the node and edit locations are intersecting + */ + private def intersectWhenNodeRemoved( + edit: Location, + node: Location + ): Boolean = { + node.start == edit.start && node.end < edit.end + } + + /** Check if the character position index is inside or on the border of the location. 
* * @param index the character position * @param location the location @@ -476,7 +539,12 @@ object ChangesetBuilder { edit: TextEdit, source: A ): LocationEdit = { - LocationEdit(toLocation(edit, source), edit.text.length) + def isSameOffset: Boolean = + edit.range.end.character == edit.range.start.character + def isAcrossLines: Boolean = + edit.range.end.line > edit.range.start.line + val isNodeRemoved = edit.text.isEmpty && isSameOffset && isAcrossLines + LocationEdit(toLocation(edit, source), edit.text.length, isNodeRemoved) } /** Convert [[TextEdit]] location to [[Location]] in the provided source. diff --git a/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala b/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala index 8579d3b93dc8..76015824f639 100644 --- a/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala +++ b/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala @@ -221,6 +221,27 @@ class ChangesetBuilderTest ) } + "multiline remove node" in { + val code = + """x -> + | y = foo 5 + | z = foo 7 + | y + x""".stripMargin.linesIterator.mkString("\n") + val edit = TextEdit(Range(Position(2, 4), Position(3, 4)), "") + + val ir = code + .preprocessExpression(freshInlineContext) + .get + .asInstanceOf[Function.Lambda] + + val secondLine = ir.body.children()(1).asInstanceOf[Expression.Binding] + val zName = secondLine.name + + invalidated(ir, code, edit) should contain theSameElementsAs Seq( + zName.getId + ) + } + "multiline insert line 1" in { val code = """x -> @@ -434,6 +455,7 @@ class ChangesetBuilderTest atCode ) } + } def findIR(ir: IR, uuid: String): IR = { From 442123bba0d0497fdaa7e3cfd06c6ae8cef733ba Mon Sep 17 00:00:00 2001 From: Gregory Michael Travis Date: Tue, 29 Oct 2024 18:39:32 -0400 Subject: [PATCH 004/286] Fix `Float.parse` benchmark 
regression (#11402) --- .../Base/0.0.0-dev/src/Data/Numbers.enso | 21 ++++++++++++------- test/Base_Tests/src/Data/Numbers_Spec.enso | 2 +- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso index dfc0f655e539..91a40f88d28e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso @@ -794,13 +794,20 @@ type Float Float.parse "(123,456,789.654)" format="###,###.##;(###,###.##)" # => -123456789.654 parse : Text -> Locale | Nothing -> Float ! Number_Parse_Error ! Illegal_Argument - parse text locale:Locale=Locale.default format:Text="" -> Float ! Number_Parse_Error ! Illegal_Argument = - Illegal_Argument.handle_java_exception <| - # `getInstance` returns `DecimalFormat` or a subclass of `DecimalFormat`. - decimal_format = NumberFormat.getInstance locale.java_locale - decimal_format.applyLocalizedPattern format - Panic.catch ParseException (decimal_format.parse text) _-> - Error.throw (Number_Parse_Error.Error text) + parse text (locale : Locale | Nothing = Nothing) (format : Text | Nothing = Nothing) -> Float ! Number_Parse_Error ! Illegal_Argument = + case locale.is_nothing && format.is_nothing of + True -> + Panic.catch NumberFormatException (Double.parseDouble text) _-> + Error.throw (Number_Parse_Error.Error text) + False -> + Illegal_Argument.handle_java_exception <| + defaulted_locale = locale.if_nothing Locale.default + defaulted_format = format.if_nothing "" + # `getInstance` returns `DecimalFormat` or a subclass of `DecimalFormat`. 
+ decimal_format = NumberFormat.getInstance defaulted_locale.java_locale + decimal_format.applyLocalizedPattern defaulted_format + Panic.catch ParseException (decimal_format.parse text) _-> + Error.throw (Number_Parse_Error.Error text) ## ICON input_number diff --git a/test/Base_Tests/src/Data/Numbers_Spec.enso b/test/Base_Tests/src/Data/Numbers_Spec.enso index e86ed651ac97..4a66dc988ae6 100644 --- a/test/Base_Tests/src/Data/Numbers_Spec.enso +++ b/test/Base_Tests/src/Data/Numbers_Spec.enso @@ -303,7 +303,7 @@ add_specs suite_builder = Float.parse "aaaa" l . should_fail_with Number_Parse_Error group_builder.specify "should parse correctly with format and/or locale" <| - Float.parse "123,456,789.87654" . should_equal 123456789.87654 + Float.parse "123,456,789.87654" locale=Locale.default . should_equal 123456789.87654 Float.parse "123.456.789,87654" locale=Locale.italy . should_equal 123456789.87654 Float.parse "123,456,789.88" format="#,###.##" . should_equal 123456789.88 From 39c44e7adbc4cc47bec207bd50a4f6bfc8a0d03c Mon Sep 17 00:00:00 2001 From: Adam Obuchowicz Date: Wed, 30 Oct 2024 10:34:58 +0100 Subject: [PATCH 005/286] Table Input Widget: Size persistence (#11435) Fixes #10861 Every widget may set metadata on its AST. Because once widget picker will be implemented a single AST node may have many possible widgets, their settings are kept in a map keyed by their name/key. 
--- CHANGELOG.md | 3 ++ .../components/GraphEditor/NodeWidgetTree.vue | 25 +++++++++------ .../GraphEditor/widgets/WidgetFunction.vue | 5 +++ .../GraphEditor/widgets/WidgetTableEditor.vue | 28 ++++++++++++++-- .../project-view/providers/widgetRegistry.ts | 25 ++++++++------- app/gui/src/project-view/util/ast/reactive.ts | 12 ++++++- app/ydoc-server/src/edits.ts | 17 +++++++--- app/ydoc-server/src/fileFormat.ts | 6 ++-- app/ydoc-server/src/languageServerSession.ts | 32 +++++++++++++------ app/ydoc-shared/src/ast/mutableModule.ts | 32 +++++++++++++++++-- app/ydoc-shared/src/ast/tree.ts | 23 +++++++++++++ 11 files changed, 164 insertions(+), 44 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 570388a6a1b1..97754e3e8796 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,8 @@ - [Changed the way of adding new column in Table Input Widget][11388]. The "virtual column" is replaced with an explicit (+) button. - [New dropdown-based component menu][11398]. +- [Size of Table Input Widget is preserved and restored after project + re-opening][11435] [11151]: https://github.com/enso-org/enso/pull/11151 [11271]: https://github.com/enso-org/enso/pull/11271 @@ -20,6 +22,7 @@ [11383]: https://github.com/enso-org/enso/pull/11383 [11388]: https://github.com/enso-org/enso/pull/11388 [11398]: https://github.com/enso-org/enso/pull/11398 +[11435]: https://github.com/enso-org/enso/pull/11435 #### Enso Standard Library diff --git a/app/gui/src/project-view/components/GraphEditor/NodeWidgetTree.vue b/app/gui/src/project-view/components/GraphEditor/NodeWidgetTree.vue index c16fd3fc12c6..ea545d34b788 100644 --- a/app/gui/src/project-view/components/GraphEditor/NodeWidgetTree.vue +++ b/app/gui/src/project-view/components/GraphEditor/NodeWidgetTree.vue @@ -67,16 +67,23 @@ function handleWidgetUpdates(update: WidgetUpdate) { selectNode() const edit = update.edit ?? 
graph.startEdit() if (update.portUpdate) { - const { value, origin } = update.portUpdate + const { origin } = update.portUpdate if (Ast.isAstId(origin)) { - const ast = - value instanceof Ast.Ast ? value - : value == null ? Ast.Wildcard.new(edit) - : undefined - if (ast) { - edit.replaceValue(origin, ast) - } else if (typeof value === 'string') { - edit.tryGet(origin)?.syncToCode(value) + if ('value' in update.portUpdate) { + const value = update.portUpdate.value + const ast = + value instanceof Ast.Ast ? value + : value == null ? Ast.Wildcard.new(edit) + : undefined + if (ast) { + edit.replaceValue(origin, ast) + } else if (typeof value === 'string') { + edit.tryGet(origin)?.syncToCode(value) + } + } + if ('metadata' in update.portUpdate) { + const { metadataKey, metadata } = update.portUpdate + edit.tryGet(origin)?.setWidgetMetadata(metadataKey, metadata) } } else { console.error(`[UPDATE ${origin}] Invalid top-level origin. Expected expression ID.`) diff --git a/app/gui/src/project-view/components/GraphEditor/widgets/WidgetFunction.vue b/app/gui/src/project-view/components/GraphEditor/widgets/WidgetFunction.vue index 2616049e19eb..386ff90e0a3a 100644 --- a/app/gui/src/project-view/components/GraphEditor/widgets/WidgetFunction.vue +++ b/app/gui/src/project-view/components/GraphEditor/widgets/WidgetFunction.vue @@ -82,6 +82,11 @@ const innerInput = computed(() => { function handleArgUpdate(update: WidgetUpdate): boolean { const app = application.value if (update.portUpdate && app instanceof ArgumentApplication) { + if (!('value' in update.portUpdate)) { + if (!Ast.isAstId(update.portUpdate.origin)) + console.error('Tried to set metadata on arg placeholder. This is not implemented yet!') + return false + } const { value, origin } = update.portUpdate const edit = update.edit ?? graph.startEdit() // Find the updated argument by matching origin port/expression with the appropriate argument. 
diff --git a/app/gui/src/project-view/components/GraphEditor/widgets/WidgetTableEditor.vue b/app/gui/src/project-view/components/GraphEditor/widgets/WidgetTableEditor.vue index da5a17f3f257..7733a7f444f3 100644 --- a/app/gui/src/project-view/components/GraphEditor/widgets/WidgetTableEditor.vue +++ b/app/gui/src/project-view/components/GraphEditor/widgets/WidgetTableEditor.vue @@ -29,12 +29,29 @@ import type { } from 'ag-grid-enterprise' import { computed, markRaw, ref } from 'vue' import type { ComponentExposed } from 'vue-component-type-helpers' +import { z } from 'zod' const props = defineProps(widgetProps(widgetDefinition)) const graph = useGraphStore() const suggestionDb = useSuggestionDbStore() const grid = ref>>() +const configSchema = z.object({ size: z.object({ x: z.number(), y: z.number() }) }) +type Config = z.infer + +const DEFAULT_CFG: Config = { size: { x: 200, y: 150 } } + +const config = computed(() => { + const configObj = props.input.value.widgetMetadata('WidgetTableEditor') + if (configObj == null) return DEFAULT_CFG + const parsed = configSchema.safeParse(configObj) + if (parsed.success) return parsed.data + else { + console.warn('Table Editor Widget: could not read config; invalid format: ', parsed.error) + return DEFAULT_CFG + } +}) + const { rowData, columnDefs, moveColumn, moveRow, pasteFromClipboard } = useTableNewArgument( () => props.input, graph, @@ -131,15 +148,22 @@ const headerEditHandler = new HeaderEditing() // === Resizing === -const size = ref(new Vec2(200, 150)) const graphNav = injectGraphNavigator() +const size = computed(() => Vec2.FromXY(config.value.size)) + const clientBounds = computed({ get() { return new Rect(Vec2.Zero, size.value.scale(graphNav.scale)) }, set(value) { - size.value = new Vec2(value.width / graphNav.scale, value.height / graphNav.scale) + props.onUpdate({ + portUpdate: { + origin: props.input.portId, + metadataKey: 'WidgetTableEditor', + metadata: { size: { x: value.width / graphNav.scale, y: value.height 
/ graphNav.scale } }, + }, + }) }, }) diff --git a/app/gui/src/project-view/providers/widgetRegistry.ts b/app/gui/src/project-view/providers/widgetRegistry.ts index c196783702a4..3f4afbd3a5f6 100644 --- a/app/gui/src/project-view/providers/widgetRegistry.ts +++ b/app/gui/src/project-view/providers/widgetRegistry.ts @@ -12,7 +12,7 @@ import type { WidgetEditHandlerParent } from './widgetRegistry/editHandler' export type WidgetComponent = Component> export namespace WidgetInput { - /** TODO: Add docs */ + /** Create a basic {@link WidgetInput } from AST node. */ export function FromAst(ast: A): WidgetInput & { value: A } { return { portId: ast.id, @@ -20,7 +20,7 @@ export namespace WidgetInput { } } - /** TODO: Add docs */ + /** Create a basic {@link WidgetInput } from AST node with enforced port. */ export function FromAstWithPort( ast: A, ): WidgetInput & { value: A } { @@ -31,7 +31,7 @@ export namespace WidgetInput { } } - /** TODO: Add docs */ + /** A string representation of widget's value - the code in case of AST value. */ export function valueRepr(input: WidgetInput): string | undefined { if (typeof input.value === 'string') return input.value else return input.value?.code() @@ -56,24 +56,24 @@ export namespace WidgetInput { isPlaceholder(input) || input.value instanceof nodeType } - /** TODO: Add docs */ + /** Check if input's value is existing AST node (not placeholder or token). */ export function isAst(input: WidgetInput): input is WidgetInput & { value: Ast.Ast } { return input.value instanceof Ast.Ast } - /** Rule out token inputs. */ + /** Check if input's value is existing AST node or placeholder. Rule out token inputs. */ export function isAstOrPlaceholder( input: WidgetInput, ): input is WidgetInput & { value: Ast.Ast | string | undefined } { return isPlaceholder(input) || isAst(input) } - /** TODO: Add docs */ + /** Check if input's value is an AST token. 
*/ export function isToken(input: WidgetInput): input is WidgetInput & { value: Ast.Token } { return input.value instanceof Ast.Token } - /** TODO: Add docs */ + /** Check if input's value is an AST which potentially may be a function call. */ export function isFunctionCall( input: WidgetInput, ): input is WidgetInput & { value: Ast.App | Ast.Ident | Ast.PropertyAccess | Ast.OprApp } { @@ -163,15 +163,18 @@ export interface WidgetProps { * port may not represent any existing AST node) with `edit` containing any additional modifications * (like inserting necessary imports). * + * The same way widgets may set their metadata (as this is also technically an AST modification). + * Every widget type should set it's name as `metadataKey`. + * * The handlers interested in a specific port update should apply it using received edit. The edit * is committed in {@link NodeWidgetTree}. */ export interface WidgetUpdate { edit?: MutableModule | undefined - portUpdate?: { - value: Ast.Owned | string | undefined - origin: PortId - } + portUpdate?: { origin: PortId } & ( + | { value: Ast.Owned | string | undefined } + | { metadataKey: string; metadata: unknown } + ) } /** diff --git a/app/gui/src/project-view/util/ast/reactive.ts b/app/gui/src/project-view/util/ast/reactive.ts index 5223dc1ed109..aa8e596d01ad 100644 --- a/app/gui/src/project-view/util/ast/reactive.ts +++ b/app/gui/src/project-view/util/ast/reactive.ts @@ -2,13 +2,23 @@ import { markRaw, shallowReactive } from 'vue' import { MutableModule } from 'ydoc-shared/ast' import * as Y from 'yjs' -/** TODO: Add docs */ +/** + * Make AST structures inside the module reactive (including the node's and widgets' metadata). + * + * Note that non-Ast structured fields (e.g. ArgumentDefinition) are not themselves reactive -- + * an access is tracked when obtaining the object from the Ast, not when accessing the inner + * object's fields. 
+ */ export function reactiveModule(doc: Y.Doc, onCleanup: (f: () => void) => void): MutableModule { const module = markRaw(new MutableModule(doc)) const handle = module.observe((update) => { update.nodesAdded.forEach((astId) => { const fields = module.get(astId).fields ;(fields as any)._map = shallowReactive((fields as any)._map) + const metadata = fields.get('metadata') + ;(metadata as any)._map = shallowReactive((metadata as any)._map) + const widgetsMetadata = metadata.get('widget') + ;(widgetsMetadata as any)._map = shallowReactive((widgetsMetadata as any)._map) }) }) onCleanup(() => module.unobserve(handle)) diff --git a/app/ydoc-server/src/edits.ts b/app/ydoc-server/src/edits.ts index 8274e0412da3..2e72a1b79b39 100644 --- a/app/ydoc-server/src/edits.ts +++ b/app/ydoc-server/src/edits.ts @@ -38,7 +38,7 @@ const MAX_SIZE_FOR_NORMAL_DIFF = 30000 interface AppliedUpdates { newCode: string | undefined newIdMap: IdMap | undefined - newMetadata: fileFormat.IdeMetadata['node'] | undefined + newMetadata: fileFormat.IdeMetadata | undefined } /** Return an object containing updated versions of relevant fields, given an update payload. */ @@ -49,7 +49,7 @@ export function applyDocumentUpdates( ): AppliedUpdates { const codeChanged = update.nodesUpdated.size || update.nodesAdded.size || update.nodesDeleted.size let idsChanged = false - let metadataChanged = false + let metadataChanged = update.widgetMetadataUpdated.size > 0 for (const { changes } of update.metadataUpdated) { for (const [key] of changes) { if (key === 'externalId') { @@ -63,7 +63,7 @@ export function applyDocumentUpdates( let newIdMap = undefined let newCode = undefined - let newMetadata = undefined + let newMetadata: fileFormat.IdeMetadata | undefined = undefined const syncModule = new MutableModule(doc.ydoc) const root = syncModule.root() @@ -76,19 +76,26 @@ export function applyDocumentUpdates( if (codeChanged || idsChanged || metadataChanged) { // Update the metadata object. 
// Depth-first key order keeps diffs small. - newMetadata = {} satisfies fileFormat.IdeMetadata['node'] + newMetadata = { node: {}, widget: {} } root.visitRecursiveAst(ast => { let pos = ast.nodeMetadata.get('position') const vis = ast.nodeMetadata.get('visualization') const colorOverride = ast.nodeMetadata.get('colorOverride') if (vis && !pos) pos = { x: 0, y: 0 } if (pos) { - newMetadata![ast.externalId] = { + newMetadata!.node[ast.externalId] = { position: { vector: [Math.round(pos.x), Math.round(-pos.y)] }, visualization: vis && translateVisualizationToFile(vis), colorOverride, } } + const widgets = ast.widgetsMetadata() + if (!widgets.entries().next().done) { + if (newMetadata!.widget == null) newMetadata!.widget = {} + newMetadata!.widget[ast.externalId] = Object.fromEntries( + widgets.entries() as IterableIterator<[string, Record]>, + ) + } }) } diff --git a/app/ydoc-server/src/fileFormat.ts b/app/ydoc-server/src/fileFormat.ts index 6cf6c8787479..213219e5c965 100644 --- a/app/ydoc-server/src/fileFormat.ts +++ b/app/ydoc-server/src/fileFormat.ts @@ -34,15 +34,12 @@ export const nodeMetadata = z }) .passthrough() -export type ImportMetadata = z.infer -export const importMetadata = z.object({}).passthrough() - export type IdeMetadata = z.infer export const ideMetadata = z .object({ node: z.record(z.string().uuid(), nodeMetadata), - import: z.record(z.string(), importMetadata), snapshot: z.string().optional(), + widget: z.optional(z.record(z.string().uuid(), z.record(z.string(), z.unknown()))), }) .passthrough() .default(() => defaultMetadata().ide) @@ -87,6 +84,7 @@ function defaultMetadata() { ide: { node: {}, import: {}, + widget: {}, }, } } diff --git a/app/ydoc-server/src/languageServerSession.ts b/app/ydoc-server/src/languageServerSession.ts index 80f7f42cad8f..98d7923d3b9e 100644 --- a/app/ydoc-server/src/languageServerSession.ts +++ b/app/ydoc-server/src/languageServerSession.ts @@ -474,12 +474,14 @@ class ModulePersistence extends ObservableV2<{ 
removed: () => void }> { private static getIdMapToPersist( idMap: IdMap | undefined, - metadata: fileFormat.IdeMetadata['node'], + metadata: fileFormat.IdeMetadata, ): IdMap | undefined { if (idMap === undefined) { return } else { - const entriesIntersection = idMap.entries().filter(([, id]) => id in metadata) + const entriesIntersection = idMap + .entries() + .filter(([, id]) => id in metadata.node || id in (metadata.widget ?? {})) return new IdMap(entriesIntersection) } } @@ -496,7 +498,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { synced: EnsoFileParts, newCode: string | undefined, newIdMap: IdMap | undefined, - newMetadata: fileFormat.IdeMetadata['node'] | undefined, + newMetadata: fileFormat.IdeMetadata | undefined, ) { if (this.syncedContent == null || this.syncedVersion == null) return @@ -508,14 +510,13 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { json.stringify({ ...this.syncedMeta, ide: { - ...this.syncedMeta.ide, + ...newMetadata, ...newSnapshot, - node: newMetadata, }, }) const idMapToPersist = (newIdMap || newMetadata) && - ModulePersistence.getIdMapToPersist(newIdMap, newMetadata ?? this.syncedMeta.ide.node) + ModulePersistence.getIdMapToPersist(newIdMap, newMetadata ?? this.syncedMeta.ide) const newIdMapToPersistJson = idMapToPersist && serializeIdMap(idMapToPersist) const code = newCode ?? 
synced.code const newContent = combineFileParts({ @@ -566,7 +567,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { if (!result.ok) return handleError(result.error) this.syncedContent = newContent this.syncedVersion = newVersion - if (newMetadata) this.syncedMeta.ide.node = newMetadata + if (newMetadata) this.syncedMeta.ide = newMetadata if (newCode) this.syncedCode = newCode if (newIdMapToPersistJson) this.syncedIdMap = newIdMapToPersistJson if (newMetadataJson) this.syncedMetaJson = newMetadataJson @@ -583,6 +584,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { const { code, idMapJson, metadataJson } = contentsReceived const metadata = fileFormat.tryParseMetadataOrFallback(metadataJson) const nodeMeta = Object.entries(metadata.ide.node) + const widgetMeta = Object.entries(metadata.ide.widget ?? {}) let parsedSpans let parsedIdMap @@ -646,7 +648,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { (code !== this.syncedCode || idMapJson !== this.syncedIdMap || metadataJson !== this.syncedMetaJson) && - nodeMeta.length !== 0 + (nodeMeta.length !== 0 || widgetMeta.length !== 0) ) { const externalIdToAst = new Map() astRoot.visitRecursiveAst(ast => { @@ -671,6 +673,18 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { const newColorOverride = meta.colorOverride if (oldColorOverride !== newColorOverride) metadata.set('colorOverride', newColorOverride) } + for (const [id, meta] of widgetMeta) { + if (typeof id !== 'string') continue + const ast = externalIdToAst.get(id as ExternalId) + if (!ast) { + missing.add(id) + continue + } + const widgetsMetadata = syncModule.getVersion(ast).mutableWidgetsMetadata() + for (const [widgetKey, widgetMeta] of Object.entries(meta)) { + widgetsMetadata.set(widgetKey, widgetMeta) + } + } } this.syncedCode = code @@ -685,7 +699,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { contentsReceived, this.syncedCode 
?? undefined, unsyncedIdMap, - this.syncedMeta?.ide?.node, + this.syncedMeta?.ide, ) } diff --git a/app/ydoc-shared/src/ast/mutableModule.ts b/app/ydoc-shared/src/ast/mutableModule.ts index a8f3d3bec96e..6271b05453ce 100644 --- a/app/ydoc-shared/src/ast/mutableModule.ts +++ b/app/ydoc-shared/src/ast/mutableModule.ts @@ -40,6 +40,7 @@ export interface ModuleUpdate { nodesUpdated: Set updateRoots: Set metadataUpdated: { id: AstId; changes: Map }[] + widgetMetadataUpdated: Set origin: Origin | undefined } @@ -280,6 +281,12 @@ export class MutableModule implements Module { metadata.get(key as any), ]) updateBuilder.updateMetadata(id, changes) + } else if (event.target.parent.parent.parent === this.nodes) { + // Updates to some specific widget's metadata + const id = event.target.parent.parent.get('id') + assertAstId(id) + if (!this.nodes.get(id)) continue + updateBuilder.updateWidgets(id) } } return updateBuilder.finish() @@ -351,6 +358,7 @@ export class MutableModule implements Module { const metadata = new Y.Map() as unknown as FixedMap const metadataFields = setAll(metadata, { externalId: externalId ?? 
newExternalId(), + widget: new Y.Map(), }) const fields = setAll(map_, { id, @@ -437,7 +445,11 @@ class UpdateBuilder { readonly nodesAdded = new Set() readonly nodesDeleted = new Set() readonly nodesUpdated = new Set() - readonly metadataUpdated: { id: AstId; changes: Map }[] = [] + readonly metadataUpdated: { + id: AstId + changes: Map + }[] = [] + readonly widgetMetadataUpdated = new Set() readonly origin: Origin | undefined private readonly module: Module @@ -471,15 +483,29 @@ class UpdateBuilder { } } if (fieldsChanged) this.nodesUpdated.add(id) - if (metadataChanges) this.metadataUpdated.push({ id, changes: metadataChanges }) + if (metadataChanges) { + this.metadataUpdated.push({ id, changes: metadataChanges }) + if (metadataChanges.has('widget')) { + this.widgetMetadataUpdated.add(id) + } + } } updateMetadata(id: AstId, changes: Iterable) { const changeMap = new Map() - for (const [key, value] of changes) changeMap.set(key, value) + for (const [key, value] of changes) { + changeMap.set(key, value) + if (key === 'widget') { + this.widgetMetadataUpdated.add(id) + } + } this.metadataUpdated.push({ id, changes: changeMap }) } + updateWidgets(id: AstId) { + this.widgetMetadataUpdated.add(id) + } + deleteNode(id: AstId) { this.nodesDeleted.add(id) } diff --git a/app/ydoc-shared/src/ast/tree.ts b/app/ydoc-shared/src/ast/tree.ts index 75a76c710686..267e8fd4b6c8 100644 --- a/app/ydoc-shared/src/ast/tree.ts +++ b/app/ydoc-shared/src/ast/tree.ts @@ -1,4 +1,5 @@ /* eslint-disable @typescript-eslint/no-unsafe-declaration-merging */ +import * as Y from 'yjs' import type { Identifier, IdentifierOrOperatorIdentifier, @@ -53,6 +54,7 @@ export type AstId = string & { [brandAstId]: never } /** @internal */ export interface MetadataFields { externalId: ExternalId + widget: Y.Map } export interface NodeMetadataFields { position?: { x: number; y: number } | undefined @@ -66,6 +68,7 @@ const nodeMetadataKeys = allKeys({ }) export type NodeMetadata = FixedMapView export type 
MutableNodeMetadata = FixedMap + /** @internal */ interface RawAstFields { id: AstId @@ -105,6 +108,16 @@ export abstract class Ast { return metadata as FixedMapView } + /** Get metadata of all widgets assigned to this node. */ + widgetsMetadata(): FixedMapView> { + return this.fields.get('metadata').get('widget') + } + + /** Get metadata of given widget assigned to this node. */ + widgetMetadata(widgetKey: string): DeepReadonly | undefined { + return this.fields.get('metadata').get('widget').get(widgetKey) + } + /** Returns a JSON-compatible object containing all metadata properties. */ serializeMetadata(): MetadataFields & NodeMetadataFields { return this.fields.get('metadata').toJSON() as any @@ -249,6 +262,16 @@ export abstract class MutableAst extends Ast { this.fields.get('metadata').set('externalId', id) } + /** Set the widget's new metadata. */ + setWidgetMetadata(widgetKey: string, widgetMetadata: unknown) { + this.fields.get('metadata').get('widget').set(widgetKey, widgetMetadata) + } + + /** Get map of all widget's metadata. */ + mutableWidgetsMetadata() { + return this.fields.get('metadata').get('widget') + } + /** TODO: Add docs */ mutableNodeMetadata(): MutableNodeMetadata { const metadata = this.fields.get('metadata') From 10d76ca6148f2efb931bbc651d50ee465eb36071 Mon Sep 17 00:00:00 2001 From: Nikita Pekin Date: Wed, 30 Oct 2024 13:15:37 +0200 Subject: [PATCH 006/286] chore(flake.nix): Add missing macOS-specific dependencies of `enso-formatter` (#11430) When compiling the `enso-formatter` binary for use in `~/.cargo/bin` (for the `cloud-v2` repo), the formatter requires some additional dependencies that are not currently provided in the nix build environment. This PR adds those dependencies to `flake.nix` so that `enso-formatter` compiles successfully. 
cc @somebody1234 --- flake.nix | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index cb7ec8a6bfbb..61d5e39f3af2 100644 --- a/flake.nix +++ b/flake.nix @@ -30,7 +30,11 @@ buildInputs = with pkgs; [ # === Graal dependencies === libxcrypt-legacy - ]; + ] ++ (if !isOnLinux then [ + # === macOS-specific dependencies === + darwin.apple_sdk.frameworks.IOKit # Required by `enso-formatter`. + darwin.apple_sdk.frameworks.Security # Required by `enso-formatter`. + ] else [ ]); packages = with pkgs; [ # === TypeScript dependencies === From dc50a7e3691c8e73da0fe1bfd598427f371daff6 Mon Sep 17 00:00:00 2001 From: Gregory Michael Travis Date: Wed, 30 Oct 2024 08:50:35 -0400 Subject: [PATCH 007/286] HTTP response caching, with TTL and LRU logic (#11342) --- CHANGELOG.md | 3 + .../lib/Standard/Base/0.0.0-dev/src/Data.enso | 50 ++- .../src/Enso_Cloud/Internal/Utils.enso | 3 +- .../Base/0.0.0-dev/src/Errors/Common.enso | 23 ++ .../src/Internal/Data_Read_Helpers.enso | 6 +- .../Base/0.0.0-dev/src/Network/HTTP.enso | 47 ++- .../src/Network/HTTP/Cache_Policy.enso | 19 + .../main/java/org/enso/base/Stream_Utils.java | 24 ++ .../java/org/enso/base/cache/LRUCache.java | 353 ++++++++++++++++++ .../base/cache/ResponseTooLargeException.java | 15 + .../enso_cloud/EnsoHTTPResponseCache.java | 163 ++++++++ .../base/enso_cloud/EnsoSecretHelper.java | 130 +++++-- test/Base_Tests/src/Network/Http_Spec.enso | 7 +- test/Table_Tests/src/IO/Fetch_Spec.enso | 341 +++++++++++++++++ .../org/enso/shttp/HTTPTestHelperServer.java | 1 + .../test_helpers/DownloadTestHandler.java | 58 +++ 16 files changed, 1200 insertions(+), 43 deletions(-) create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Cache_Policy.enso create mode 100644 std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java create mode 100644 std-bits/base/src/main/java/org/enso/base/cache/ResponseTooLargeException.java create mode 100644 
std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoHTTPResponseCache.java create mode 100644 tools/http-test-helper/src/main/java/org/enso/shttp/test_helpers/DownloadTestHandler.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 97754e3e8796..9e54b5f091c3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -106,6 +106,8 @@ range.][11135] - [Added `format` parameter to `Decimal.parse`.][11205] - [Added `format` parameter to `Float.parse`.][11229] +- [Implemented a cache for HTTP data requests, as well as a per-file response + size limit.][11342] [10614]: https://github.com/enso-org/enso/pull/10614 [10660]: https://github.com/enso-org/enso/pull/10660 @@ -121,6 +123,7 @@ [11135]: https://github.com/enso-org/enso/pull/11135 [11205]: https://github.com/enso-org/enso/pull/11205 [11229]: https://github.com/enso-org/enso/pull/11229 +[11342]: https://github.com/enso-org/enso/pull/11342 #### Enso Language & Runtime diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso index b6886fa047df..2123bd0f5814 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso @@ -12,6 +12,7 @@ import project.Errors.Illegal_Argument.Illegal_Argument import project.Errors.Problem_Behavior.Problem_Behavior import project.Internal.Data_Read_Helpers import project.Meta +import project.Network.HTTP.Cache_Policy.Cache_Policy import project.Network.HTTP.Header.Header import project.Network.HTTP.HTTP import project.Network.HTTP.HTTP_Error.HTTP_Error @@ -51,6 +52,19 @@ from project.System.File_Format import Auto_Detect, File_Format If set to `Report_Error`, the operation fails with a dataflow error. If set to `Ignore`, the operation proceeds without errors or warnings. + ! Request Caching + + Responses to HTTP data requests are cached, and additional requests for the + same resources will use the cache, saving a round-trip call to the remote + server. 
Two resources are considered the same if the URIs and request + headers are the same. Header order does not affect sameness. + + The cache respects the "max-age" and "Age" response headers; see + `Data.fetch` for more details. + + The cached values are retained as long as the project remains open. Closing + a project will clear the cache. + > Example Read the first sheet of an XLSX from disk and convert it into a table. @@ -72,7 +86,7 @@ read : Text | URI | File -> File_Format -> Problem_Behavior -> Any ! File_Error read path=(Missing_Argument.throw "path") format=Auto_Detect (on_problems : Problem_Behavior = ..Report_Warning) = case path of _ : Text -> if Data_Read_Helpers.looks_like_uri path then Data_Read_Helpers.fetch_following_data_links path format=format else read (File.new path) format on_problems - uri : URI -> Data_Read_Helpers.fetch_following_data_links uri format=format + uri : URI -> fetch uri format=format _ -> file_obj = File.new path if file_obj.is_directory then Error.throw (Illegal_Argument.Error "Cannot `read` a directory, use `Data.list`.") else @@ -183,6 +197,32 @@ list (directory:(Text | File)=enso_project.root) (name_filter:Text="") recursive Defaults to `Auto_Detect`. If `Raw_Response` is selected or if the format cannot be determined automatically, a raw HTTP `Response` will be returned. + ! Request Caching + + Responses to HTTP data requests are cached, and additional requests for the + same resources will use the cache, saving a round-trip call to the remote + server. Two resources are considered the same if the URIs and request + headers are the same. Header order does not affect sameness. + + The cached values are retained as long as the project remains open. Closing + a project will clear the cache. + + The cache respects the "max-age" and "Age" response headers received from + remote servers. These headers are used to determine if the cached value is + fresh or stale. 
If it is stale, the cached value is removed and a request + is made again to the remote servers. + + The following limits are imposed on values stored in the cache: + - Single file limit: a single file can be no more than 10M. + - Total cache size limit: the entire cache can be no more than 10G. + + For data responses over the single file limit, you can use `Data.download` + to download the file locally. Download sizes are not constrained by either + limit. + + If the entire cache goes over the total cache size limit, the + least-recently-used entries are removed. + > Example Read from an HTTP endpoint. @@ -198,9 +238,9 @@ list (directory:(Text | File)=enso_project.root) (name_filter:Text="") recursive @uri (Text_Input display=..Always) @format Data_Read_Helpers.format_widget_with_raw_response @headers Header.default_widget -fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> File_Format -> Any ! Request_Error | HTTP_Error -fetch (uri:(URI | Text)=(Missing_Argument.throw "uri")) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) (format = Auto_Detect) = - Data_Read_Helpers.fetch_following_data_links uri method headers (Data_Read_Helpers.handle_legacy_format "fetch" "format" format) +fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> File_Format -> Cache_Policy -> Any ! Request_Error | HTTP_Error +fetch (uri:(URI | Text)=(Missing_Argument.throw "uri")) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) (format = Auto_Detect) (cache_policy:Cache_Policy = ..Default) = + Data_Read_Helpers.fetch_following_data_links uri method headers (Data_Read_Helpers.handle_legacy_format "fetch" "format" format) cache_policy=cache_policy ## ALIAS http post, upload GROUP Output @@ -347,7 +387,7 @@ post (uri:(URI | Text)=(Missing_Argument.throw "uri")) (body:Request_Body=..Empt download : (URI | Text) -> Writable_File -> HTTP_Method -> Vector (Header | Pair Text Text) -> File ! 
Request_Error | HTTP_Error download (uri:(URI | Text)=(Missing_Argument.throw "uri")) file:Writable_File (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) = Context.Output.if_enabled disabled_message="As writing is disabled, cannot download to a file. Press the Write button ▶ to perform the operation." panic=False <| - response = HTTP.fetch uri method headers + response = HTTP.fetch uri method headers cache_policy=Cache_Policy.No_Cache case Data_Link.is_data_link response.body.metadata of True -> # If the resource was a data link, we follow it, download the target data and try to write it to a file. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso index 73dd820165c3..c7b973b73e49 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso @@ -12,6 +12,7 @@ import project.Enso_Cloud.Errors.Not_Logged_In import project.Enso_Cloud.Internal.Authentication import project.Error.Error import project.Function.Function +import project.Network.HTTP.Cache_Policy.Cache_Policy import project.Network.HTTP.Header.Header import project.Network.HTTP.HTTP import project.Network.HTTP.HTTP_Error.HTTP_Error @@ -95,7 +96,7 @@ http_request (method : HTTP_Method) (url : URI) (body : Request_Body = ..Empty) all_headers = [authorization_header] + additional_headers as_connection_error err = Error.throw (Enso_Cloud_Error.Connection_Error err) - response = HTTP.new.request (Request.new method url headers=all_headers body=body) error_on_failure_code=False + response = HTTP.new.request (Request.new method url headers=all_headers body=body) cache_policy=..No_Cache error_on_failure_code=False . catch HTTP_Error as_connection_error . 
catch Request_Error as_connection_error if response.is_error && (retries > 0) then http_request method url body additional_headers error_handlers (retries - 1) else diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso index d1f6f53c3b56..86c132f88b29 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso @@ -15,6 +15,7 @@ polyglot java import java.lang.ArithmeticException polyglot java import java.lang.ClassCastException polyglot java import java.lang.OutOfMemoryError polyglot java import org.enso.base.CompareException +polyglot java import org.enso.base.cache.ResponseTooLargeException ## An error indicating that no value was found. type Not_Found @@ -553,3 +554,25 @@ type Out_Of_Range to_text self = extra = if self.message.is_nothing then "" else ": "+self.message.to_text "(Out_Of_Range (value = "+self.value.to_text+")" + extra + ")" + +## Indicates that the response from a remote endpoint is over the size limit. +type Response_Too_Large + ## PRIVATE + Error limit:Integer + + ## PRIVATE + Create a human-readable version of the error. + to_display_text : Text + to_display_text self = + suggestion = " Use `Data.fetch` with `cache_policy=No_Cache`, or use `Data.download` to fetch the data to a local file, and `Data.read` to read the file." + "Response too large: repsonse size is over the limit ("+self.limit.to_text+")" + suggestion + + ## PRIVATE + to_text : Text + to_text self = + "(Response_Too_Large (limit = "+self.limit.to_text+")" + ")" + + ## PRIVATE + Convert the Java exception to an Enso dataflow error.
+ handle_java_exception ~action = + Panic.catch ResponseTooLargeException action (cause-> Error.throw (Response_Too_Large.Error cause.payload.getLimit)) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Data_Read_Helpers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Data_Read_Helpers.enso index 37d879f0a493..fa00cdafa20d 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Data_Read_Helpers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Data_Read_Helpers.enso @@ -10,10 +10,12 @@ import project.Errors.Deprecated.Deprecated import project.Errors.Problem_Behavior.Problem_Behavior import project.Metadata.Display import project.Metadata.Widget +import project.Network.HTTP.Cache_Policy.Cache_Policy import project.Network.HTTP.HTTP import project.Network.HTTP.HTTP_Error.HTTP_Error import project.Network.HTTP.HTTP_Method.HTTP_Method import project.Network.URI.URI +import project.Nothing.Nothing import project.Warning.Warning from project.Data import Raw_Response from project.Data.Boolean import Boolean, False, True @@ -31,9 +33,9 @@ looks_like_uri path:Text -> Boolean = ## PRIVATE A common implementation for fetching a resource and decoding it, following encountered data links. 
-fetch_following_data_links (uri:URI) (method:HTTP_Method = HTTP_Method.Get) (headers:Vector = []) format = +fetch_following_data_links (uri:URI) (method:HTTP_Method = ..Get) (headers:Vector = []) format (cache_policy:Cache_Policy = ..Default) = fetch_and_decode = - response = HTTP.fetch uri method headers + response = HTTP.fetch uri method headers cache_policy=cache_policy decode_http_response_following_data_links response format error_handler attempt = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso index 34511c97e7a9..5f115b906d0e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso @@ -1,19 +1,24 @@ import project.Any.Any import project.Data.Dictionary.Dictionary import project.Data.Hashset.Hashset +import project.Data.Numbers.Integer import project.Data.Pair.Pair +import project.Data.Sort_Direction.Sort_Direction import project.Data.Text.Encoding.Encoding import project.Data.Text.Text +import project.Data.Time.Date_Time.Date_Time import project.Data.Time.Duration.Duration import project.Data.Vector.No_Wrap import project.Data.Vector.Vector import project.Enso_Cloud.Enso_Secret.Enso_Secret import project.Error.Error import project.Errors.Common.Forbidden_Operation +import project.Errors.Common.Response_Too_Large import project.Errors.Illegal_Argument.Illegal_Argument import project.Errors.Unimplemented.Unimplemented import project.Function.Function import project.Meta +import project.Network.HTTP.Cache_Policy.Cache_Policy import project.Network.HTTP.Header.Header import project.Network.HTTP.HTTP_Error.HTTP_Error import project.Network.HTTP.HTTP_Method.HTTP_Method @@ -44,6 +49,7 @@ polyglot java import java.net.http.HttpRequest.Builder polyglot java import java.net.InetSocketAddress polyglot java import java.net.ProxySelector polyglot java import javax.net.ssl.SSLContext +polyglot 
java import org.enso.base.enso_cloud.EnsoHTTPResponseCache polyglot java import org.enso.base.enso_cloud.EnsoSecretHelper polyglot java import org.enso.base.file_system.File_Utils polyglot java import org.enso.base.net.http.MultipartBodyBuilder @@ -52,10 +58,14 @@ polyglot java import org.enso.base.net.http.UrlencodedBodyBuilder type HTTP ## PRIVATE Static helper for get-like methods - fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Response ! Request_Error | HTTP_Error - fetch (uri:(URI | Text)) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) = if_fetch_method method <| + + ! Response caching + + See `Data.fetch` for information about response caching. + fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Cache_Policy -> Response ! Request_Error | HTTP_Error + fetch (uri:(URI | Text)) (method:HTTP_Method=..Get) (headers:(Vector (Header | Pair Text Text))=[]) (cache_policy:Cache_Policy = ..Default) = if_fetch_method method <| request = Request.new method uri (Header.unify_vector headers) Request_Body.Empty - HTTP.new.request request + HTTP.new.request request cache_policy=cache_policy ## PRIVATE Static helper for post-like methods @@ -117,23 +127,33 @@ type HTTP Please note, this must be closed after use (either directly or via the helpers on Response_Body). + ! Response caching + + See `Data.fetch` for information about response caching. + Arguments: - req: The HTTP request to send using `self` HTTP client. - error_on_failure_code: Whether or not to throw an error if the response code is not a success code. - request : Request -> Boolean -> Response ! Request_Error | HTTP_Error | Illegal_Argument - request self req error_on_failure_code=True = + request : Request -> Boolean -> Cache_Policy -> Response ! 
Request_Error | HTTP_Error | Illegal_Argument | Response_Too_Large + request self req error_on_failure_code=True (cache_policy:Cache_Policy = ..Default) = # Prevent request if the method is a write-like method and output context is disabled. check_output_context ~action = if (if_fetch_method req.method True if_not=Context.Output.is_enabled) then action else Error.throw (Forbidden_Operation.Error ("As writing is disabled, " + req.method.to_text + " request not sent. Press the Write button ▶ to send it.")) + # You can only explicitly mention the cache for GET requests. + check_cache_policy ~action = + cache_policy_value_ok = req.method == HTTP_Method.Get || cache_policy != Cache_Policy.Use_Cache + if cache_policy_value_ok then action else + Error.throw (Illegal_Argument.Error "Cannot specify cache policy for a "+req.method.to_text+" request") + handle_request_error = handler caught_panic = exception = caught_panic.payload Error.throw (Request_Error.Error (Meta.type_of exception . to_text) exception.getMessage) Panic.catch IllegalArgumentException handler=handler <| Panic.catch IOException handler=handler - handle_request_error <| Illegal_Argument.handle_java_exception <| check_output_context <| + handle_request_error <| Illegal_Argument.handle_java_exception <| check_output_context <| check_cache_policy <| Response_Too_Large.handle_java_exception <| headers = _resolve_headers req headers.if_not_error <| resolved_body = _resolve_body req.body self.hash_method @@ -147,12 +167,25 @@ type HTTP all_headers = headers + boundary_header_list mapped_headers = all_headers.map on_problems=No_Wrap .to_java_pair - response = Response.Value (EnsoSecretHelper.makeRequest (self.make_client self resolved_body.hash) builder req.uri.to_java_representation mapped_headers) + response = Response.Value (EnsoSecretHelper.makeRequest (self.make_client self resolved_body.hash) builder req.uri.to_java_representation mapped_headers (cache_policy.should_use_cache req)) if 
error_on_failure_code.not || response.code.is_success then response else body = response.body.decode_as_text.catch Any _->"" message = if body.is_empty then Nothing else body Error.throw (HTTP_Error.Status_Error response.code message response.uri) + ## ALIAS flush + ICON temp + Clear the HTTP request cache. + + > Example + Clear the HTTP response cache. + + import Standard.Base.Network.HTTP + + HTTP.clear_response_cache + clear_response_cache : Nothing + clear_response_cache -> Nothing = EnsoHTTPResponseCache.clear + ## PRIVATE ADVANCED Create a copy of the HTTP client with a custom SSL context. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Cache_Policy.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Cache_Policy.enso new file mode 100644 index 000000000000..cad1bcece437 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Cache_Policy.enso @@ -0,0 +1,19 @@ +import project.Network.HTTP.HTTP_Method.HTTP_Method +import project.Network.HTTP.Request.Request +from project.Data.Boolean import Boolean, False, True + +type Cache_Policy + ## Use the default policy for the HTTP method of the request. + Default + + ## Use the response cache. + Use_Cache + + ## Don't use the response cache. 
+ No_Cache + + # Default to using the cache for GET requests, unless explicitly disabled + should_use_cache self request:Request -> Boolean = case self of + Cache_Policy.Default -> if request.method == HTTP_Method.Get then True else False + Cache_Policy.Use_Cache -> True + Cache_Policy.No_Cache -> False diff --git a/std-bits/base/src/main/java/org/enso/base/Stream_Utils.java b/std-bits/base/src/main/java/org/enso/base/Stream_Utils.java index 04bd9e3be0f9..cad4db4e9632 100644 --- a/std-bits/base/src/main/java/org/enso/base/Stream_Utils.java +++ b/std-bits/base/src/main/java/org/enso/base/Stream_Utils.java @@ -160,4 +160,28 @@ public void write(byte[] b, int off, int len) throws IOException { outputStreamLike.write(b, off, len); } } + + /** + * Copies the contents of the input stream to the output stream. If the number of bytes copied is + * greater than maxLength, abort the copy and return false; otherwise return true. + */ + public static boolean limitedCopy( + InputStream inputStream, OutputStream outputStream, long maxLength) throws IOException { + byte buffer[] = new byte[4096]; + long numBytesRead = 0; + while (true) { + int n = inputStream.read(buffer); + if (n <= 0) { + break; + } + if (numBytesRead + n <= maxLength) { + outputStream.write(buffer, 0, n); + } + numBytesRead += n; + if (numBytesRead > maxLength) { + return false; + } + } + return true; + } } diff --git a/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java b/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java new file mode 100644 index 000000000000..df1329da6a9c --- /dev/null +++ b/std-bits/base/src/main/java/org/enso/base/cache/LRUCache.java @@ -0,0 +1,353 @@ +package org.enso.base.cache; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.time.Duration; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Comparator; +import
java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Predicate; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; +import org.enso.base.Stream_Utils; + +/** + * LRUCache is a cache for data presented via InputStreams. Files are deleted on JVM exit. + * + *

It puts limits on the size of files that can be requested, and on the total cache size, + * deleting entries to make space for new ones. All cache files are set to be deleted automatically + * on JVM exit. + * + * @param Additional metadata to associate with the data. + */ +public class LRUCache { + private static final Logger logger = Logger.getLogger(LRUCache.class.getName()); + + private final long maxFileSize; + private final long maxTotalCacheSize; + + private final CacheTestParameters cacheTestParameters = new CacheTestParameters(); + + private final Map> cache = new HashMap<>(); + private final Map lastUsed = new HashMap<>(); + + public LRUCache(long maxFileSize, long maxTotalCacheSize) { + this.maxFileSize = maxFileSize; + this.maxTotalCacheSize = maxTotalCacheSize; + } + + public CacheResult getResult(ItemBuilder itemBuilder) + throws IOException, InterruptedException, ResponseTooLargeException { + String cacheKey = itemBuilder.makeCacheKey(); + if (cache.containsKey(cacheKey)) { + return getResultForCacheEntry(cacheKey); + } else { + return makeRequestAndCache(cacheKey, itemBuilder); + } + } + + /** + * IOExceptions thrown by the HTTP request are propagated; IOExceptions thrown while storing the + * data in the cache are caught, and the request is re-issued without caching. + */ + private CacheResult makeRequestAndCache(String cacheKey, ItemBuilder itemBuilder) + throws IOException, InterruptedException, ResponseTooLargeException { + assert !cache.containsKey(cacheKey); + + Item item = itemBuilder.buildItem(); + + if (!item.shouldCache()) { + return new CacheResult<>(item.stream(), item.metadata()); + } + + if (item.sizeMaybe.isPresent()) { + long size = item.sizeMaybe().get(); + if (size > getMaxFileSize()) { + throw new ResponseTooLargeException(getMaxFileSize()); + } + makeRoomFor(size); + } + + try { + // Download the response data. 
+ File responseData = downloadResponseData(cacheKey, item); + M metadata = item.metadata(); + long size = responseData.length(); + ZonedDateTime expiry = getNow().plus(Duration.ofSeconds(item.ttl().get())); + + // Create a cache entry. + var cacheEntry = new CacheEntry<>(responseData, metadata, size, expiry); + cache.put(cacheKey, cacheEntry); + markCacheEntryUsed(cacheKey); + + // Clear out old entries to satisfy the total cache size limit. This might + // be necessary here if we didn't receive a correct content size value. + removeFilesToSatisfyLimit(); + + return getResultForCacheEntry(cacheKey); + } catch (IOException e) { + logger.log( + Level.WARNING, "Failure storing cache entry; will re-execute without caching: {}", e); + // Re-issue the request since we don't know if we've consumed any of the response. + Item rerequested = itemBuilder.buildItem(); + return new CacheResult<>(rerequested.stream(), rerequested.metadata()); + } + } + + /** Mark cache entry used and return a stream reading from the cache file. */ + private CacheResult getResultForCacheEntry(String cacheKey) throws IOException { + markCacheEntryUsed(cacheKey); + return new CacheResult<>( + new FileInputStream(cache.get(cacheKey).responseData), cache.get(cacheKey).metadata()); + } + + /** + * Read the repsonse data from the remote server into the cache file. If the downloaded data is + * over the file size limit, throw a ResponseTooLargeException. + */ + private File downloadResponseData(String cacheKey, Item item) + throws IOException, ResponseTooLargeException { + File temp = File.createTempFile("LRUCache-" + cacheKey, ""); + temp.deleteOnExit(); + var inputStream = item.stream(); + var outputStream = new FileOutputStream(temp); + boolean successful = false; + try { + // Limit the download to getMaxFileSize(). 
+ boolean sizeOK = Stream_Utils.limitedCopy(inputStream, outputStream, getMaxFileSize()); + + if (sizeOK) { + successful = true; + return temp; + } else { + throw new ResponseTooLargeException(getMaxFileSize()); + } + } finally { + outputStream.close(); + if (!successful) { + if (!temp.delete()) { + logger.log(Level.WARNING, "Unable to delete cache file (key {})", cacheKey); + } + } + } + } + + /** Mark the entry with the current time, to maintain LRU data. */ + private void markCacheEntryUsed(String cacheKey) { + lastUsed.put(cacheKey, getNow()); + } + + /** Remove all cache entries (and their files) that have passed their TTL. */ + private void removeStaleEntries() { + var now = getNow(); + removeCacheEntriesByPredicate(e -> e.expiry().isBefore(now)); + } + + /** Remove all cache entries (and their files). */ + public void clear() { + removeCacheEntriesByPredicate(e -> true); + } + + /** Remove all cache entries (and their cache files) that match the predicate. */ + private void removeCacheEntriesByPredicate(Predicate> predicate) { + List>> toRemove = + cache.entrySet().stream() + .filter(me -> predicate.test(me.getValue())) + .collect(Collectors.toList()); + removeCacheEntries(toRemove); + } + + /** Remove a set of cache entries. */ + private void removeCacheEntries(List>> toRemove) { + for (var entry : toRemove) { + removeCacheEntry(entry); + } + } + + /** Remove a cache entry: from `cache`, `lastUsed`, and the filesystem. */ + private void removeCacheEntry(Map.Entry> toRemove) { + var key = toRemove.getKey(); + var value = toRemove.getValue(); + cache.remove(key); + lastUsed.remove(key); + removeCacheFile(key, value); + } + + /** Remove a cache file. */ + private void removeCacheFile(String key, CacheEntry cacheEntry) { + boolean removed = cacheEntry.responseData.delete(); + if (!removed) { + logger.log(Level.WARNING, "Unable to delete cache file for key {0}", key); + } + } + + /** Remove least-recently used entries until there is enough room for a new file. 
*/ + private void makeRoomFor(long newFileSize) { + removeStaleEntries(); + + long totalSize = getTotalCacheSize() + newFileSize; + long maxTotalCacheSize = getMaxTotalCacheSize(); + if (totalSize <= maxTotalCacheSize) { + return; + } + + // Remove least-recently used entries first. + var sortedEntries = getSortedEntries(); + var toRemove = new ArrayList>>(); + for (var mapEntry : sortedEntries) { + if (totalSize <= maxTotalCacheSize) { + break; + } + toRemove.add(mapEntry); + totalSize -= mapEntry.getValue().size(); + } + assert totalSize <= maxTotalCacheSize; + removeCacheEntries(toRemove); + } + + private SortedSet>> getSortedEntries() { + var sortedEntries = new TreeSet>>(cacheEntryLRUComparator); + sortedEntries.addAll(cache.entrySet()); + return sortedEntries; + } + + /** Remove least-recently used entries until the total cache size is under the limit. */ + private void removeFilesToSatisfyLimit() { + makeRoomFor(0L); + } + + private long getTotalCacheSize() { + return cache.values().stream().collect(Collectors.summingLong(e -> e.size())); + } + + private long getMaxFileSize() { + return cacheTestParameters.getMaxFileSizeOverrideTestOnly().orElse(maxFileSize); + } + + private long getMaxTotalCacheSize() { + return cacheTestParameters.getMaxTotalCacheSizeOverrideTestOnly().orElse(maxTotalCacheSize); + } + + public int getNumEntries() { + return cache.size(); + } + + public List getFileSizesTestOnly() { + return new ArrayList<>( + cache.values().stream().map(CacheEntry::size).collect(Collectors.toList())); + } + + private ZonedDateTime getNow() { + return cacheTestParameters.getNowOverrideTestOnly().orElse(ZonedDateTime.now()); + } + + /** Return a set of parameters that can be used to modify settings for testing purposes. 
*/ + public CacheTestParameters getCacheTestParameters() { + return cacheTestParameters; + } + + private record CacheEntry(File responseData, M metadata, long size, ZonedDateTime expiry) {} + + /** + * A record to define the contents and properties of something to be cached. + * + * @param stream The InputStream providing the contents of the thing to be cached. + * @param sizeMaybe (Optional) The size of the data provided by the InputStream + * @param ttl (Optional) The time for which the data is fresh. If the returned Item has a TTL of + * 0, the item will not be cahced at all. + */ + public record Item( + InputStream stream, M metadata, Optional sizeMaybe, Optional ttl) { + + public boolean shouldCache() { + return ttl.isPresent(); + } + } + + public record CacheResult(InputStream inputStream, M metadata) {} + + /** Wraps code that creates an Item to be cached. */ + public interface ItemBuilder { + /** Generate a unique key for the Item */ + String makeCacheKey(); + + /** + * Creates the Item to be cached. Returning an Item with no TTL indicates that the data should + * not be cached. This is only called when the Item is not already present in the cache. + */ + Item buildItem() throws IOException, InterruptedException; + } + + private final Comparator>> cacheEntryLRUComparator = + Comparator.comparing(me -> lastUsed.get(me.getKey())); + + /** A set of parameters that can be used to modify cache settings for testing purposes. */ + public class CacheTestParameters { + /** This value is used for the current time when testing TTL expiration logic. */ + private Optional nowOverrideTestOnly = Optional.empty(); + + /** + * Used for testing file and cache size limits. These cannot be set to values larger than the + * real limits. 
+ */ + private Optional maxFileSizeOverrideTestOnly = Optional.empty(); + + private Optional maxTotalCacheSizeOverrideTestOnly = Optional.empty(); + + public Optional getNowOverrideTestOnly() { + return nowOverrideTestOnly; + } + + public void setNowOverrideTestOnly(ZonedDateTime nowOverride) { + nowOverrideTestOnly = Optional.of(nowOverride); + } + + public void clearNowOverrideTestOnly() { + nowOverrideTestOnly = Optional.empty(); + } + + public Optional getMaxFileSizeOverrideTestOnly() { + return maxFileSizeOverrideTestOnly; + } + + public void setMaxFileSizeOverrideTestOnly(long maxFileSizeOverrideTestOnly_) { + if (maxFileSizeOverrideTestOnly_ > maxFileSize) { + throw new IllegalArgumentException( + "Cannot set the (test-only) maximum file size to more than the allowed limit of " + + maxFileSize); + } + maxFileSizeOverrideTestOnly = Optional.of(maxFileSizeOverrideTestOnly_); + } + + public void clearMaxFileSizeOverrideTestOnly() { + maxFileSizeOverrideTestOnly = Optional.empty(); + } + + public Optional getMaxTotalCacheSizeOverrideTestOnly() { + return maxTotalCacheSizeOverrideTestOnly; + } + + public void setMaxTotalCacheSizeOverrideTestOnly(long maxTotalCacheSizeOverrideTestOnly_) { + if (maxTotalCacheSizeOverrideTestOnly_ > maxTotalCacheSize) { + throw new IllegalArgumentException( + "Cannot set the (test-only) total cache size to more than the allowed limit of " + + maxTotalCacheSize); + } + maxTotalCacheSizeOverrideTestOnly = Optional.of(maxTotalCacheSizeOverrideTestOnly_); + } + + public void clearMaxTotalCacheSizeOverrideTestOnly() { + maxTotalCacheSizeOverrideTestOnly = Optional.empty(); + } + } +} diff --git a/std-bits/base/src/main/java/org/enso/base/cache/ResponseTooLargeException.java b/std-bits/base/src/main/java/org/enso/base/cache/ResponseTooLargeException.java new file mode 100644 index 000000000000..4e7bddf7cbda --- /dev/null +++ b/std-bits/base/src/main/java/org/enso/base/cache/ResponseTooLargeException.java @@ -0,0 +1,15 @@ +package 
org.enso.base.cache; + +public class ResponseTooLargeException extends Exception { + private final long limit; + + public ResponseTooLargeException(long limit) { + super("Response too large: repsonse size is over the limit (" + limit + ")"); + + this.limit = limit; + } + + public long getLimit() { + return limit; + } +} diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoHTTPResponseCache.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoHTTPResponseCache.java new file mode 100644 index 000000000000..12187214b17f --- /dev/null +++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoHTTPResponseCache.java @@ -0,0 +1,163 @@ +package org.enso.base.enso_cloud; + +import java.io.IOException; +import java.io.InputStream; +import java.net.http.HttpHeaders; +import java.util.List; +import java.util.Optional; +import org.enso.base.cache.LRUCache; +import org.enso.base.cache.ResponseTooLargeException; + +/** + * EnsoHTTPResponseCache is a cache for EnsoHttpResponse values that respects the cache control HTTP + * headers received in the original repsonse to a request. + * + *

It uses LRUCache, so it also puts limits on the size of files that can be requested, and on + * the total cache size, deleting entries to make space for new ones. All cache files are set to be + * deleted automatically on JVM exit. + * + *

Without caching, EnsoHttpResponse contains an InputStream providing the response data. When + * there is a cache hit, this stream reads from the local file storing the cached data. When there + * is no cache hit, the InputStream is connected directly to the remote server. + */ +public class EnsoHTTPResponseCache { + // 1 year. + private static final int DEFAULT_TTL_SECONDS = 31536000; + private static final long MAX_FILE_SIZE = 2L * 1024 * 1024 * 1024; + private static final long MAX_TOTAL_CACHE_SIZE = 20L * 1024 * 1024 * 1024; + + private static final LRUCache lruCache = + new LRUCache<>(MAX_FILE_SIZE, MAX_TOTAL_CACHE_SIZE); + + public static EnsoHttpResponse makeRequest(RequestMaker requestMaker) + throws IOException, InterruptedException, ResponseTooLargeException { + var itemBuilder = new ItemBuilder(requestMaker); + + LRUCache.CacheResult cacheResult = lruCache.getResult(itemBuilder); + + return requestMaker.reconstructResponseFromCachedStream( + cacheResult.inputStream(), cacheResult.metadata()); + } + + public static class ItemBuilder implements LRUCache.ItemBuilder { + private final RequestMaker requestMaker; + + ItemBuilder(RequestMaker requestMaker) { + this.requestMaker = requestMaker; + } + + @Override + public String makeCacheKey() { + return requestMaker.hashKey(); + } + + /** Only HTTP 200 responses are cached; all others are returned uncached. */ + @Override + public LRUCache.Item buildItem() throws IOException, InterruptedException { + var response = requestMaker.makeRequest(); + + if (response.statusCode() != 200) { + // Don't cache non-200 repsonses. 
+ return new LRUCache.Item<>( + response.body(), + new Metadata(response.headers(), response.statusCode()), + Optional.empty(), + Optional.empty()); + } else { + InputStream inputStream = response.body(); + var metadata = new Metadata(response.headers(), response.statusCode()); + var sizeMaybe = getResponseDataSize(response.headers()); + int ttl = calculateTTL(response.headers()); + return new LRUCache.Item<>(inputStream, metadata, sizeMaybe, Optional.of(ttl)); + } + } + } + + /** Get the size of the response data, if available. */ + private static Optional getResponseDataSize(HttpHeaders headers) { + return headers.firstValue("content-length").map(Long::parseLong); + } + + /** + * We define the TTL as the amount of time that the response should be considered fresh. + * + *

Define t0 as the time at which the content was generated on the origin server. + * + *

Define t1 as the time at which the current request was handled, either by the origin server + * or an intervening cache. + * + *

The 'Age' header, if present is (t1 - t0). + * + *

The 'max-age' value in the 'Cache-Control' header, if present, is the origin server's + * definition of how long the response should be considered fresh. + * + *

If 'max-age' and 'Age' are both present, we set TTL = max-age - Age. If only 'max-age' is + * present, we set TTL = max-age. If neither are present, we use a default. + */ + private static int calculateTTL(HttpHeaders headers) { + Integer maxAge = getMaxAge(headers); + if (maxAge == null) { + return DEFAULT_TTL_SECONDS; + } else { + int age = headers.firstValue("age").map(Integer::parseInt).orElse(0); + return maxAge - age; + } + } + + private static Integer getMaxAge(HttpHeaders headers) { + var cacheControlMaybe = headers.firstValue("cache-control"); + Integer maxAge = null; + if (cacheControlMaybe.isPresent()) { + var cacheControl = cacheControlMaybe.get(); + var cacheControlEntries = cacheControl.split(","); + for (var entry : cacheControlEntries) { + if (entry.trim().toLowerCase().startsWith("max-age")) { + var maxAgeBinding = entry.split("="); + if (maxAgeBinding.length > 1) { + maxAge = Integer.valueOf(maxAgeBinding[1]); + } + break; + } + } + } + return maxAge; + } + + public static void clear() { + lruCache.clear(); + } + + public static int getNumEntries() { + return lruCache.getNumEntries(); + } + + public static List getFileSizesTestOnly() { + return lruCache.getFileSizesTestOnly(); + } + + /** Return a set of parameters that can be used to modify settings for testing purposes. */ + public static LRUCache.CacheTestParameters getCacheTestParameters() { + return lruCache.getCacheTestParameters(); + } + + public interface RequestMaker { + /** Executes the HTTP request and returns the response. */ + EnsoHttpResponse makeRequest() throws IOException, InterruptedException; + + /** + * Returns a hash key that can be used to uniquely identify this request. This will be used to + * decide if the `run` method should be executed, or if a cached response will be returned. The + * hash key should not be reversible. 
+ */ + String hashKey(); + + /** + * When a cached response is returned, instead of executing `makeRequest`, this method is used + * to construct the response. + */ + EnsoHttpResponse reconstructResponseFromCachedStream( + InputStream inputStream, Metadata metadata); + } + + public record Metadata(HttpHeaders headers, int statusCode) {} +} diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java index 86e9371787cc..3e4717fee5ab 100644 --- a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java +++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java @@ -1,6 +1,7 @@ package org.enso.base.enso_cloud; import java.io.IOException; +import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.net.http.HttpClient; @@ -9,8 +10,12 @@ import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; import java.util.List; import java.util.Properties; +import org.enso.base.cache.ResponseTooLargeException; import org.enso.base.net.URISchematic; import org.enso.base.net.URIWithSecrets; import org.graalvm.collections.Pair; @@ -58,44 +63,119 @@ public static EnsoHttpResponse makeRequest( HttpClient client, Builder builder, URIWithSecrets uri, - List> headers) - throws IllegalArgumentException, IOException, InterruptedException { + List> headers, + boolean useCache) + throws IllegalArgumentException, + IOException, + InterruptedException, + ResponseTooLargeException { // Build a new URI with the query arguments. 
URI resolvedURI = resolveURI(uri); - URI renderedURI = uri.render(); - boolean hasSecrets = - uri.containsSecrets() || headers.stream().anyMatch(p -> p.getRight().containsSecrets()); - if (hasSecrets) { - if (resolvedURI.getScheme() == null) { - throw new IllegalArgumentException("The URI must have a scheme."); + List> resolvedHeaders = + headers.stream() + .map( + pair -> { + return Pair.create(pair.getLeft(), resolveValue(pair.getRight())); + }) + .toList(); + + var requestMaker = + new RequestMaker(client, builder, uri, resolvedURI, headers, resolvedHeaders); + + if (!useCache) { + return requestMaker.makeRequest(); + } else { + return EnsoHTTPResponseCache.makeRequest(requestMaker); + } + } + + public static void deleteSecretFromCache(String secretId) { + EnsoSecretReader.removeFromCache(secretId); + } + + private static class RequestMaker implements EnsoHTTPResponseCache.RequestMaker { + private final HttpClient client; + private final Builder builder; + private final URIWithSecrets uri; + private final URI resolvedURI; + private final List> headers; + private final List> resolvedHeaders; + + RequestMaker( + HttpClient client, + Builder builder, + URIWithSecrets uri, + URI resolvedURI, + List> headers, + List> resolvedHeaders) { + this.client = client; + this.builder = builder; + this.uri = uri; + this.resolvedURI = resolvedURI; + this.headers = headers; + this.resolvedHeaders = resolvedHeaders; + } + + @Override + public EnsoHttpResponse makeRequest() throws IOException, InterruptedException { + boolean hasSecrets = + uri.containsSecrets() || headers.stream().anyMatch(p -> p.getRight().containsSecrets()); + if (hasSecrets) { + if (resolvedURI.getScheme() == null) { + throw new IllegalArgumentException("The URI must have a scheme."); + } + + if (!resolvedURI.getScheme().equalsIgnoreCase("https")) { + throw new IllegalArgumentException( + "Secrets are not allowed in HTTP connections, use HTTPS instead."); + } } - if 
(!resolvedURI.getScheme().equalsIgnoreCase("https")) { - throw new IllegalArgumentException( - "Secrets are not allowed in HTTP connections, use HTTPS instead."); + builder.uri(resolvedURI); + + for (Pair resolvedHeader : resolvedHeaders) { + builder.header(resolvedHeader.getLeft(), resolvedHeader.getRight()); } + + // Build and Send the request. + var httpRequest = builder.build(); + var bodyHandler = HttpResponse.BodyHandlers.ofInputStream(); + var javaResponse = client.send(httpRequest, bodyHandler); + + URI renderedURI = uri.render(); + + return new EnsoHttpResponse( + renderedURI, javaResponse.headers(), javaResponse.body(), javaResponse.statusCode()); } - builder.uri(resolvedURI); + /** Sorts the header by header name and value. */ + @Override + public String hashKey() { + var sortedHeaders = resolvedHeaders.stream().sorted(headerNameComparator).toList(); + List keyStrings = new ArrayList<>(sortedHeaders.size() + 1); + keyStrings.add(resolvedURI.toString()); - // Resolve the header arguments. - for (Pair header : headers) { - builder.header(header.getLeft(), resolveValue(header.getRight())); + for (Pair resolvedHeader : sortedHeaders) { + keyStrings.add(resolvedHeader.getLeft()); + keyStrings.add(resolvedHeader.getRight()); + } + + return Integer.toString(Arrays.deepHashCode(keyStrings.toArray())); } - // Build and Send the request. 
- var httpRequest = builder.build(); - var bodyHandler = HttpResponse.BodyHandlers.ofInputStream(); - var javaResponse = client.send(httpRequest, bodyHandler); + @Override + public EnsoHttpResponse reconstructResponseFromCachedStream( + InputStream inputStream, EnsoHTTPResponseCache.Metadata metadata) { + URI renderedURI = uri.render(); - // Extract parts of the response - return new EnsoHttpResponse( - renderedURI, javaResponse.headers(), javaResponse.body(), javaResponse.statusCode()); + return new EnsoHttpResponse( + renderedURI, metadata.headers(), inputStream, metadata.statusCode()); + } } - public static void deleteSecretFromCache(String secretId) { - EnsoSecretReader.removeFromCache(secretId); - } + private static final Comparator> headerNameComparator = + Comparator.comparing((Pair pair) -> pair.getLeft()) + .thenComparing(Comparator.comparing(pair -> pair.getRight())); } diff --git a/test/Base_Tests/src/Network/Http_Spec.enso b/test/Base_Tests/src/Network/Http_Spec.enso index bcf256f71fd8..b72bb1175090 100644 --- a/test/Base_Tests/src/Network/Http_Spec.enso +++ b/test/Base_Tests/src/Network/Http_Spec.enso @@ -3,6 +3,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.Forbidden_Operation import Standard.Base.Errors.Common.Syntax_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Network.HTTP.Cache_Policy.Cache_Policy import Standard.Base.Network.HTTP.HTTP_Error.HTTP_Error import Standard.Base.Network.HTTP.Request.Request import Standard.Base.Network.HTTP.Response.Response @@ -184,11 +185,11 @@ add_specs suite_builder = group_builder.specify "can select the version" <| Test.with_retries <| req = Request.get url_get - r2 = HTTP.new version=HTTP_Version.HTTP_2 . request req . decode_as_json + r2 = HTTP.new version=HTTP_Version.HTTP_2 . request req cache_policy=..No_Cache . decode_as_json r2.at "headers" . at "Connection" . should_equal "Upgrade, HTTP2-Settings" r2.at "headers" . at "Http2-Settings" . 
should_contain "AA" - r1 = HTTP.new version=HTTP_Version.HTTP_1_1 . request req . decode_as_json + r1 = HTTP.new version=HTTP_Version.HTTP_1_1 . request req cache_policy=..No_Cache . decode_as_json header_names = r1.at "headers" . field_names . map (s-> s.to_case Case.Lower) header_names.should_not_contain "connection" header_names.should_not_contain "http2-settings" @@ -606,7 +607,7 @@ add_specs suite_builder = r1.should_be_a Response group_builder.specify "should be able to handle server crash that closes stream abruptly" pending=pending_has_url <| - err = Data.fetch (base_url_with_slash+"crash?type=stream") + err = Data.fetch (base_url_with_slash+"crash?type=stream") cache_policy=..No_Cache err.should_fail_with HTTP_Error err.catch.message . should_equal "An IO error has occurred: java.io.IOException: closed" diff --git a/test/Table_Tests/src/IO/Fetch_Spec.enso b/test/Table_Tests/src/IO/Fetch_Spec.enso index d1943e9da6fd..125d84123f08 100644 --- a/test/Table_Tests/src/IO/Fetch_Spec.enso +++ b/test/Table_Tests/src/IO/Fetch_Spec.enso @@ -1,18 +1,28 @@ from Standard.Base import all import Standard.Base.Data.Base_64.Base_64 +import Standard.Base.Errors.Common.Response_Too_Large import Standard.Base.Errors.File_Error.File_Error +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Network.HTTP.Cache_Policy.Cache_Policy +import Standard.Base.Network.HTTP.Request.Request +import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Base.Network.HTTP.Response.Response import Standard.Base.Runtime.Context +import Standard.Base.Runtime.Ref.Ref from Standard.Table import all import Standard.Table.Errors.Invalid_JSON_Format from Standard.Test import all +import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup from enso_dev.Base_Tests.Network.Http.Http_Test_Setup import base_url_with_slash, pending_has_url import project.Util +polyglot java import java.lang.IllegalArgumentException +polyglot java import 
org.enso.base.enso_cloud.EnsoHTTPResponseCache + main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder @@ -94,3 +104,334 @@ add_specs suite_builder = # Reinterpreting as TSV: r4 = (uri.add_query_argument "Content-Type" "text/tab-separated-values").fetch r4.should_equal (Table.from_rows ["Column 1"] [["A,B"], ["1,x"], ["3,y"]]) + + suite_builder.group "Response caching" pending=pending_has_url group_builder-> + get_num_response_cache_entries = + EnsoHTTPResponseCache.getNumEntries + with_counts ~action = + before_count = get_num_response_cache_entries + action + after_count = get_num_response_cache_entries + [before_count, after_count] + + reset_size_limits = + EnsoHTTPResponseCache.getCacheTestParameters.clearMaxFileSizeOverrideTestOnly + EnsoHTTPResponseCache.getCacheTestParameters.clearMaxTotalCacheSizeOverrideTestOnly + + expect_counts expected_counts ~action = + counts = with_counts action + counts . should_equal expected_counts frames_to_skip=1 + + get_cache_file_sizes : Vector Integer + get_cache_file_sizes -> Vector Integer = + Vector.from_polyglot_array EnsoHTTPResponseCache.getFileSizesTestOnly . sort Sort_Direction.Ascending + + url0 = base_url_with_slash+'test_download?max-age=16&length=10' + url1 = base_url_with_slash+'test_download?max-age=16&length=20' + url_post = base_url_with_slash + "post" + headers0 = [Header.new "A-Header" "a-header-value", Header.new "A-Header" "a-header-value"] + headers1 = [Header.new "A-Header" "a-different-header-value", Header.new "A-Header" "a-header-value"] + + # Run the request(s) twice and confirm the results are the same + check_same_results ~action = + results = 0.up_to 2 . map (_-> action) + results.distinct.length . should_equal 1 + + group_builder.specify "Cache should return the same repsonse" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + check_same_results <| + HTTP.fetch url0 . decode_as_text + get_num_response_cache_entries . 
should_equal 1 + check_same_results <| + HTTP.fetch url1 . decode_as_text + get_num_response_cache_entries . should_equal 2 + + HTTP.clear_response_cache + + HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text + HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache . decode_as_text + url1_body_1 = HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text + HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache . decode_as_text . should_equal url1_body_1 + get_num_response_cache_entries . should_equal 2 + + HTTP.clear_response_cache + + url0_body_2 = HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text + HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache . decode_as_text . should_not_equal url0_body_2 + url1_body_2 = HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text + HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache . decode_as_text . should_not_equal url1_body_2 + get_num_response_cache_entries . should_equal 0 + + group_builder.specify "Cache should handle many entries" pending=pending_has_url <| Test.with_retries <| + count = 20 + + HTTP.clear_response_cache + check_same_results <| + 0.up_to count . map i-> + HTTP.fetch base_url_with_slash+"test_download?length="+i.to_text . decode_as_text + get_num_response_cache_entries . should_equal count + + HTTP.clear_response_cache + check_same_results <| + 0.up_to count . each i-> + headers = [Header.new "A-Header" "a-header-value-"+i.to_text] + HTTP.fetch base_url_with_slash+"test_download?length=8" headers=headers . decode_as_text + get_num_response_cache_entries . 
should_equal count + + group_builder.specify "Cache policy should work for HTTP.fetch" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + expect_counts [0, 0] <| + HTTP.fetch url0 cache_policy=Cache_Policy.No_Cache + HTTP.fetch url1 cache_policy=Cache_Policy.No_Cache + expect_counts [0, 2] <| + HTTP.fetch url0 cache_policy=Cache_Policy.Use_Cache + HTTP.fetch url1 cache_policy=Cache_Policy.Use_Cache + HTTP.clear_response_cache + expect_counts [0, 2] <| + HTTP.fetch url0 + HTTP.fetch url1 + + group_builder.specify "Cache policy should work for Data.fetch" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + expect_counts [0, 0] <| + Data.fetch url0 cache_policy=Cache_Policy.No_Cache + Data.fetch url1 cache_policy=Cache_Policy.No_Cache + expect_counts [0, 2] <| + Data.fetch url0 cache_policy=Cache_Policy.Use_Cache + Data.fetch url1 cache_policy=Cache_Policy.Use_Cache + HTTP.clear_response_cache + expect_counts [0, 2] <| + Data.fetch url0 + Data.fetch url1 + + group_builder.specify "Should not cache Data.download" pending=pending_has_url <| Test.with_retries <| + target_file = enso_project.data / "transient" / "my_download0.txt" + + HTTP.clear_response_cache + target_file.delete_if_exists + + Data.download url0 target_file + get_num_response_cache_entries . should_equal 0 + + target_file.delete_if_exists + + group_builder.specify "Data.download is not affected by caching limits" pending=pending_has_url <| Test.with_retries <| + target_file = enso_project.data / "transient" / "my_download0.txt" + Panic.with_finalizer reset_size_limits <| + EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly 120 + EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly 100 + Data.download base_url_with_slash+"test_download?length=200" target_file + target_file.read.length . 
should_equal 200 + target_file.delete_if_exists + + group_builder.specify "Should not cache for methods other than GET" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + expect_counts [0, 0] <| + Data.post url_post (Request_Body.Text "hello world") + + group_builder.specify "HTTP request with a non-GET method should reject a cache_policy=Use_Cache argument" pending=pending_has_url <| Test.with_retries <| + request = Request.new HTTP_Method.Post url_post [] Request_Body.Empty + HTTP.new.request request cache_policy=Cache_Policy.Use_Cache . should_fail_with Illegal_Argument + + group_builder.specify "HTTP request with a non-GET method should not reject a cache_policy=No_Cache argument" pending=pending_has_url <| Test.with_retries <| + request = Request.new HTTP_Method.Post url_post [] Request_Body.Empty + HTTP.new.request request cache_policy=Cache_Policy.No_Cache . should_succeed + + group_builder.specify "Should be able to clear caches" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + Data.fetch url0 + get_num_response_cache_entries . should_equal 1 + HTTP.clear_response_cache + get_num_response_cache_entries . 
should_equal 0 + + group_builder.specify "Cache key should depend on the headers" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + expect_counts [0, 2] <| + Data.fetch url0 headers=headers0 + Data.fetch url0 headers=headers1 + Data.fetch url0 headers=headers1 + Data.fetch url0 headers=headers0 + Data.fetch url0 headers=headers0 + Data.fetch url0 headers=headers1 + + group_builder.specify "Cache key should not depend on header order" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + header0 = Header.new "Abc" "eef" + header1 = Header.new "Abc" "def" + header2 = Header.new "Ghi" "jkl" + orders = [[header0, header1, header2], [header1, header2, header0], [header2, header1, header0]] + responses = orders.map headers-> + Data.fetch url0 headers=headers . decode_as_text + get_num_response_cache_entries . should_equal 1 + responses.distinct.length . should_equal 1 + + ## Fetching the trigger uri causes stale entries to be removed, since the + uri is always different and so the caching and cleanup logic is run. + fake_now = Date_Time.now + trigger_uri_serial = Ref.new 0 + make_trigger_uri = + serial = trigger_uri_serial.get + trigger_uri_serial.modify (_ + 1) + base_url_with_slash+'test_download?max-age=10000&length=50&abc='+serial.to_text + set_time_and_get_count advance_secs = + EnsoHTTPResponseCache.getCacheTestParameters.setNowOverrideTestOnly (fake_now + (Duration.new seconds=advance_secs)) + trigger_uri = make_trigger_uri + Data.fetch trigger_uri + get_num_response_cache_entries + fake_time_resetter = + EnsoHTTPResponseCache.getCacheTestParameters.clearNowOverrideTestOnly + + group_builder.specify "The cache should expire stale entries" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + set_time_and_get_count 0 # Initialize fake now. 
+ + Data.fetch base_url_with_slash+'test_download?max-age=100&length=50' + Data.fetch base_url_with_slash+'test_download?max-age=200&length=50' + Data.fetch base_url_with_slash+'test_download?max-age=200&length=51' + Data.fetch base_url_with_slash+'test_download?max-age=300&length=50' + + Panic.with_finalizer fake_time_resetter <| + ## The count will increase by 1 each time, but decrease by the + number of entries removed + set_time_and_get_count 0 . should_equal 6 + set_time_and_get_count 90 . should_equal 7 + set_time_and_get_count 110 . should_equal 7 + set_time_and_get_count 190 . should_equal 8 + set_time_and_get_count 202 . should_equal 7 + set_time_and_get_count 292 . should_equal 8 + set_time_and_get_count 301 . should_equal 8 + + group_builder.specify "The cache should use the Age response header" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + set_time_and_get_count 0 # Initialize fake now. + + Data.fetch base_url_with_slash+'test_download?max-age=100&age=50&length=50' # ttl 50 + Data.fetch base_url_with_slash+'test_download?max-age=100&age=30&length=50' # ttl 70 + Data.fetch base_url_with_slash+'test_download?max-age=120&age=50&length=50' # ttl 70 + Data.fetch base_url_with_slash+'test_download?max-age=70&&length=50' # ttl 70 + Data.fetch base_url_with_slash+'test_download?max-age=160&age=70&length=50' # ttl 90 + + Panic.with_finalizer fake_time_resetter <| + ## The count will increase by 1 each time, but decrease by the + number of entries removed + set_time_and_get_count 0 . should_equal 7 + set_time_and_get_count 40 . should_equal 8 + set_time_and_get_count 51 . should_equal 8 + set_time_and_get_count 68 . should_equal 9 + set_time_and_get_count 72 . should_equal 7 + set_time_and_get_count 88 . should_equal 8 + set_time_and_get_count 93 . 
should_equal 8 + + download size = + Data.fetch base_url_with_slash+'test_download?length='+size.to_text + + group_builder.specify "Will remove old cache files to keep the total cache size under the total cache size limit" pending=pending_has_url <| Test.with_retries <| + Panic.with_finalizer reset_size_limits <| + reset_size_limits + EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly 100 + + download 30 + download 50 + download 10 + get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50] + download 20 + get_cache_file_sizes . should_equal_ignoring_order [10, 20, 50] + download 40 + get_cache_file_sizes . should_equal_ignoring_order [10, 20, 40] + download 35 + get_cache_file_sizes . should_equal_ignoring_order [20, 35, 40] + + group_builder.specify "Will remove old cache files based on how recently they were used" pending=pending_has_url <| Test.with_retries <| + Panic.with_finalizer reset_size_limits <| + reset_size_limits + EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly 100 + + download 30 + download 50 + download 10 + get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50] + # Use 30 again so it's considered more recently used. + download 30 + get_cache_file_sizes . should_equal_ignoring_order [10, 30, 50] + download 20 + get_cache_file_sizes . should_equal_ignoring_order [10, 20, 30] + download 45 + get_cache_file_sizes . should_equal_ignoring_order [20, 30, 45] + + group_builder.specify "Will not cache a file with a content length greater than the single file limit" pending=pending_has_url <| Test.with_retries <| + Panic.with_finalizer reset_size_limits <| + reset_size_limits + EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly 100 + download 110 . 
should_fail_with (Response_Too_Large.Error 100) + + + group_builder.specify "Will not cache a file without a content length, but which is greater than the single file limit" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + Panic.with_finalizer reset_size_limits <| + reset_size_limits + EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly 100 + url = base_url_with_slash+'test_download?omit-content-length=1&length=110' + Data.fetch url . should_fail_with (Response_Too_Large.Error 100) + + group_builder.specify "Should not cache if the request fails" pending=pending_has_url <| Test.with_retries <| + HTTP.clear_response_cache + + HTTP.fetch url0 + get_num_response_cache_entries . should_equal 1 + HTTP.fetch base_url_with_slash+'crash' + get_num_response_cache_entries . should_equal 1 + HTTP.fetch base_url_with_slash+'nonexistent_endpoint' + get_num_response_cache_entries . should_equal 1 + + cloud_setup = Cloud_Tests_Setup.prepare + + group_builder.specify "Should work with secrets in the URI" pending=pending_has_url <| Test.with_retries <| + cloud_setup.with_prepared_environment <| + secret1 = Enso_Secret.create "http-cache-secret-1-"+Random.uuid "My Value" + secret2 = Enso_Secret.create "http-cache-secret-2-"+Random.uuid "Some Value" + cleanup = + secret1.delete + secret2.delete + Panic.with_finalizer cleanup <| + # Requests differ only in secrets in URI. + url1 = URI.from 'https://httpbin.org/bytes/50' + . add_query_argument "arg1" secret1 + . add_query_argument "arg2" "plain value" + uri2 = URI.from 'https://httpbin.org/bytes/50' + . add_query_argument "arg1" secret2 + . add_query_argument "arg2" "plain value" + + HTTP.clear_response_cache + HTTP.fetch url1 + get_num_response_cache_entries . should_equal 1 + HTTP.fetch uri2 + get_num_response_cache_entries . 
should_equal 2 + + group_builder.specify "Should work with secrets in the headers" pending=pending_has_url <| Test.with_retries <| + cloud_setup.with_prepared_environment <| + secret1 = Enso_Secret.create "http-cache-secret-1-"+Random.uuid "My Value" + secret2 = Enso_Secret.create "http-cache-secret-2-"+Random.uuid "Some Value" + cleanup = + secret1.delete + secret2.delete + Panic.with_finalizer cleanup <| + # Requests differ only in secrets in headers. + uri = URI.from 'https://httpbin.org/bytes/50' + headers1 = [Header.new "A-Header" secret1] + headers2 = [Header.new "A-Header" secret2] + + HTTP.clear_response_cache + HTTP.fetch headers=headers1 uri + get_num_response_cache_entries . should_equal 1 + HTTP.fetch headers=headers2 uri + get_num_response_cache_entries . should_equal 2 + + group_builder.specify "Should not be able to set the cache limits higher than the real limits" pending=pending_has_url <| Test.with_retries <| + Test.expect_panic IllegalArgumentException <| + EnsoHTTPResponseCache.getCacheTestParameters.setMaxFileSizeOverrideTestOnly (2 * 1024 * 1024 * 1024 + 1) . should_fail_with Illegal_Argument + Test.expect_panic IllegalArgumentException <| + EnsoHTTPResponseCache.getCacheTestParameters.setMaxTotalCacheSizeOverrideTestOnly (20 * 1024 * 1024 * 1024 + 1) . 
should_fail_with Illegal_Argument diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/HTTPTestHelperServer.java b/tools/http-test-helper/src/main/java/org/enso/shttp/HTTPTestHelperServer.java index a1f222706199..bc92132a095d 100644 --- a/tools/http-test-helper/src/main/java/org/enso/shttp/HTTPTestHelperServer.java +++ b/tools/http-test-helper/src/main/java/org/enso/shttp/HTTPTestHelperServer.java @@ -98,6 +98,7 @@ private static void setupEndpoints( server.addHandler("/test_basic_auth", new BasicAuthTestHandler()); server.addHandler("/crash", new CrashingTestHandler()); server.addHandler("/test_redirect", new RedirectTestHandler("/testfiles/js.txt")); + server.addHandler("/test_download", new DownloadTestHandler()); // Cloud mock if (cloudMockSetup != null) { diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/test_helpers/DownloadTestHandler.java b/tools/http-test-helper/src/main/java/org/enso/shttp/test_helpers/DownloadTestHandler.java new file mode 100644 index 000000000000..85c306ebb5b5 --- /dev/null +++ b/tools/http-test-helper/src/main/java/org/enso/shttp/test_helpers/DownloadTestHandler.java @@ -0,0 +1,58 @@ +package org.enso.shttp.test_helpers; + +import com.sun.net.httpserver.HttpExchange; +import java.io.IOException; +import java.io.OutputStream; +import java.net.URI; +import java.util.Random; +import org.apache.http.client.utils.URIBuilder; +import org.enso.shttp.SimpleHttpHandler; + +/** + * A handler that generates a data response, with optional max-age and Age headers. The data + * response consists of a string of random letters of the requested length. 
+ */ +public class DownloadTestHandler extends SimpleHttpHandler { + private Random random = new Random(42); + + @Override + protected void doHandle(HttpExchange exchange) throws IOException { + URI uri = exchange.getRequestURI(); + URIBuilder builder = new URIBuilder(uri); + + int length = 10; + String maxAge = null; + String age = null; + boolean omitContentLength = false; + for (var queryPair : builder.getQueryParams()) { + switch (queryPair.getName()) { + case "length" -> length = Integer.parseInt(queryPair.getValue()); + case "max-age" -> maxAge = queryPair.getValue(); + case "age" -> age = queryPair.getValue(); + case "omit-content-length" -> omitContentLength = true; + default -> {} + } + } + + byte responseData[] = new byte[length]; + for (int i = 0; i < length; ++i) { + responseData[i] = (byte) (97 + random.nextInt(26)); + } + + if (maxAge != null) { + exchange.getResponseHeaders().add("Cache-Control", "max-age=" + maxAge); + } + + if (age != null) { + exchange.getResponseHeaders().add("Age", age.toString()); + } + + long contentLength = omitContentLength ? 0 : responseData.length; + exchange.sendResponseHeaders(200, contentLength); + + try (OutputStream os = exchange.getResponseBody()) { + os.write(responseData); + } + exchange.close(); + } +} From 29f1f0d4e1926f49bf181ec3bdc0c13bafbf0093 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Grabarz?= Date: Wed, 30 Oct 2024 14:48:59 +0100 Subject: [PATCH 008/286] Add IDE version to window title. (#11446) Fixes #10966 ![image](https://github.com/user-attachments/assets/a46e38e9-1ff3-4eb4-9a88-901d684dc205) --- CHANGELOG.md | 2 ++ app/common/src/appConfig.js | 2 +- app/gui/index.html | 2 +- app/gui/vite.config.ts | 5 +++++ 4 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9e54b5f091c3..920a52e90d62 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ - [New dropdown-based component menu][11398]. 
- [Size of Table Input Widget is preserved and restored after project re-opening][11435] +- [Added application version to the title bar.][11446] [11151]: https://github.com/enso-org/enso/pull/11151 [11271]: https://github.com/enso-org/enso/pull/11271 @@ -23,6 +24,7 @@ [11388]: https://github.com/enso-org/enso/pull/11388 [11398]: https://github.com/enso-org/enso/pull/11398 [11435]: https://github.com/enso-org/enso/pull/11435 +[11446]: https://github.com/enso-org/enso/pull/11446 #### Enso Standard Library diff --git a/app/common/src/appConfig.js b/app/common/src/appConfig.js index 8ad2ea51f0e3..5dc227bad45b 100644 --- a/app/common/src/appConfig.js +++ b/app/common/src/appConfig.js @@ -44,7 +44,7 @@ export async function readEnvironmentFromFile() { if (!isProduction || entries.length > 0) { Object.assign(process.env, variables) } - process.env.ENSO_CLOUD_DASHBOARD_VERSION ??= buildInfo.version + process.env.ENSO_CLOUD_DASHBOARD_VERSION ??= buildInfo.version ?? '0.0.0-dev' process.env.ENSO_CLOUD_DASHBOARD_COMMIT_HASH ??= buildInfo.commit } catch (error) { process.env.ENSO_CLOUD_DASHBOARD_VERSION ??= buildInfo.version diff --git a/app/gui/index.html b/app/gui/index.html index 055ad35bde19..7c05be49a9e5 100644 --- a/app/gui/index.html +++ b/app/gui/index.html @@ -37,7 +37,7 @@ maximum-scale = 1.0, user-scalable = no" /> - Enso Analytics + Enso %ENSO_IDE_VERSION%

diff --git a/app/gui/vite.config.ts b/app/gui/vite.config.ts index d28babae7067..e85808cce522 100644 --- a/app/gui/vite.config.ts +++ b/app/gui/vite.config.ts @@ -26,6 +26,10 @@ await readEnvironmentFromFile() const entrypoint = process.env.E2E === 'true' ? './src/project-view/e2e-entrypoint.ts' : './src/entrypoint.ts' +// NOTE(Frizi): This rename is for the sake of forward compatibility with not yet merged config refactor on bazel branch, +// and because Vite's HTML env replacements only work with import.meta.env variables, not defines. +process.env.ENSO_IDE_VERSION = process.env.ENSO_CLOUD_DASHBOARD_VERSION + // https://vitejs.dev/config/ export default defineConfig({ root: fileURLToPath(new URL('.', import.meta.url)), @@ -65,6 +69,7 @@ export default defineConfig({ '#': fileURLToPath(new URL('./src/dashboard', import.meta.url)), }, }, + envPrefix: 'ENSO_IDE_', define: { ...getDefines(), IS_CLOUD_BUILD: JSON.stringify(IS_CLOUD_BUILD), From 6566b2da2ff36bb0b677754331915f6cec9c826b Mon Sep 17 00:00:00 2001 From: Ilya Bogdanov Date: Wed, 30 Oct 2024 18:14:28 +0400 Subject: [PATCH 009/286] Fix help panel (#11421) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #11392 The issue was caused by somewhat cumbersome logic of updating/overriding displayed suggestion. I’m not sure if it even was working correctly at any time. 
https://github.com/user-attachments/assets/51e6fcd4-2198-40a2-86e7-8fcfa8b8b8d5 --- .../components/ComponentDocumentation.vue | 26 +++++++++---------- .../components/DocumentationPanel.vue | 4 +-- .../components/DocumentationPanel/history.ts | 26 ++++++++++++++----- .../project-view/components/GraphEditor.vue | 6 +---- 4 files changed, 35 insertions(+), 27 deletions(-) diff --git a/app/gui/src/project-view/components/ComponentDocumentation.vue b/app/gui/src/project-view/components/ComponentDocumentation.vue index 5a91a5b85a09..c28c3cadffcf 100644 --- a/app/gui/src/project-view/components/ComponentDocumentation.vue +++ b/app/gui/src/project-view/components/ComponentDocumentation.vue @@ -2,12 +2,12 @@ import DocumentationPanel from '@/components/DocumentationPanel.vue' import { injectGraphSelection } from '@/providers/graphSelection' import { useGraphStore } from '@/stores/graph' -import { computed } from 'vue' +import { computed, watch } from 'vue' import type { SuggestionId } from 'ydoc-shared/languageServerTypes/suggestions' -import { Err, Ok } from 'ydoc-shared/util/data/result' +import { Err, Ok, unwrapOr } from 'ydoc-shared/util/data/result' -const props = defineProps<{ displayedSuggestionId: SuggestionId | null }>() -const emit = defineEmits<{ 'update:displayedSuggestionId': [SuggestionId] }>() +// A displayed component can be overridren by this model, e.g. when the user clicks links in the documenation. +const overrideDisplayed = defineModel({ default: null }) const selection = injectGraphSelection() const graphStore = useGraphStore() @@ -19,20 +19,20 @@ function docsForSelection() { return Ok(suggestionId) } -const displayedId = computed(() => - props.displayedSuggestionId != null ? Ok(props.displayedSuggestionId) : docsForSelection(), -) +const docs = computed(() => docsForSelection()) +// When the selection changes, we cancel the displayed suggestion override that can be in place. 
+watch(docs, (_) => (overrideDisplayed.value = null)) + +const displayedId = computed(() => overrideDisplayed.value ?? unwrapOr(docs.value, null)) diff --git a/app/gui/src/project-view/components/RecordControl.vue b/app/gui/src/project-view/components/RecordControl.vue index 8bc5f30e70bf..8f8e50ef3455 100644 --- a/app/gui/src/project-view/components/RecordControl.vue +++ b/app/gui/src/project-view/components/RecordControl.vue @@ -1,13 +1,14 @@ diff --git a/app/gui/src/project-view/components/StandaloneButton.vue b/app/gui/src/project-view/components/StandaloneButton.vue new file mode 100644 index 000000000000..4c965dd46c58 --- /dev/null +++ b/app/gui/src/project-view/components/StandaloneButton.vue @@ -0,0 +1,26 @@ + + + + + diff --git a/app/gui/src/project-view/components/SvgButton.vue b/app/gui/src/project-view/components/SvgButton.vue index bbab27dff7d9..068b3f758eeb 100644 --- a/app/gui/src/project-view/components/SvgButton.vue +++ b/app/gui/src/project-view/components/SvgButton.vue @@ -5,7 +5,7 @@ import type { URLString } from '@/util/data/urlString' import type { Icon } from '@/util/iconName' const _props = defineProps<{ - name: Icon | URLString + name?: Icon | URLString | undefined label?: string | undefined disabled?: boolean title?: string | undefined @@ -14,7 +14,7 @@ const _props = defineProps<{ From 610ee5fdec4541453560d5ec93a5242ca514f4d5 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Thu, 31 Oct 2024 10:04:52 +0000 Subject: [PATCH 013/286] Expand and improve `pretty` for core data types, vector and table. (#11438) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - ✅ Alter default `Any.pretty` so constructor is prefixed with type name (as needed now). ![image](https://github.com/user-attachments/assets/72d5ff2f-b567-47e2-becf-2e4acd4d089d) - ✅ Tests for `pretty` on `Date`. - `pretty` for ✅ `Date_Time` and ✅ `Time_Of_Day` improved to not have as much noise. 
- `pretty` for ✅ `Period`, ✅ `Date_Range` and ✅ `Range`. - Added custom `pretty` for ✅ `Vector` and ✅ `Array` as built-in method doesn't call through to overrides. - Added custom `pretty` for ✅ `Column` and ✅ `Table`. - Bug fix for `pretty` in `Time_Zone` so calls through to `pretty` of the zone_id to ensure safely escaped. - Initial `default_widget` for `Date` and `Time_Of_Day`. - Improve widget for `Date.to_date_time`. ![image](https://github.com/user-attachments/assets/18bc1d88-8ea9-42d0-8a9c-bc873e5d6835) - `to_text`, `to_display_text` and `pretty` for `Enso_Secret` ![image](https://github.com/user-attachments/assets/d850c109-d1af-4b6f-a450-013c4d137805) - private constructor for `Enso_Secret` as can't be correctly built directly. - Use `_` for the testing methods in `HTTP` to clarify they shouldn't be used in general code. --- .../lib/Standard/Base/0.0.0-dev/src/Any.enso | 19 ++++++--- .../Base/0.0.0-dev/src/Data/Array.enso | 7 ++++ .../Base/0.0.0-dev/src/Data/Range.enso | 17 ++++++++ .../Base/0.0.0-dev/src/Data/Time/Date.enso | 29 +++++++++++--- .../0.0.0-dev/src/Data/Time/Date_Range.enso | 17 +++++++- .../0.0.0-dev/src/Data/Time/Date_Time.enso | 32 ++++++++++----- .../Base/0.0.0-dev/src/Data/Time/Period.enso | 20 ++++++++++ .../0.0.0-dev/src/Data/Time/Time_Of_Day.enso | 40 ++++++++++++++----- .../0.0.0-dev/src/Data/Time/Time_Zone.enso | 7 ++-- .../Base/0.0.0-dev/src/Data/Vector.enso | 8 ++++ .../0.0.0-dev/src/Enso_Cloud/Enso_File.enso | 1 + .../0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso | 37 ++++++++++++++++- .../Base/0.0.0-dev/src/Network/HTTP.enso | 16 ++++---- .../0.0.0-dev/src/Network/HTTP/Response.enso | 2 +- .../src/Network/HTTP/Response_Body.enso | 2 +- .../0.0.0-dev/src/System/File_Format.enso | 2 +- .../Standard/Table/0.0.0-dev/src/Column.enso | 7 ++++ .../Standard/Table/0.0.0-dev/src/Table.enso | 6 +++ .../Visualization/0.0.0-dev/src/Widgets.enso | 8 +++- .../builtin/text/AnyPrettyNode.java | 2 +- test/Base_Tests/src/Data/Array_Spec.enso | 8 ++++ 
test/Base_Tests/src/Data/Range_Spec.enso | 19 +++++++++ .../src/Data/Time/Date_Range_Spec.enso | 8 +++- test/Base_Tests/src/Data/Time/Date_Spec.enso | 6 +++ .../src/Data/Time/Date_Time_Spec.enso | 13 ++++++ .../src/Data/Time/Day_Of_Week_Spec.enso | 5 +++ .../Base_Tests/src/Data/Time/Period_Spec.enso | 13 ++++++ .../src/Data/Time/Time_Of_Day_Spec.enso | 11 +++++ test/Base_Tests/src/Data/Vector_Spec.enso | 6 +++ test/Base_Tests/src/Network/Http_Spec.enso | 10 ++--- .../src/In_Memory/Column_Spec.enso | 15 +++++++ .../Table_Tests/src/In_Memory/Table_Spec.enso | 18 +++++++++ 32 files changed, 354 insertions(+), 57 deletions(-) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso index 670972c87b08..0f0196181e4c 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso @@ -78,14 +78,23 @@ type Any to_text : Text to_text self = @Builtin_Method "Any.to_text" - ## ICON convert - Generic conversion of an arbitrary Enso value to a corresponding human-readable - representation. + ## GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. > Example - Getting a human-readable representation of the number 7. + Getting the Enso code of the number 7. - 7.to_text + 7.pretty + ## Returns a Text + 7 + + > Example + Getting the Enso code of the text Hello World!. + + "Hello World!".pretty + ## Returns a Text + 'Hello World!' 
pretty : Text pretty self = @Builtin_Method "Any.pretty" diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso index e8478abde95a..bf1390ab66e2 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso @@ -795,6 +795,13 @@ type Array to_display_text : Text to_display_text self = self.short_display_text max_entries=40 + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + pretty : Text + pretty self = self.map .pretty . join ", " "[" "]" + ## ICON column_add Combines all the elements of a non-empty array using a binary operation. If the array is empty, it returns `if_empty`. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso index e2e457c7ee1a..8c588cb434cd 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso @@ -574,6 +574,23 @@ type Range step = if self.step.abs == 1 then "" else " by " + self.step.to_display_text start + step + "]" + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the range 1 until 29. + + 1.up_to 29 . 
pretty + ## Returns a Text + Range.new 1 29 + pretty : Text + pretty self = + start = self.start.pretty + end = self.end.pretty + "Range.new " + start + " " + end + (if self.step.abs == 1 then "" else " step=" + self.step.abs.pretty) + ## PRIVATE throw_zero_step_error = Error.throw (Illegal_State.Error "A range with step = 0 is ill-formed.") diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso index 408aded58693..cb25b88edb03 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso @@ -26,7 +26,8 @@ import project.Panic.Panic from project.Data.Boolean import Boolean, False, True from project.Data.Text.Extensions import all from project.Data.Time.Date_Time import ensure_in_epoch -from project.Metadata import Display, Widget +from project.Metadata import Display, make_single_choice, Widget +from project.Metadata.Choice import Option from project.Widget_Helpers import make_date_format_selector polyglot java import java.lang.ArithmeticException @@ -335,7 +336,7 @@ type Date Arguments: - period: the period to add to self. next : Date_Period -> Date - next self period=Date_Period.Day = self + period.to_period + next self period:Date_Period=..Day = self + period.to_period ## GROUP DateTime ICON time @@ -347,7 +348,7 @@ type Date Arguments: - period: the period to add to self. previous : Date_Period -> Date - previous self period=Date_Period.Day = self - period.to_period + previous self period:Date_Period=..Day = self - period.to_period ## GROUP DateTime ICON time @@ -492,6 +493,8 @@ type Date from Standard.Base import Date, Time_Of_Day, Time_Zone example_to_time = Date.new 2020 2 3 . 
to_date_time Time_Of_Day.new Time_Zone.utc + @time_of_day (Time_Of_Day.default_widget include_now=False) + @zone Time_Zone.default_widget to_date_time : Time_Of_Day -> Time_Zone -> Date_Time to_date_time self (time_of_day=Time_Of_Day.new) (zone=Time_Zone.system) = Time_Utils.make_zoned_date_time self time_of_day zone @@ -827,9 +830,25 @@ type Date format.format_date self ## PRIVATE - Convert to a Enso code representation of this Date. + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the date 29-October-2024. + + (Date.new 2024 10 29).pretty + ## Returns a Text + Date.new 2024 10 29 pretty : Text - pretty self = "(Date.new " + self.year.to_text + " " + self.month.to_text + " " + self.day.to_text + ")" + pretty self = "Date.new " + self.year.to_text + " " + self.month.to_text + " " + self.day.to_text + + ## PRIVATE + Gets the default drop down option for Date. + default_widget : Boolean -> Widget + default_widget (include_today:Boolean=False) = + options = [Option "" "Date.new"] + (if include_today then [Option "" "Date.today"] else []) + Widget.Single_Choice values=options display=Display.When_Modified ## PRIVATE week_days_between start end = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso index d0fba4049fae..e51d9f12062e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Range.enso @@ -101,9 +101,22 @@ type Date_Range start + step + "]" ## PRIVATE - Convert to a human-readable representation. + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the date range 10-September-2024 until + 29-October-2024. + + (Date.new 2024 09 10).up_to (Date.new 2024 10 29) . 
pretty + ## Returns a Text + Date_Range.new (Date.new 2024 09 10) (Date.new 2024 10 29) pretty : Text - pretty self = self.to_text + pretty self = + start = self.start.pretty + end = self.end.pretty + "Date_Range.new (" + start + ") (" + end + (if self.step == (Period.new days=1) then ")" else ") (" + self.step.pretty + ")") ## PRIVATE Converts this value to a JSON serializable object. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso index fe47ceb5e309..92a1bb70fdc9 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso @@ -836,18 +836,28 @@ type Date_Time self.format "yyyy-MM-dd "+time_format+zone_format ## PRIVATE - Convert to a Enso code representation of this Time_Of_Day. - pretty : Text - pretty self = "(Date_Time.new " + self.year.to_text + " " + self.month.to_text + " " + self.day.to_text - + (if self.hour == 0 then "" else " hour="+self.hour.to_text) - + (if self.minute == 0 then "" else " minute="+self.minute.to_text) - + (if self.second == 0 then "" else " second="+self.second.to_text) - + (if self.millisecond == 0 then "" else " millisecond="+self.millisecond.to_text) - + (if self.microsecond == 0 then "" else " microsecond="+self.microsecond.to_text) - + (if self.nanosecond == 0 then "" else " nanosecond="+self.nanosecond.to_text) - + (if self.zone == Time_Zone.system then "" else " zone="+self.zone.pretty) - + ")" + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the date 29-October-2024 12:34. 
+ (Date_Time.new 2024 10 29 12 34).pretty + ## Returns a Text + Date_Time.new 2024 10 29 12 34 + pretty : Text + pretty self = + parts = Vector.build builder-> + builder.append ("Date_Time.new " + self.year.to_text + " " + self.month.to_text + " " + self.day.to_text) + if self.hour != 0 then builder.append ((if builder.length!=1 then " hour=" else " ") + self.hour.to_text) + if self.minute != 0 then builder.append ((if builder.length!=2 then " minute=" else " ") + self.minute.to_text) + if self.second != 0 then builder.append ((if builder.length!=3 then " second=" else " ") + self.second.to_text) + if self.millisecond != 0 then builder.append ((if builder.length!=4 then " millisecond=" else " ") + self.millisecond.to_text) + if self.microsecond != 0 then builder.append ((if builder.length!=5 then " microsecond=" else " ") + self.microsecond.to_text) + if self.nanosecond != 0 then builder.append ((if builder.length!=6 then " nanosecond=" else " ") + self.nanosecond.to_text) + if self.zone != Time_Zone.system then builder.append ((if builder.length!=7 then " zone=(" else " (") + self.zone.pretty + ")") + parts.join "" ## PRIVATE Convert to a JavaScript Object representing a Date_Time. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso index 653688641e1e..3199648b14d4 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso @@ -209,6 +209,26 @@ type Period if self.days==0 . not then builder.append ["days", self.days] JS_Object.from_pairs v + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the period 1 month and 2 days. 
+ + (Period.new months=1 days=2).pretty + ## Returns a Text + Time_Of_Day.new 12 34 millisecond=500 + pretty : Text + pretty self = + parts = Vector.build builder-> + builder.append "Period.new" + if self.years != 0 then builder.append ((if builder.length!=1 then " years=" else " ") + self.years.to_text) + if self.months != 0 then builder.append ((if builder.length!=2 then " months=" else " ") + self.months.to_text) + if self.days != 0 then builder.append ((if builder.length!=3 then " days=" else " ") + self.days.to_text) + parts.join "" + ## PRIVATE catch_java_exceptions operation ~action = handle_arithmetic_exception caught_panic = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso index b49ea45eaebd..efd8e18ecf89 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso @@ -11,6 +11,7 @@ import project.Data.Time.Duration.Duration import project.Data.Time.Period.Period import project.Data.Time.Time_Period.Time_Period import project.Data.Time.Time_Zone.Time_Zone +import project.Data.Vector.Vector import project.Error.Error import project.Errors.Common.Type_Error import project.Errors.Illegal_Argument.Illegal_Argument @@ -20,7 +21,8 @@ import project.Nothing.Nothing import project.Panic.Panic from project.Data.Boolean import Boolean, False, True from project.Data.Text.Extensions import all -from project.Metadata import Display, Widget +from project.Metadata import Display, make_single_choice, Widget +from project.Metadata.Choice import Option from project.Widget_Helpers import make_time_format_selector polyglot java import java.lang.Exception as JException @@ -492,16 +494,34 @@ type Time_Of_Day format.format_time self ## PRIVATE - Convert to a Enso code representation of this Time_Of_Day. 
+ GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + + > Example + Getting the Enso code of the time 12:34:00.5 + + (Time_Of_Day.new 12 34 0 500).pretty + ## Returns a Text + Time_Of_Day.new 12 34 millisecond=500 pretty : Text - pretty self = "(Time_Of_Day.new" - + (if self.hour == 0 then "" else " hour="+self.hour.to_text) - + (if self.minute == 0 then "" else " minute="+self.minute.to_text) - + (if self.second == 0 then "" else " second="+self.second.to_text) - + (if self.millisecond == 0 then "" else " millisecond="+self.millisecond.to_text) - + (if self.microsecond == 0 then "" else " microsecond="+self.microsecond.to_text) - + (if self.nanosecond == 0 then "" else " nanosecond="+self.nanosecond.to_text) - + ")" + pretty self = + parts = Vector.build builder-> + builder.append "Time_Of_Day.new" + if self.hour != 0 then builder.append ((if builder.length!=1 then " hour=" else " ") + self.hour.to_text) + if self.minute != 0 then builder.append ((if builder.length!=2 then " minute=" else " ") + self.minute.to_text) + if self.second != 0 then builder.append ((if builder.length!=3 then " second=" else " ") + self.second.to_text) + if self.millisecond != 0 then builder.append ((if builder.length!=4 then " millisecond=" else " ") + self.millisecond.to_text) + if self.microsecond != 0 then builder.append ((if builder.length!=5 then " microsecond=" else " ") + self.microsecond.to_text) + if self.nanosecond != 0 then builder.append ((if builder.length!=6 then " nanosecond=" else " ") + self.nanosecond.to_text) + parts.join "" + + ## PRIVATE + Gets the default drop down option for Time_Of_Day. 
+ default_widget : Boolean -> Widget + default_widget (include_now:Boolean=False) = + options = [Option "" "Time_Of_Day.new"] + (if include_now then [Option "" "Time_Of_Day.now"] else []) + Widget.Single_Choice values=options display=Display.When_Modified ## PRIVATE Time_Of_Day.from (that:JS_Object) = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso index 9133bfa966e3..75c55383eb9f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso @@ -214,10 +214,11 @@ type Time_Zone zone_names = Time_Utils.getZoneNames ## PRIVATE - Convert to a Enso code representation of this Time_Of_Day. + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. pretty : Text - pretty self = "(Time_Zone.parse '" + self.zone_id + "')" - + pretty self = "Time_Zone.parse " + self.zone_id.pretty ## PRIVATE Time_Zone.from (that:JS_Object) = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso index 13d71a160c5d..3880dafc53cf 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso @@ -878,6 +878,14 @@ type Vector a short_display_text self (max_entries : Integer = 10) = Array_Like_Helpers.short_display_text self max_entries + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + pretty : Text + pretty self = self.map .pretty . 
join ", " "[" "]" + + ## ALIAS append, concatenate, union GROUP Operators ICON union diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso index f5fc798054d8..bb95c33e56b9 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso @@ -530,6 +530,7 @@ type Enso_File "Enso_File "+self.path ## PRIVATE + Converts the file descriptor to a JSON object. to_js_object : JS_Object to_js_object self = JS_Object.from_pairs [["type", "Enso_File"], ["constructor", "new"], ["path", self.path.to_text]] diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso index 538dface7dc4..33ee5cb13487 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso @@ -31,7 +31,19 @@ polyglot java import org.enso.base.enso_cloud.HideableValue.SecretValue ## A reference to a secret stored in the Enso Cloud. type Enso_Secret ## PRIVATE - Value name:Text id:Text path:Enso_Path + private Value internal_name:Text id:Text internal_path:Enso_Path + + ## GROUP Metadata + ICON metadata + The name of the secret. + name : Text + name self = self.internal_name + + ## GROUP Metadata + ICON metadata + The path of the secret. + path : Text + path self = self.internal_path.to_text ## GROUP Output ICON edit @@ -146,6 +158,29 @@ type Enso_Secret EnsoSecretHelper.deleteSecretFromCache self.id self + ## PRIVATE + Returns a text representation of the secret. + to_text : Text + to_text self = "Enso_Secret " + self.path.to_text + + ## PRIVATE + Returns a display text representation of the secret. 
+ to_display_text : Text + to_display_text self = "Enso_Secret {" + self.name + "}" + + ## PRIVATE + Converts the secret to a JSON object. + to_js_object : JS_Object + to_js_object self = + JS_Object.from_pairs [["type", "Enso_Secret"], ["constructor", "get"], ["path", self.path.to_text]] + + ## PRIVATE + GROUP convert + ICON enso_logo + Convert the value to a corresponding Enso code representation. + pretty : Text + pretty self = "Enso_Secret.get " + self.path.to_text.pretty + ## PRIVATE type Enso_Secret_Error ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso index 5f115b906d0e..a5f5aa5c08d1 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso @@ -355,20 +355,20 @@ type Request_Error ## PRIVATE Access the HTTP's timeout (for testing purposes). -get_timeout : HTTP -> Duration -get_timeout http:HTTP = http.timeout +_get_timeout : HTTP -> Duration +_get_timeout http:HTTP = http.timeout ## PRIVATE Access the HTTP's follow_redirects (for testing purposes). -get_follow_redirects : HTTP -> Boolean -get_follow_redirects http:HTTP = http.follow_redirects +_get_follow_redirects : HTTP -> Boolean +_get_follow_redirects http:HTTP = http.follow_redirects ## PRIVATE Access the HTTP's proxy (for testing purposes). -get_proxy : HTTP -> Proxy -get_proxy http:HTTP = http.proxy +_get_proxy : HTTP -> Proxy +_get_proxy http:HTTP = http.proxy ## PRIVATE Access the HTTP's version (for testing purposes). 
-get_version : HTTP -> HTTP_Version -get_version http:HTTP = http.version +_get_version : HTTP -> HTTP_Version +_get_version http:HTTP = http.version diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso index 8c79be929151..2b09db5597f8 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso @@ -193,7 +193,7 @@ type Response example_write = Data.fetch Examples.geo_data_url . write Examples.scratch_file - @path (Widget.Text_Input display=Display.Always) + @file (Widget.Text_Input display=Display.Always) write : Writable_File -> Existing_File_Behavior -> File write self file:Writable_File on_existing_file=Existing_File_Behavior.Backup = self.body.write file on_existing_file diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso index a7c3477f08c9..31cd11eda675 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso @@ -180,7 +180,7 @@ type Response_Body example_write = Examples.get_geo_data.write Examples.scratch_file - @path (Widget.Text_Input display=Display.Always) + @file (Widget.Text_Input display=Display.Always) write : Writable_File -> Existing_File_Behavior -> File write self file:Writable_File on_existing_file=Existing_File_Behavior.Backup = self.with_stream body_stream-> diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso index 312cc5c70190..85a0522c7752 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso @@ -232,7 
+232,7 @@ type Bytes type JSON_Format ## PRIVATE Resolve an unresolved constructor to the actual type. - resolve : Function -> Bytes | Nothing + resolve : Function -> JSON_Format | Nothing resolve constructor = _ = constructor Nothing diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso index 53cc25ab3e98..5a838234eecb 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso @@ -2589,6 +2589,13 @@ type Column data = Statistic.running self.to_vector statistic Column.from_vector name data + ## PRIVATE + pretty : Text + pretty self = + name = self.name.pretty + data = self.to_vector.pretty + "Column.from_vector " + name + " " + data + ## PRIVATE Folds the vectorized operation over the provided column and values. When more diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso index 9fd946da0195..91ac28b900d6 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso @@ -3739,6 +3739,12 @@ type Table if merged_columns.is_empty then problem_builder_for_unification.raise_no_output_columns_with_cause else Table.new merged_columns + ## PRIVATE + pretty : Text + pretty self = + data = self.columns.map c->("[" + c.name.pretty + ", " + c.to_vector.pretty + "]") . join ", " + "Table.new [" + data + "]" + ## PRIVATE A helper to create a new table consisting of slices of the original table. 
slice_ranges table ranges = diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Widgets.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Widgets.enso index 0a20b2b63f87..1a9c60c4db71 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Widgets.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Widgets.enso @@ -1,5 +1,7 @@ from Standard.Base import all +import Standard.Base.Metadata.Widget import Standard.Base.Errors.Common.Not_Invokable +from Standard.Base.Logging import all from Standard.Base.Meta import Instrumentor from Standard.Table import all @@ -20,7 +22,11 @@ get_widget_json value call_name argument_names uuids="{}" = uuid:Text -> Instrumentor.uuid uuid _ -> Nothing - read_annotation argument = + log_panic argument err = + Widget.log_message "Failed for "+argument+": "+err.payload.to_display_text ..Warning + Nothing + + read_annotation argument = Panic.catch Any handler=(log_panic argument) <| annotation = Warning.clear <| Meta.get_annotation value call_name argument return_target err = err.payload.target Panic.catch Not_Invokable handler=return_target diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/text/AnyPrettyNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/text/AnyPrettyNode.java index 00b2444f5654..2d1301804798 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/text/AnyPrettyNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/text/AnyPrettyNode.java @@ -47,7 +47,7 @@ Text doOther(Object object) { @CompilerDirectives.TruffleBoundary private Text consName(AtomConstructor constructor) { - return Text.create(constructor.getDisplayName()); + return Text.create(constructor.getType().getName() + "." 
+ constructor.getName()); } @CompilerDirectives.TruffleBoundary diff --git a/test/Base_Tests/src/Data/Array_Spec.enso b/test/Base_Tests/src/Data/Array_Spec.enso index 74799f5f26fc..d08ad876e342 100644 --- a/test/Base_Tests/src/Data/Array_Spec.enso +++ b/test/Base_Tests/src/Data/Array_Spec.enso @@ -49,6 +49,14 @@ add_specs suite_builder = make_enso_array [] . reduce (+) . should_fail_with (Empty_Error.Error Array) make_enso_array [] . reduce (+) 0 . should_equal 0 + group_builder.specify "should have a well-defined debug-printing method" <| + ## Enso arrays should be coded as Vectors when Enso code is generated. + make_enso_array [] . pretty . should_equal "[]" + make_enso_array [1,2,3] . pretty . should_equal "[1, 2, 3]" + make_enso_array [Nothing] . pretty . should_equal "[Nothing]" + make_enso_array [True, False, 'a'] . pretty . should_equal "[True, False, 'a']" + make_enso_array [Date.new 2022 1 1] . pretty . should_equal "[Date.new 2022 1 1]" + suite_builder.group "Compare functionality with Vector" group_builder-> group_builder.specify "compare methods" <| vector_methods = Meta.meta Vector . methods . sort diff --git a/test/Base_Tests/src/Data/Range_Spec.enso b/test/Base_Tests/src/Data/Range_Spec.enso index 719b0a636dc5..a80a5cd52a0f 100644 --- a/test/Base_Tests/src/Data/Range_Spec.enso +++ b/test/Base_Tests/src/Data/Range_Spec.enso @@ -7,6 +7,7 @@ import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Common.Unsupported_Argument_Types import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -559,6 +560,24 @@ add_specs suite_builder = suite_builder.group "Range" group_builder-> invalid_range . find _->True . should_fail_with Illegal_State invalid_range . contains 0 . 
should_fail_with Illegal_State + group_builder.specify "should define friendly text representations" <| + range = 1.up_to 100 + range_2 = 0.up_to 10 . with_step 2 + range_3 = 20.down_to 0 . with_step 3 + + range.to_text . should_equal "(Between 1 100 1)" + range_2.to_text . should_equal "(Between 0 10 2)" + range_3.to_text . should_equal "(Between 20 0 -3)" + + range.to_display_text . should_equal "[1 .. 100]" + range_2.to_display_text . should_equal "[0 .. 10 by 2]" + range_3.to_display_text . should_equal "[20 .. 0 by -3]" + + range.pretty . should_equal "Range.new 1 100" + range_2.pretty . should_equal "Range.new 0 10 step=2" + range_3.pretty . should_equal "Range.new 20 0 step=3" + Debug.eval range_3.pretty . should_equal range_3 + main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder diff --git a/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso b/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso index 7c4ebf7efce4..b2b469ae1c61 100644 --- a/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Date_Range_Spec.enso @@ -2,6 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Empty_Error.Empty_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -194,8 +195,11 @@ add_specs suite_builder = r1.to_text . should_equal '(Date_Range from 2020-02-28 up to 2020-03-02)' r2.to_text . should_equal '(Date_Range from 2020-03-20 down to 2020-02-29 by 7D)' - r1.pretty . should_equal r1.to_text - r2.pretty . should_equal r2.to_text + r1.pretty . should_equal 'Date_Range.new (Date.new 2020 2 28) (Date.new 2020 3 2)' + (Debug.eval r1.pretty) . should_equal r1 + + r2.pretty . should_equal 'Date_Range.new (Date.new 2020 3 20) (Date.new 2020 2 29) (Period.new days=7)' + (Debug.eval r2.pretty) . should_equal r2 r1.to_display_text . should_equal '[2020-02-28 .. 
2020-03-02]' r2.to_display_text . should_equal '[2020-03-20 .. 2020-02-29 by -7D]' diff --git a/test/Base_Tests/src/Data/Time/Date_Spec.enso b/test/Base_Tests/src/Data/Time/Date_Spec.enso index 466655891763..31f93695dc8e 100644 --- a/test/Base_Tests/src/Data/Time/Date_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Date_Spec.enso @@ -2,6 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Time_Error.Time_Error +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -96,6 +97,11 @@ spec_with suite_builder name create_new_date parse_date pending=Nothing = datetime.date . should_equal date datetime.time_of_day . should_equal time + group_builder.specify "should convert to Enso code" <| + date = create_new_date 2001 12 21 + date.pretty . should_equal "Date.new 2001 12 21" + Debug.eval date.pretty . should_equal date + group_builder.specify "should convert to Json" <| date = create_new_date 2001 12 21 date.to_json.should_equal <| diff --git a/test/Base_Tests/src/Data/Time/Date_Time_Spec.enso b/test/Base_Tests/src/Data/Time/Date_Time_Spec.enso index b5617f721092..328a2429af92 100644 --- a/test/Base_Tests/src/Data/Time/Date_Time_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Date_Time_Spec.enso @@ -2,6 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Time_Error.Time_Error +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -119,6 +120,18 @@ spec_with suite_builder name create_new_datetime parse_datetime nanoseconds_loss text = create_new_datetime 1970 (zone = Time_Zone.utc) . to_text text . should_equal "1970-01-01 00:00:00Z[UTC]" + group_builder.specify "should convert to Enso code" <| + create_new_datetime 1970 . pretty . should_equal "Date_Time.new 1970 1 1" + create_new_datetime 1923 9 24 . pretty . 
should_equal "Date_Time.new 1923 9 24" + create_new_datetime 1923 9 24 12 20 44 . pretty . should_equal "Date_Time.new 1923 9 24 12 20 44" + if nanoseconds_loss_in_precision.not then + create_new_datetime 1923 9 24 12 20 nanosecond=500000000 . pretty . should_equal "Date_Time.new 1923 9 24 12 20 millisecond=500" + create_new_datetime 1923 9 24 12 20 nanosecond=500000 . pretty . should_equal "Date_Time.new 1923 9 24 12 20 microsecond=500" + create_new_datetime 1923 9 24 12 20 nanosecond=500 . pretty . should_equal "Date_Time.new 1923 9 24 12 20 nanosecond=500" + + date_time = create_new_datetime 1970 12 21 11 23 45 nanosecond=123456789 zone=Time_Zone.utc + Debug.eval date_time.pretty . should_equal date_time + group_builder.specify "should convert to Json" <| time = create_new_datetime 1970 12 21 (zone = Time_Zone.utc) time.to_json.should_equal <| diff --git a/test/Base_Tests/src/Data/Time/Day_Of_Week_Spec.enso b/test/Base_Tests/src/Data/Time/Day_Of_Week_Spec.enso index 32f3a14aa4a3..76d6a0145a07 100644 --- a/test/Base_Tests/src/Data/Time/Day_Of_Week_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Day_Of_Week_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -11,6 +12,10 @@ add_specs suite_builder = Day_Of_Week.Friday.to_integer . should_equal 6 Day_Of_Week.Saturday.to_integer . should_equal 7 + group_builder.specify "should be able to convert to Enso code" <| + Day_Of_Week.Sunday.pretty . should_equal "Day_Of_Week.Sunday" + Debug.eval Day_Of_Week.Wednesday.pretty . should_equal Day_Of_Week.Wednesday + group_builder.specify "should be able to convert from an Integer" <| Day_Of_Week.from 1 . should_equal Day_Of_Week.Sunday Day_Of_Week.from 4 . 
should_equal Day_Of_Week.Wednesday diff --git a/test/Base_Tests/src/Data/Time/Period_Spec.enso b/test/Base_Tests/src/Data/Time/Period_Spec.enso index f61c4abfbbb8..a1c841add966 100644 --- a/test/Base_Tests/src/Data/Time/Period_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Period_Spec.enso @@ -1,5 +1,6 @@ from Standard.Base import all import Standard.Base.Errors.Common.Incomparable_Values +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -50,6 +51,18 @@ add_specs suite_builder = Period.new years=2 days=3 . to_display_text . should_equal "2Y 0M 3D" Period.new days=18 . to_display_text . should_equal "18D" + group_builder.specify "should render to Enso code" <| + Period.new . pretty . should_equal "Period.new" + Period.new years=2 . pretty . should_equal "Period.new 2" + Period.new months=24 . pretty . should_equal "Period.new months=24" + Period.new months=4 . pretty . should_equal "Period.new months=4" + Period.new years=1 months=6 . pretty . should_equal "Period.new 1 6" + Period.new years=2 days=3 . pretty . should_equal "Period.new 2 days=3" + Period.new days=18 . pretty . should_equal "Period.new days=18" + + period = Period.new years=2 days=3 + Debug.eval period.pretty . 
should_equal period + main filter=Nothing = suite = Test.build suite_builder-> add_specs suite_builder diff --git a/test/Base_Tests/src/Data/Time/Time_Of_Day_Spec.enso b/test/Base_Tests/src/Data/Time/Time_Of_Day_Spec.enso index 23cf7ffd8d99..e4a88f896797 100644 --- a/test/Base_Tests/src/Data/Time/Time_Of_Day_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Time_Of_Day_Spec.enso @@ -3,6 +3,7 @@ import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Time_Error.Time_Error +import Standard.Base.Runtime.Debug from Standard.Test import all @@ -58,6 +59,16 @@ specWith suite_builder name create_new_time parse_time nanoseconds_loss_in_preci text = create_new_time 12 20 44 . to_text text . should_equal "12:20:44" + group_builder.specify "should convert to Enso code" <| + create_new_time 12 20 . pretty . should_equal "Time_Of_Day.new 12 20" + create_new_time 12 20 44 . pretty . should_equal "Time_Of_Day.new 12 20 44" + create_new_time 12 20 0 500000000 . pretty . should_equal "Time_Of_Day.new 12 20 millisecond=500" + create_new_time 12 20 0 500000 . pretty . should_equal "Time_Of_Day.new 12 20 microsecond=500" + if nanoseconds_loss_in_precision.not then create_new_time 12 20 0 500 . pretty . should_equal "Time_Of_Day.new 12 20 nanosecond=500" + + time = create_new_time 12 20 0 500000 + Debug.eval time.pretty . 
should_equal time + group_builder.specify "should convert to Json" <| time = create_new_time 1 2 3 time.to_json.should_equal <| diff --git a/test/Base_Tests/src/Data/Vector_Spec.enso b/test/Base_Tests/src/Data/Vector_Spec.enso index fca7f5bbf0b2..97fba3a36a9e 100644 --- a/test/Base_Tests/src/Data/Vector_Spec.enso +++ b/test/Base_Tests/src/Data/Vector_Spec.enso @@ -12,6 +12,7 @@ import Standard.Base.Errors.Common.Not_Found import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Unimplemented.Unimplemented +import Standard.Base.Runtime.Debug import Standard.Base.Runtime.Ref.Ref import Standard.Base.Runtime.State from Standard.Base.Panic import Wrapped_Dataflow_Error @@ -1243,6 +1244,11 @@ add_specs suite_builder = [Nothing].pretty.should_equal "[Nothing]" [True, False, 'a'].pretty . should_equal "[True, False, 'a']" [Foo.Value True].pretty . should_equal "[(Foo.Value True)]" + [Date.new 2022 1 1].pretty . should_equal "[Date.new 2022 1 1]" + + mixed = [1, 2, 'a', (Foo.Value True), Date.new 2022 1 1, Nothing] + mixed.pretty . should_equal "[1, 2, 'a', (Foo.Value True), Date.new 2022 1 1, Nothing]" + Debug.eval (mixed.pretty) . 
should_equal [1, 2, 'a', Foo.Value True, Date.new 2022 1 1, Nothing] type_spec suite_builder "Use Vector as vectors" identity type_spec suite_builder "Use Array as vectors" (v -> v.to_array) diff --git a/test/Base_Tests/src/Network/Http_Spec.enso b/test/Base_Tests/src/Network/Http_Spec.enso index b72bb1175090..9a453ceb1522 100644 --- a/test/Base_Tests/src/Network/Http_Spec.enso +++ b/test/Base_Tests/src/Network/Http_Spec.enso @@ -11,7 +11,7 @@ import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Base.Network.HTTP.Request_Error import Standard.Base.Network.Proxy.Proxy import Standard.Base.Runtime.Context -from Standard.Base.Network.HTTP import _resolve_headers, get_follow_redirects, get_proxy, get_timeout, get_version +from Standard.Base.Network.HTTP import _resolve_headers, _get_follow_redirects, _get_proxy, _get_timeout, _get_version from Standard.Test import all from Standard.Test.Execution_Context_Helpers import run_with_and_without_output @@ -66,11 +66,11 @@ add_specs suite_builder = suite_builder.group "HTTP client" pending=pending_has_url group_builder-> group_builder.specify "should create HTTP client with timeout setting" <| http = HTTP.new (timeout = (Duration.new seconds=30)) - (get_timeout http).should_equal (Duration.new seconds=30) + (_get_timeout http).should_equal (Duration.new seconds=30) group_builder.specify "should create HTTP client with follow_redirects setting" <| http = HTTP.new (follow_redirects = False) - (get_follow_redirects http).should_equal False + (_get_follow_redirects http).should_equal False Test.with_retries <| r = http.request (Request.new HTTP_Method.Get base_url_with_slash+"test_redirect") @@ -81,12 +81,12 @@ add_specs suite_builder = group_builder.specify "should create HTTP client with proxy setting" <| proxy_setting = Proxy.Address "example.com" 80 http = HTTP.new (proxy = proxy_setting) - (get_proxy http).should_equal proxy_setting + (_get_proxy http).should_equal proxy_setting group_builder.specify 
"should create HTTP client with version setting" <| version_setting = HTTP_Version.HTTP_2 http = HTTP.new (version = version_setting) - (get_version http).should_equal version_setting + (_get_version http).should_equal version_setting url_get = base_url_with_slash.if_not_nothing <| base_url_with_slash + "get" suite_builder.group "fetch" pending=pending_has_url group_builder-> diff --git a/test/Table_Tests/src/In_Memory/Column_Spec.enso b/test/Table_Tests/src/In_Memory/Column_Spec.enso index f788d10fedc5..025c47abb05f 100644 --- a/test/Table_Tests/src/In_Memory/Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Column_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Runtime.Debug import project.Util @@ -257,6 +258,20 @@ add_specs suite_builder = r2 = Column.from_vector "X" [] (Value_Type.Char size=0 variable_length=True) r2.should_fail_with Illegal_Argument + group_builder.specify "should be able to serialize to Enso code" <| + c1 = Column.from_vector "X" [1, 2] Value_Type.Float + c1.pretty . should_equal 'Column.from_vector \'X\' [1.0, 2.0]' + Debug.eval c1.pretty . should_equal c1 + + c2 = Column.from_vector "X" ["a", 42] + c2.pretty . should_equal 'Column.from_vector \'X\' [\'a\', 42]' + + c3 = Column.from_vector "X" ["aaa", "bbb"] + c3.pretty . should_equal 'Column.from_vector \'X\' [\'aaa\', \'bbb\']' + + c4 = Column.from_vector "X" [Time_Of_Day.new 10 11 12, Time_Of_Day.new 11 30] + c4.pretty . should_equal 'Column.from_vector \'X\' [Time_Of_Day.new 10 11 12, Time_Of_Day.new 11 30]' + suite_builder.group "Rounding" group_builder-> group_builder.specify "should be able to round a column of decimals" <| Column.from_vector "foo" [1.2, 2.3, 2.5, 3.6] . round . 
should_equal (Column.from_vector "round([foo])" [1, 2, 3, 4]) diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 50ea98631405..fecd48c0ff0c 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -4,6 +4,7 @@ import Standard.Base.Errors.Common.Incomparable_Values import Standard.Base.Errors.Common.Index_Out_Of_Bounds import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Runtime.Debug from Standard.Table import Table, Column, Sort_Column, Aggregate_Column, Blank_Selector, Value_Type from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names, No_Input_Columns_Selected, Missing_Input_Columns, No_Such_Column, Floating_Point_Equality, Invalid_Value_Type, Row_Count_Mismatch @@ -88,6 +89,23 @@ add_specs suite_builder = r2.at "foo" . to_vector . should_equal [] r2.at "bar" . to_vector . should_equal [] + group_builder.specify "should allow creating Enso code from a Table" <| + r = Table.new [["foo", [1, 2, 3]], ["bar", [False, True, False]]] + r.pretty . should_equal "Table.new [['foo', [1, 2, 3]], ['bar', [False, True, False]]]" + Debug.eval r.pretty . should_equal r + + r2 = Table.new [["foo", []], ["bar", []]] + r2.pretty . should_equal "Table.new [['foo', []], ['bar', []]]" + Debug.eval r2.pretty . should_equal r2 + + r3 = Table.new [["date", [Date.new 2022 8 27, Date.new 1999 1 1]], ["time", [Time_Of_Day.new 18, Time_Of_Day.new 1 2 34]]] + r3.pretty . should_equal "Table.new [['date', [Date.new 2022 8 27, Date.new 1999 1 1]], ['time', [Time_Of_Day.new 18, Time_Of_Day.new 1 2 34]]]" + Debug.eval r3.pretty . 
should_equal r3 + + r4 = Table.new [["foo", [1, 2, 3]], ["bar", [False, True, False]], ["date", [Date.new 2022 8 27, Date.new 1999 1 1, Date.new 2012 1 23]], ["time", [Time_Of_Day.new 18, Time_Of_Day.new 1 2 34, Time_Of_Day.new 12 0]]] + r4.pretty . should_equal "Table.new [['foo', [1, 2, 3]], ['bar', [False, True, False]], ['date', [Date.new 2022 8 27, Date.new 1999 1 1, Date.new 2012 1 23]], ['time', [Time_Of_Day.new 18, Time_Of_Day.new 1 2 34, Time_Of_Day.new 12]]]" + Debug.eval r4.pretty . should_equal r4 + group_builder.specify "should handle error scenarios gracefully" <| Table.new [["X", [1,2,3]], ["Y", [4]]] . should_fail_with Illegal_Argument Table.new [["X", [1]], ["X", [2]]] . should_fail_with Illegal_Argument From c6e87c2a1739ac13cc386deef1cca576c355e838 Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Thu, 31 Oct 2024 20:36:10 +1000 Subject: [PATCH 014/286] Optimize asset table rendering (#11382) - Depends on: - #11380 - Some optimizations for re-rendering assets table: - Omit `visibilities` from `state` in favor of passing each `AssetRow`'s `visibility` directly to the row. This minimizes spurious `state` updates. - Pass `id` and `parentId` to `AssetRow` instead of the entire object. This ensures that re-fetches do not force a re-render of the `AssetRow` - we are no longer passing a reference to the object, so we are now comparing by string comparison (which is almost always stable). 
# Important Notes None --- app/common/package.json | 8 +- app/common/src/services/Backend.ts | 80 ++++-- app/common/src/utilities/data/object.ts | 9 + .../components/dashboard/AssetRow.tsx | 238 +++++++----------- .../dashboard/DatalinkNameColumn.tsx | 23 +- .../dashboard/DirectoryNameColumn.tsx | 43 ++-- .../components/dashboard/FileNameColumn.tsx | 31 +-- .../dashboard/ProjectNameColumn.tsx | 62 ++--- .../components/dashboard/SecretNameColumn.tsx | 16 +- .../dashboard/components/dashboard/column.ts | 7 +- .../dashboard/column/DocsColumn.tsx | 3 +- .../dashboard/column/LabelsColumn.tsx | 43 +--- .../dashboard/column/ModifiedColumn.tsx | 19 +- .../dashboard/column/SharedWithColumn.tsx | 7 +- .../src/dashboard/events/AssetEventType.ts | 1 - app/gui/src/dashboard/events/assetEvent.ts | 7 - app/gui/src/dashboard/hooks/backendHooks.ts | 63 +++-- .../dashboard/layouts/AssetContextMenu.tsx | 68 ++--- app/gui/src/dashboard/layouts/AssetPanel.tsx | 12 +- .../layouts/AssetProjectSessions.tsx | 10 +- .../src/dashboard/layouts/AssetProperties.tsx | 25 +- .../layouts/AssetVersions/AssetVersion.tsx | 14 +- .../layouts/AssetVersions/AssetVersions.tsx | 15 +- app/gui/src/dashboard/layouts/AssetsTable.tsx | 171 +++++++------ .../dashboard/pages/dashboard/Dashboard.tsx | 18 +- .../src/dashboard/services/LocalBackend.ts | 11 +- .../src/dashboard/utilities/AssetTreeNode.ts | 3 - pnpm-lock.yaml | 22 +- 28 files changed, 471 insertions(+), 558 deletions(-) diff --git a/app/common/package.json b/app/common/package.json index 2ab252dbb0b6..d3bb6c880eec 100644 --- a/app/common/package.json +++ b/app/common/package.json @@ -34,11 +34,11 @@ "@tanstack/vue-query": ">= 5.54.0 < 5.56.0" }, "dependencies": { - "idb-keyval": "^6.2.1", - "react": "^18.3.1", "@tanstack/query-persist-client-core": "^5.54.0", "@tanstack/vue-query": ">= 5.54.0 < 5.56.0", - "vue": "^3.5.2", - "vitest": "^1.3.1" + "idb-keyval": "^6.2.1", + "react": "^18.3.1", + "vitest": "^1.3.1", + "vue": "^3.5.2" } } diff --git 
a/app/common/src/services/Backend.ts b/app/common/src/services/Backend.ts index b690abcc8c0a..d2d575b80233 100644 --- a/app/common/src/services/Backend.ts +++ b/app/common/src/services/Backend.ts @@ -762,8 +762,9 @@ export const ASSET_TYPE_ORDER: Readonly> = { * Metadata uniquely identifying a directory entry. * These can be Projects, Files, Secrets, or other directories. */ -export interface BaseAsset { - readonly id: AssetId +export interface Asset { + readonly type: Type + readonly id: IdType[Type] readonly title: string readonly modifiedAt: dateTime.Rfc3339DateTime /** @@ -774,16 +775,10 @@ export interface BaseAsset { readonly permissions: readonly AssetPermission[] | null readonly labels: readonly LabelName[] | null readonly description: string | null -} - -/** - * Metadata uniquely identifying a directory entry. - * These can be Projects, Files, Secrets, or other directories. - */ -export interface Asset extends BaseAsset { - readonly type: Type - readonly id: IdType[Type] readonly projectState: Type extends AssetType.project ? ProjectStateType : null + readonly extension: Type extends AssetType.file ? string : null + readonly parentsPath: string + readonly virtualParentsPath: string } /** A convenience alias for {@link Asset}<{@link AssetType.directory}>. */ @@ -823,11 +818,19 @@ export function createRootDirectoryAsset(directoryId: DirectoryId): DirectoryAss parentId: DirectoryId(''), permissions: [], projectState: null, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } +/** Extract the file extension from a file name. */ +function fileExtension(fileNameOrPath: string) { + return fileNameOrPath.match(/[.]([^.]+?)$/)?.[1] ?? '' +} + /** Creates a {@link FileAsset} using the given values. 
*/ export function createPlaceholderFileAsset( title: string, @@ -842,8 +845,11 @@ export function createPlaceholderFileAsset( permissions: assetPermissions, modifiedAt: dateTime.toRfc3339(new Date()), projectState: null, + extension: fileExtension(title), labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } @@ -868,8 +874,11 @@ export function createPlaceholderProjectAsset( ...(organization != null ? { openedBy: organization.email } : {}), ...(path != null ? { path } : {}), }, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } @@ -881,16 +890,24 @@ export function createSpecialLoadingAsset(directoryId: DirectoryId): SpecialLoad return { type: AssetType.specialLoading, title: '', - id: LoadingAssetId(uniqueString.uniqueString()), + id: LoadingAssetId(`${AssetType.specialLoading}-${uniqueString.uniqueString()}`), modifiedAt: dateTime.toRfc3339(new Date()), parentId: directoryId, permissions: [], projectState: null, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } +/** Whether a given {@link string} is an {@link LoadingAssetId}. */ +export function isLoadingAssetId(id: string): id is LoadingAssetId { + return id.startsWith(`${AssetType.specialLoading}-`) +} + /** * Creates a {@link SpecialEmptyAsset}, with all irrelevant fields initialized to default * values. @@ -899,16 +916,24 @@ export function createSpecialEmptyAsset(directoryId: DirectoryId): SpecialEmptyA return { type: AssetType.specialEmpty, title: '', - id: EmptyAssetId(uniqueString.uniqueString()), + id: EmptyAssetId(`${AssetType.specialEmpty}-${uniqueString.uniqueString()}`), modifiedAt: dateTime.toRfc3339(new Date()), parentId: directoryId, permissions: [], projectState: null, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } +/** Whether a given {@link string} is an {@link EmptyAssetId}. 
*/ +export function isEmptyAssetId(id: string): id is EmptyAssetId { + return id.startsWith(`${AssetType.specialEmpty}-`) +} + /** * Creates a {@link SpecialErrorAsset}, with all irrelevant fields initialized to default * values. @@ -917,16 +942,24 @@ export function createSpecialErrorAsset(directoryId: DirectoryId): SpecialErrorA return { type: AssetType.specialError, title: '', - id: ErrorAssetId(uniqueString.uniqueString()), + id: ErrorAssetId(`${AssetType.specialError}-${uniqueString.uniqueString()}`), modifiedAt: dateTime.toRfc3339(new Date()), parentId: directoryId, permissions: [], projectState: null, + extension: null, labels: [], description: null, + parentsPath: '', + virtualParentsPath: '', } } +/** Whether a given {@link string} is an {@link ErrorAssetId}. */ +export function isErrorAssetId(id: string): id is ErrorAssetId { + return id.startsWith(`${AssetType.specialError}-`) +} + /** Any object with a `type` field matching the given `AssetType`. */ interface HasType { readonly type: Type @@ -1386,6 +1419,25 @@ export function extractProjectExtension(name: string) { return { basename: basename ?? name, extension: extension ?? '' } } +/** Check whether a pending rename is valid. */ +export function isNewTitleValid( + item: AnyAsset, + newTitle: string, + siblings?: readonly AnyAsset[] | null, +) { + siblings ??= [] + return ( + newTitle !== '' && + newTitle !== item.title && + siblings.every(sibling => { + const isSelf = sibling.id === item.id + const hasSameType = sibling.type === item.type + const hasSameTitle = sibling.title === newTitle + return !(!isSelf && hasSameType && hasSameTitle) + }) + ) +} + /** Network error class. 
*/ export class NetworkError extends Error { /** diff --git a/app/common/src/utilities/data/object.ts b/app/common/src/utilities/data/object.ts index ac8934358aa6..f8010da8aef6 100644 --- a/app/common/src/utilities/data/object.ts +++ b/app/common/src/utilities/data/object.ts @@ -57,6 +57,15 @@ export function unsafeMutable(object: T): { -readonly [K in ke // === unsafeEntries === // ===================== +/** + * Return the entries of an object. UNSAFE only when it is possible for an object to have + * extra keys. + */ +export function unsafeKeys(object: T): readonly (keyof T)[] { + // @ts-expect-error This is intentionally a wrapper function with a different type. + return Object.keys(object) +} + /** * Return the entries of an object. UNSAFE only when it is possible for an object to have * extra keys. diff --git a/app/gui/src/dashboard/components/dashboard/AssetRow.tsx b/app/gui/src/dashboard/components/dashboard/AssetRow.tsx index 099779de379f..623c00160157 100644 --- a/app/gui/src/dashboard/components/dashboard/AssetRow.tsx +++ b/app/gui/src/dashboard/components/dashboard/AssetRow.tsx @@ -8,7 +8,6 @@ import BlankIcon from '#/assets/blank.svg' import * as dragAndDropHooks from '#/hooks/dragAndDropHooks' import { useEventCallback } from '#/hooks/eventCallbackHooks' -import * as setAssetHooks from '#/hooks/setAssetHooks' import { useDriveStore, useSetSelectedKeys } from '#/providers/DriveProvider' import * as modalProvider from '#/providers/ModalProvider' @@ -30,10 +29,12 @@ import * as localBackend from '#/services/LocalBackend' import * as backendModule from '#/services/Backend' import { Text } from '#/components/AriaComponents' +import type { AssetEvent } from '#/events/assetEvent' import { useCutAndPaste } from '#/events/assetListEvent' import { backendMutationOptions, backendQueryOptions, + useAssetPassiveListenerStrict, useBackendMutationState, } from '#/hooks/backendHooks' import { createGetProjectDetailsQuery } from '#/hooks/projectHooks' @@ -69,9 +70,8 @@ 
const DRAG_EXPAND_DELAY_MS = 500 /** Common properties for state and setters passed to event handlers on an {@link AssetRow}. */ export interface AssetRowInnerProps { - readonly key: backendModule.AssetId - readonly item: assetTreeNode.AnyAssetTreeNode - readonly setItem: React.Dispatch> + readonly asset: backendModule.AnyAsset + readonly path: string readonly state: assetsTable.AssetsTableState readonly rowState: assetsTable.AssetRowState readonly setRowState: React.Dispatch> @@ -80,64 +80,57 @@ export interface AssetRowInnerProps { /** Props for an {@link AssetRow}. */ export interface AssetRowProps { readonly isOpened: boolean - readonly item: assetTreeNode.AnyAssetTreeNode + readonly visibility: Visibility | undefined + readonly id: backendModule.AssetId + readonly parentId: backendModule.DirectoryId + readonly path: string + readonly initialAssetEvents: readonly AssetEvent[] | null + readonly depth: number readonly state: assetsTable.AssetsTableState readonly hidden: boolean readonly columns: columnUtils.Column[] readonly isKeyboardSelected: boolean - readonly grabKeyboardFocus: (item: assetTreeNode.AnyAssetTreeNode) => void + readonly grabKeyboardFocus: (item: backendModule.AnyAsset) => void readonly onClick: (props: AssetRowInnerProps, event: React.MouseEvent) => void - readonly select: (item: assetTreeNode.AnyAssetTreeNode) => void + readonly select: (item: backendModule.AnyAsset) => void readonly onDragStart?: ( event: React.DragEvent, - item: assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void readonly onDragOver?: ( event: React.DragEvent, - item: assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void readonly onDragLeave?: ( event: React.DragEvent, - item: assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void readonly onDragEnd?: ( event: React.DragEvent, - item: assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void readonly onDrop?: ( event: React.DragEvent, - item: 
assetTreeNode.AnyAssetTreeNode, + item: backendModule.AnyAsset, ) => void - readonly updateAssetRef: React.RefObject< - Record void> - > } /** A row containing an {@link backendModule.AnyAsset}. */ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { - const { isKeyboardSelected, isOpened, select, state, columns, onClick } = props - const { item: rawItem, hidden: hiddenRaw, updateAssetRef, grabKeyboardFocus } = props - const { - nodeMap, - doToggleDirectoryExpansion, - doCopy, - doCut, - doPaste, - doDelete: doDeleteRaw, - doRestore, - doMove, - category, - } = state - const { scrollContainerRef, rootDirectoryId, backend } = state - const { visibilities } = state - - const [item, setItem] = React.useState(rawItem) + const { id, parentId, isKeyboardSelected, isOpened, select, state, columns, onClick } = props + const { path, hidden: hiddenRaw, grabKeyboardFocus, visibility: visibilityRaw, depth } = props + const { initialAssetEvents } = props + const { nodeMap, doCopy, doCut, doPaste, doDelete: doDeleteRaw } = state + const { doRestore, doMove, category, scrollContainerRef, rootDirectoryId, backend } = state + const { doToggleDirectoryExpansion } = state + + const asset = useAssetPassiveListenerStrict(backend.type, id, parentId, category) const driveStore = useDriveStore() const queryClient = useQueryClient() const { user } = useFullUserSession() const setSelectedKeys = useSetSelectedKeys() const selected = useStore(driveStore, ({ visuallySelectedKeys, selectedKeys }) => - (visuallySelectedKeys ?? selectedKeys).has(item.key), + (visuallySelectedKeys ?? 
selectedKeys).has(id), ) const isSoleSelected = useStore( driveStore, @@ -157,7 +150,6 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { const rootRef = React.useRef(null) const dragOverTimeoutHandle = React.useRef(null) const grabKeyboardFocusRef = useSyncRef(grabKeyboardFocus) - const asset = item.item const [innerRowState, setRowState] = React.useState( assetRowUtils.INITIAL_ROW_STATE, ) @@ -185,11 +177,13 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { const isCloud = isCloudCategory(category) const { data: projectState } = useQuery({ - // This is SAFE, as `isOpened` is only true for projects. - // eslint-disable-next-line no-restricted-syntax - ...createGetProjectDetailsQuery.createPassiveListener(item.item.id as backendModule.ProjectId), + ...createGetProjectDetailsQuery.createPassiveListener( + // This is SAFE, as `isOpened` is only true for projects. + // eslint-disable-next-line no-restricted-syntax + asset.id as backendModule.ProjectId, + ), select: (data) => data?.state.type, - enabled: item.type === backendModule.AssetType.project, + enabled: asset.type === backendModule.AssetType.project, }) const toastAndLog = useToastAndLog() @@ -197,9 +191,8 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { const createPermissionMutation = useMutation(backendMutationOptions(backend, 'createPermission')) const associateTagMutation = useMutation(backendMutationOptions(backend, 'associateTag')) - const outerVisibility = visibilities.get(item.key) const insertionVisibility = useStore(driveStore, (driveState) => - driveState.pasteData?.type === 'move' && driveState.pasteData.data.ids.has(item.key) ? + driveState.pasteData?.type === 'move' && driveState.pasteData.data.ids.has(id) ? 
Visibility.faded : Visibility.visible, ) @@ -210,27 +203,15 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { createPermissionVariables.actorsIds[0] === user.userId const visibility = isRemovingSelf ? Visibility.hidden - : outerVisibility === Visibility.visible ? insertionVisibility - : outerVisibility ?? insertionVisibility + : visibilityRaw === Visibility.visible ? insertionVisibility + : visibilityRaw ?? insertionVisibility const hidden = isDeleting || isRestoring || hiddenRaw || visibility === Visibility.hidden const setSelected = useEventCallback((newSelected: boolean) => { const { selectedKeys } = driveStore.getState() - setSelectedKeys(set.withPresence(selectedKeys, item.key, newSelected)) + setSelectedKeys(set.withPresence(selectedKeys, id, newSelected)) }) - React.useEffect(() => { - setItem(rawItem) - }, [rawItem]) - - const rawItemRef = useSyncRef(rawItem) - React.useEffect(() => { - // Mutation is HIGHLY INADVISABLE in React, however it is useful here as we want to update the - // parent's state while avoiding re-rendering the parent. 
- rawItemRef.current.item = asset - }, [asset, rawItemRef]) - const setAsset = setAssetHooks.useSetAsset(asset, setItem) - React.useEffect(() => { if (selected && insertionVisibility !== Visibility.visible) { setSelected(false) @@ -240,30 +221,15 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { React.useEffect(() => { if (isKeyboardSelected) { rootRef.current?.focus() - grabKeyboardFocusRef.current(item) - } - }, [grabKeyboardFocusRef, isKeyboardSelected, item]) - - React.useImperativeHandle(updateAssetRef, () => ({ setAsset, item })) - - if (updateAssetRef.current) { - updateAssetRef.current[item.item.id] = setAsset - } - - React.useEffect(() => { - return () => { - if (updateAssetRef.current) { - // eslint-disable-next-line react-hooks/exhaustive-deps, @typescript-eslint/no-dynamic-delete - delete updateAssetRef.current[item.item.id] - } + grabKeyboardFocusRef.current(asset) } - }, [item.item.id, updateAssetRef]) + }, [grabKeyboardFocusRef, isKeyboardSelected, asset]) const doDelete = React.useCallback( (forever = false) => { - void doDeleteRaw(item.item, forever) + void doDeleteRaw(asset, forever) }, - [doDeleteRaw, item.item], + [doDeleteRaw, asset], ) const clearDragState = React.useCallback(() => { @@ -276,8 +242,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { }, []) const onDragOver = (event: React.DragEvent) => { - const directoryKey = - item.item.type === backendModule.AssetType.directory ? item.key : item.directoryKey + const directoryKey = asset.type === backendModule.AssetType.directory ? 
id : parentId const payload = drag.ASSET_ROWS.lookup(event) const isPayloadMatch = payload != null && payload.every((innerItem) => innerItem.key !== directoryKey) @@ -287,8 +252,8 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { } else { if (nodeMap.current !== nodeParentKeysRef.current?.nodeMap.deref()) { const parentKeys = new Map( - Array.from(nodeMap.current.entries()).map(([id, otherAsset]) => [ - id, + Array.from(nodeMap.current.entries()).map(([otherId, otherAsset]) => [ + otherId, otherAsset.directoryKey, ]), ) @@ -303,7 +268,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { return true } else { // Assume user path; check permissions - const permission = permissions.tryFindSelfPermission(user, item.item.permissions) + const permission = permissions.tryFindSelfPermission(user, asset.permissions) return ( permission != null && permissions.canPermissionModifyDirectoryContents(permission.permission) @@ -314,7 +279,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { })() if ((isPayloadMatch && canPaste) || event.dataTransfer.types.includes('Files')) { event.preventDefault() - if (item.item.type === backendModule.AssetType.directory && state.category.type !== 'trash') { + if (asset.type === backendModule.AssetType.directory && state.category.type !== 'trash') { setIsDraggedOver(true) } } @@ -323,26 +288,26 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { eventListProvider.useAssetEventListener(async (event) => { switch (event.type) { case AssetEventType.move: { - if (event.ids.has(item.key)) { - await doMove(event.newParentKey, item.item) + if (event.ids.has(id)) { + await doMove(event.newParentKey, asset) } break } case AssetEventType.delete: { - if (event.ids.has(item.key)) { + if (event.ids.has(id)) { doDelete(false) } break } case AssetEventType.deleteForever: { - if (event.ids.has(item.key)) { + if (event.ids.has(id)) { doDelete(true) } 
break } case AssetEventType.restore: { - if (event.ids.has(item.key)) { - await doRestore(item.item) + if (event.ids.has(id)) { + await doRestore(asset) } break } @@ -436,7 +401,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { actorsIds: [user.userId], }, ]) - dispatchAssetListEvent({ type: AssetListEventType.delete, key: item.key }) + dispatchAssetListEvent({ type: AssetListEventType.delete, key: id }) } catch (error) { toastAndLog(null, error) } @@ -444,7 +409,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { break } case AssetEventType.temporarilyAddLabels: { - const labels = event.ids.has(item.key) ? event.labelNames : set.EMPTY_SET + const labels = event.ids.has(id) ? event.labelNames : set.EMPTY_SET setRowState((oldRowState) => ( oldRowState.temporarilyAddedLabels === labels && @@ -459,7 +424,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { break } case AssetEventType.temporarilyRemoveLabels: { - const labels = event.ids.has(item.key) ? event.labelNames : set.EMPTY_SET + const labels = event.ids.has(id) ? event.labelNames : set.EMPTY_SET setRowState((oldRowState) => ( oldRowState.temporarilyAddedLabels === set.EMPTY_SET && @@ -481,18 +446,16 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { ) const labels = asset.labels if ( - event.ids.has(item.key) && + event.ids.has(id) && (labels == null || [...event.labelNames].some((label) => !labels.includes(label))) ) { const newLabels = [ ...(labels ?? 
[]), ...[...event.labelNames].filter((label) => labels?.includes(label) !== true), ] - setAsset(object.merger({ labels: newLabels })) try { await associateTagMutation.mutateAsync([asset.id, newLabels, asset.title]) } catch (error) { - setAsset(object.merger({ labels })) toastAndLog(null, error) } } @@ -506,47 +469,24 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { ) const labels = asset.labels if ( - event.ids.has(item.key) && + event.ids.has(id) && labels != null && [...event.labelNames].some((label) => labels.includes(label)) ) { const newLabels = labels.filter((label) => !event.labelNames.has(label)) - setAsset(object.merger({ labels: newLabels })) try { await associateTagMutation.mutateAsync([asset.id, newLabels, asset.title]) } catch (error) { - setAsset(object.merger({ labels })) toastAndLog(null, error) } } break } - case AssetEventType.deleteLabel: { - setAsset((oldAsset) => { - const oldLabels = oldAsset.labels ?? [] - const labels: backendModule.LabelName[] = [] - - for (const label of oldLabels) { - if (label !== event.labelName) { - labels.push(label) - } - } - - return oldLabels.length !== labels.length ? 
object.merge(oldAsset, { labels }) : oldAsset - }) - break - } - case AssetEventType.setItem: { - if (asset.id === event.id) { - setAsset(event.valueOrUpdater) - } - break - } default: { return } } - }, item.initialAssetEvents) + }, initialAssetEvents) switch (asset.type) { case backendModule.AssetType.directory: @@ -555,9 +495,8 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { case backendModule.AssetType.datalink: case backendModule.AssetType.secret: { const innerProps: AssetRowInnerProps = { - key: item.key, - item, - setItem, + asset, + path, state, rowState, setRowState, @@ -608,7 +547,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { unsetModal() onClick(innerProps, event) if ( - item.type === backendModule.AssetType.directory && + asset.type === backendModule.AssetType.directory && eventModule.isDoubleClick(event) && !rowState.isEditingName ) { @@ -617,7 +556,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { window.setTimeout(() => { setSelected(false) }) - doToggleDirectoryExpansion(item.item.id, item.key) + doToggleDirectoryExpansion(asset.id, asset.id) } }} onContextMenu={(event) => { @@ -625,7 +564,7 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { event.preventDefault() event.stopPropagation() if (!selected) { - select(item) + select(asset) } setModal( { if (dragOverTimeoutHandle.current != null) { window.clearTimeout(dragOverTimeoutHandle.current) } - if (item.type === backendModule.AssetType.directory) { + if (asset.type === backendModule.AssetType.directory) { dragOverTimeoutHandle.current = window.setTimeout(() => { - doToggleDirectoryExpansion(item.item.id, item.key, true) + doToggleDirectoryExpansion(asset.id, asset.id, true) }, DRAG_EXPAND_DELAY_MS) } // Required because `dragover` does not fire on `mouseenter`. 
- props.onDragOver?.(event, item) + props.onDragOver?.(event, asset) onDragOver(event) }} onDragOver={(event) => { if (state.category.type === 'trash') { event.dataTransfer.dropEffect = 'none' } - props.onDragOver?.(event, item) + props.onDragOver?.(event, asset) onDragOver(event) }} onDragEnd={(event) => { clearDragState() - props.onDragEnd?.(event, item) + props.onDragEnd?.(event, asset) }} onDragLeave={(event) => { if ( @@ -694,30 +633,28 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { ) { clearDragState() } - props.onDragLeave?.(event, item) + props.onDragLeave?.(event, asset) }} onDrop={(event) => { if (state.category.type !== 'trash') { - props.onDrop?.(event, item) + props.onDrop?.(event, asset) clearDragState() - const [directoryKey, directoryId] = - item.type === backendModule.AssetType.directory ? - [item.key, item.item.id] - : [item.directoryKey, item.directoryId] + const directoryId = + asset.type === backendModule.AssetType.directory ? asset.id : parentId const payload = drag.ASSET_ROWS.lookup(event) if ( payload != null && - payload.every((innerItem) => innerItem.key !== directoryKey) + payload.every((innerItem) => innerItem.key !== directoryId) ) { event.preventDefault() event.stopPropagation() unsetModal() - doToggleDirectoryExpansion(directoryId, directoryKey, true) + doToggleDirectoryExpansion(directoryId, directoryId, true) const ids = payload .filter((payloadItem) => payloadItem.asset.parentId !== directoryId) .map((dragItem) => dragItem.key) cutAndPaste( - directoryKey, + directoryId, directoryId, { backendType: backend.type, ids: new Set(ids), category }, nodeMap.current, @@ -725,10 +662,10 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { } else if (event.dataTransfer.types.includes('Files')) { event.preventDefault() event.stopPropagation() - doToggleDirectoryExpansion(directoryId, directoryKey, true) + doToggleDirectoryExpansion(directoryId, directoryId, true) 
dispatchAssetListEvent({ type: AssetListEventType.uploadFiles, - parentKey: directoryKey, + parentKey: directoryId, parentId: directoryId, files: Array.from(event.dataTransfer.files), }) @@ -741,11 +678,11 @@ export const AssetRow = React.memo(function AssetRow(props: AssetRowProps) { return (