feat: require safe-ds version 0.22.0 (#98)
### Summary of Changes

* Require `safe-ds` version 0.22.0.
* Handle the new `TabularDataset` (formerly `TaggedTable`); a brief usage sketch follows this list.
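
For context, a minimal sketch (not part of this commit) of what the rename means for consumers, assuming the safe-ds 0.22 API used in the diffs below: a `TabularDataset` wraps a table plus a designated target column, and the runner reduces it to a plain `Table` before serializing.

```python
from safeds.data.labeled.containers import TabularDataset

# A TabularDataset is a table with a designated target column (here "b").
dataset = TabularDataset({"a": [1, 2], "b": [3, 4]}, target_name="b")

# The runner does not serialize the dataset directly; it falls back to the
# underlying Table, so the existing Table handling keeps working.
table = dataset.to_table()
print(table.to_dict())  # {'a': [1, 2], 'b': [3, 4]}
```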

---------

Co-authored-by: megalinter-bot <[email protected]>
lars-reimann and megalinter-bot authored May 1, 2024
1 parent e19a315 commit 6f7c934
Showing 9 changed files with 190 additions and 435 deletions.
516 changes: 102 additions & 414 deletions poetry.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -17,7 +17,7 @@ safe-ds-runner = "safeds_runner.main:main"
 
 [tool.poetry.dependencies]
 python = "^3.11,<3.13"
-safe-ds = ">=0.20,<0.22"
+safe-ds = ">=0.22,<0.23"
 hypercorn = "^0.16.0"
 psutil = "^5.9.8"
 pydantic = "^2.7.0"
9 changes: 7 additions & 2 deletions src/safeds_runner/server/_json_encoder.py
@@ -31,8 +31,12 @@ def default(self, o: Any) -> Any:
         """
         # Moving these imports to the top drastically increases startup time
         from safeds.data.image.containers import Image
+        from safeds.data.labeled.containers import TabularDataset
         from safeds.data.tabular.containers import Table
 
+        if isinstance(o, TabularDataset):
+            o = o.to_table()
+
         if isinstance(o, Table):
             dict_with_nan_infinity = o.to_dict()
             # Convert NaN / Infinity to None, as the JSON encoder generates invalid JSON otherwise
@@ -43,7 +47,8 @@ def default(self, o: Any) -> Any:
                 ]
                 for key in dict_with_nan_infinity
             }
-        if isinstance(o, Image):
+        elif isinstance(o, Image):
             # Send images together with their format, by default images are encoded only as PNG
             return {"format": "png", "bytes": str(base64.encodebytes(o._repr_png_()), "utf-8")}
-        return json.JSONEncoder.default(self, o)
+        else:
+            return json.JSONEncoder.default(self, o)
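
For illustration, a small usage sketch of the behaviour added above (mirroring the updated test in `test_json_encoder.py` further down): a `TabularDataset` is first converted to a `Table`, and NaN values end up as `null` in the emitted JSON.

```python
import json
import math

from safeds.data.labeled.containers import TabularDataset
from safeds_runner.server._json_encoder import SafeDsEncoder

dataset = TabularDataset({"a": [1, 2], "b": [math.nan, 3.5]}, target_name="b")

# SafeDsEncoder converts the dataset to a Table and maps NaN/Infinity to None,
# so the output stays valid JSON.
print(json.dumps(dataset, cls=SafeDsEncoder))
# {"a": [1, 2], "b": [null, 3.5]}
```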
4 changes: 4 additions & 0 deletions src/safeds_runner/server/_messages.py
@@ -228,6 +228,7 @@ def create_placeholder_value(placeholder_query: QueryMessageData, type_: str, va
     message_data:
         Message data of "placeholder_value" messages.
     """
+    import safeds.data.labeled.containers
     import safeds.data.tabular.containers
 
     message: dict[str, Any] = {"name": placeholder_query.name, "type": type_}
@@ -237,6 +238,9 @@ def create_placeholder_value(placeholder_query: QueryMessageData, type_: str, va
     end_index = (
         (start_index + max(placeholder_query.window.size, 0)) if placeholder_query.window.size is not None else None
     )
+    if isinstance(value, safeds.data.labeled.containers.TabularDataset):
+        value = value.to_table()
+
     if isinstance(value, safeds.data.tabular.containers.Table) and (
         placeholder_query.window.begin is not None or placeholder_query.window.size is not None
     ):
9 changes: 6 additions & 3 deletions src/safeds_runner/server/_pipeline_manager.py
@@ -6,12 +6,13 @@
 import logging
 import os
 import runpy
+import traceback
 import typing
 from functools import cached_property
 from pathlib import Path
 from typing import Any
 
-import stack_data
+from safeds.data.labeled.containers import TabularDataset
 
 from safeds_runner.memoization._memoization_map import MemoizationMap
 from safeds_runner.memoization._memoization_utils import (
@@ -432,8 +433,8 @@ def get_backtrace_info(error: BaseException) -> list[dict[str, Any]]:
         List containing file and line information for each stack frame.
     """
     backtrace_list = []
-    for frame in stack_data.core.FrameInfo.stack_data(error.__traceback__):
-        backtrace_list.append({"file": frame.filename, "line": int(frame.lineno)})
+    for frame in traceback.extract_tb(error.__traceback__):
+        backtrace_list.append({"file": frame.filename, "line": frame.lineno})
     return backtrace_list
 
 
@@ -460,6 +461,8 @@ def _get_placeholder_type(value: Any) -> str:
             return "Int"
         case str():
             return "String"
+        case TabularDataset():
+            return "Table"
         case object():
             object_name = type(value).__name__
             match object_name:
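
For reference, a self-contained sketch of the stdlib `traceback.extract_tb` call that replaces `stack_data` above: it yields `FrameSummary` objects whose `filename` and `lineno` attributes provide the backtrace entries.

```python
import traceback
from typing import Any


def backtrace_info(error: BaseException) -> list[dict[str, Any]]:
    # extract_tb walks the traceback and returns one FrameSummary per frame.
    return [
        {"file": frame.filename, "line": frame.lineno}
        for frame in traceback.extract_tb(error.__traceback__)
    ]


try:
    1 / 0
except ZeroDivisionError as error:
    print(backtrace_info(error))  # e.g. [{"file": "example.py", "line": 14}]
```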
35 changes: 31 additions & 4 deletions tests/safeds_runner/memoization/test_memoization_utils.py
@@ -9,6 +9,7 @@
 import numpy as np
 import pytest
 from safeds.data.image.containers import Image
+from safeds.data.labeled.containers import TabularDataset
 from safeds.data.tabular.containers import Table
 from safeds_runner.memoization._memoization_utils import (
     ExplicitIdentityWrapper,
@@ -52,6 +53,7 @@ def test_is_not_primitive(value: Any, primitive: bool) -> None:
         (None, False),
         ("ab", False),
         (object(), False),
+        (TabularDataset({"a": [1], "b": [2]}, "a"), True),
         (Table(), True),
         (
             Image.from_bytes(
@@ -69,6 +71,7 @@ def test_is_not_primitive(value: Any, primitive: bool) -> None:
         "value_none",
         "value_string",
         "value_object",
+        "value_tabular_dataset",
         "value_table",
         "value_image",
     ],
@@ -80,14 +83,15 @@ def test_is_deterministically_hashable(value: Any, deterministically_hashable: b
 @pytest.mark.parametrize(
     argnames="value",
     argvalues=[
+        TabularDataset({"a": [1], "b": [2]}, "a"),
         Table(),
         Image.from_bytes(
             base64.b64decode(
                 "iVBORw0KGgoAAAANSUhEUgAAAAQAAAAECAYAAACp8Z5+AAAAD0lEQVQIW2NkQAOMpAsAAADuAAVDMQ2mAAAAAElFTkSuQmCC",
             ),
         ),
     ],
-    ids=["value_table_plain", "value_image_plain"],
+    ids=["value_tabular_dataset_plain", "value_table_plain", "value_image_plain"],
 )
 def test_has_explicit_identity(value: Any) -> None:
     assert not _has_explicit_identity(value)
@@ -98,14 +102,15 @@ def test_has_explicit_identity(value: Any) -> None:
 @pytest.mark.parametrize(
     argnames="value",
     argvalues=[
+        TabularDataset({"a": [1], "b": [2]}, "a"),
         Table(),
         Image.from_bytes(
             base64.b64decode(
                 "iVBORw0KGgoAAAANSUhEUgAAAAQAAAAECAYAAACp8Z5+AAAAD0lEQVQIW2NkQAOMpAsAAADuAAVDMQ2mAAAAAElFTkSuQmCC",
             ),
         ),
     ],
-    ids=["value_table_plain", "value_image_plain"],
+    ids=["value_tabular_dataset_plain", "value_table_plain", "value_image_plain"],
 )
 def test_explicit_identity_deterministic_hash(value: Any) -> None:
     assert not _has_explicit_identity(value)
@@ -117,14 +122,15 @@ def test_explicit_identity_deterministic_hash(value: Any) -> None:
 @pytest.mark.parametrize(
     argnames="value",
     argvalues=[
+        TabularDataset({"a": [1], "b": [2]}, "a"),
         Table(),
         Image.from_bytes(
             base64.b64decode(
                 "iVBORw0KGgoAAAANSUhEUgAAAAQAAAAECAYAAACp8Z5+AAAAD0lEQVQIW2NkQAOMpAsAAADuAAVDMQ2mAAAAAElFTkSuQmCC",
             ),
         ),
     ],
-    ids=["value_table_plain", "value_image_plain"],
+    ids=["value_tabular_dataset_plain", "value_table_plain", "value_image_plain"],
 )
 def test_explicit_identity_shared_memory(value: Any) -> None:
     _shared_memory_serialize_and_assign(value)
@@ -134,6 +140,7 @@ def test_explicit_identity_shared_memory(value: Any) -> None:
 @pytest.mark.parametrize(
     argnames="value,hashable,exception",
     argvalues=[
+        (TabularDataset({"a": [1], "b": [2]}, "a"), True, None),
         (Table(), True, None),
         (
             Image.from_bytes(
@@ -149,6 +156,7 @@ def test_explicit_identity_shared_memory(value: Any) -> None:
         (lambda a, b: a + b, False, pickle.PicklingError),
     ],
     ids=[
+        "value_tabular_dataset_hashable",
         "value_table_hashable",
         "value_image_hashable",
         "value_dict_unhashable",
@@ -175,6 +183,7 @@ def test_make_hashable_non_wrapper(value: Any, hashable: bool, exception: type[B
 @pytest.mark.parametrize(
     argnames="value,wrapper",
     argvalues=[
+        (TabularDataset({"a": [1], "b": [2]}, "a"), True),
         (Table(), True),
         (
             Image.from_bytes(
@@ -186,6 +195,7 @@ def test_make_hashable_non_wrapper(value: Any, hashable: bool, exception: type[B
             ),
         ),
     ids=[
+        "value_tabular_dataset",
         "value_table",
         "value_image",
     ],
@@ -238,13 +248,14 @@ def test_memory_usage(value: Any, expected_size: int) -> None:
         1,
         [1, 2, 3],
         (1, 2, 3),
+        TabularDataset({"a": [1], "b": [2]}, "a"),
         Table(),
         (Table(), Table()),
         {"a": Table()},
         {"a", "b", Table()},
         frozenset({"a", "b", Table()}),
     ],
-    ids=["int", "list", "tuple", "table", "tuple_table", "dict", "set", "frozenset"],
+    ids=["int", "list", "tuple", "tabular_dataset", "table", "tuple_table", "dict", "set", "frozenset"],
 )
 def test_wrap_value_to_shared_memory(value: Any) -> None:
     def _delete_unpackvalue_field(wrapped_object: Any) -> None:
@@ -307,6 +318,7 @@ def test_wrap_value_to_shared_memory_non_deterministic(value: Any) -> None:
         1,
         [1, 2, 3],
         (1, 2, 3),
+        TabularDataset({"a": [1], "b": [2]}, "a"),
         Table(),
         (Table(), Table()),
         {"a": Table()},
@@ -319,6 +331,7 @@ def test_wrap_value_to_shared_memory_non_deterministic(value: Any) -> None:
         "int",
         "list",
         "tuple",
+        "tabular_dataset",
         "table",
         "tuple_table",
         "dict",
@@ -355,6 +368,7 @@ def test_serialize_value_to_shared_memory_non_lazy(value: Any) -> None:
 @pytest.mark.parametrize(
     argnames="value",
     argvalues=[
+        TabularDataset({"a": [1], "b": [2]}, "a"),
         Table(),
         Image.from_bytes(
             base64.b64decode(
@@ -363,6 +377,7 @@ def test_serialize_value_to_shared_memory_non_lazy(value: Any) -> None:
             ),
         ),
     ids=[
+        "value_tabular_dataset",
         "value_table",
         "value_image",
     ],
@@ -387,6 +402,10 @@ def test_compare_wrapper_to_lazy(value: Any) -> None:
 @pytest.mark.parametrize(
     argnames="value1,value2",
     argvalues=[
+        (
+            TabularDataset({"a": [1], "b": [2]}, "a"),
+            TabularDataset({"a": [1], "b": [2]}, "a"),
+        ),
         (Table(), Table()),
         (
             Image.from_bytes(
@@ -402,6 +421,7 @@ def test_compare_wrapper_to_lazy(value: Any) -> None:
             ),
         ),
     ids=[
+        "value_tabular_dataset",
         "value_table",
         "value_image",
     ],
@@ -449,6 +469,10 @@ def test_compare_wrapper_to_lazy_multi(value1: Any, value2: Any) -> None:
 @pytest.mark.parametrize(
     argnames="value1,value2",
     argvalues=[
+        (
+            TabularDataset({"a": [1], "b": [2]}, "a"),
+            TabularDataset({"a": [1], "b": [2]}, "a"),
+        ),
         (Table(), Table()),
         (
             Image.from_bytes(
@@ -464,6 +488,7 @@ def test_compare_wrapper_to_lazy_multi(value1: Any, value2: Any) -> None:
             ),
         ),
     ids=[
+        "value_tabular_dataset",
         "value_table",
         "value_image",
     ],
@@ -481,6 +506,7 @@ def test_wrapper_hash(value1: Any, value2: Any) -> None:
 @pytest.mark.parametrize(
     argnames="value",
     argvalues=[
+        TabularDataset({"a": [1], "b": [2]}, "a"),
         Table(),
         Image.from_bytes(
             base64.b64decode(
@@ -489,6 +515,7 @@ def test_wrapper_hash(value1: Any, value2: Any) -> None:
             ),
         ),
     ids=[
+        "value_tabular_dataset",
         "value_table",
         "value_image",
     ],
12 changes: 10 additions & 2 deletions tests/safeds_runner/server/test_json_encoder.py
@@ -7,6 +7,7 @@
 
 import pytest
 from safeds.data.image.containers import Image
+from safeds.data.labeled.containers import TabularDataset
 from safeds.data.tabular.containers import Table
 from safeds_runner.server._json_encoder import SafeDsEncoder
 
@@ -15,7 +16,14 @@
     argnames="data,expected_string",
     argvalues=[
         (
-            Table.from_dict({"a": [1, 2], "b": [3.2, 4.0], "c": [math.nan, 5.6], "d": [5, -6]}),
+            TabularDataset(
+                {"a": [1, 2], "b": [3.2, 4.0], "c": [math.nan, 5.6], "d": [5, -6]},
+                target_name="d",
+            ),
             '{"a": [1, 2], "b": [3.2, 4.0], "c": [null, 5.6], "d": [5, -6]}',
         ),
         (
+            Table({"a": [1, 2], "b": [3.2, 4.0], "c": [math.nan, 5.6], "d": [5, -6]}),
+            '{"a": [1, 2], "b": [3.2, 4.0], "c": [null, 5.6], "d": [5, -6]}',
+        ),
+        (
@@ -30,7 +38,7 @@
             ),
         ),
     ],
-    ids=["encode_table", "encode_image_png"],
+    ids=["encode_tabular_dataset", "encode_table", "encode_image_png"],
 )
 def test_encoding_custom_types(data: Any, expected_string: str) -> None:
     assert json.dumps(data, cls=SafeDsEncoder) == expected_string
@@ -3,6 +3,7 @@
 from typing import Any
 
 import pytest
+from safeds.data.labeled.containers import TabularDataset
 from safeds_runner.server._pipeline_manager import _get_placeholder_type
 
 
@@ -26,6 +27,7 @@
         (object(), "object"),
         (None, "Null"),
         (lambda x: x + 1, "Callable"),
+        (TabularDataset({"a": [1], "b": [2]}, "a"), "Table"),
     ],
     ids=[
         "boolean_true",
@@ -45,6 +47,7 @@
         "object",
         "null",
         "callable",
+        "tabular_dataset",
     ],
 )
 def test_should_placeholder_type_match_safeds_dsl_placeholder(value: Any, type_: str) -> None: