From 54dc1640093a8287fd1ff40304eab1b6a4c30b0f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=A3o=20Andr=C3=A9?= <88906996+joaoandre-avaiga@users.noreply.github.com>
Date: Mon, 25 Nov 2024 19:21:21 -0300
Subject: [PATCH] fix: make sure path is in unix format (#2267)

* fix: make sure path is in unix format

* fix: add normalize_path to utils
---
 taipy/core/_entity/_properties.py          |  3 +++
 taipy/core/common/_utils.py                |  5 +++++
 taipy/core/config/data_node_config.py      |  5 ++++-
 taipy/core/data/_file_datanode_mixin.py    | 10 ++++++----
 tests/core/config/test_data_node_config.py |  5 +++++
 tests/core/data/test_csv_data_node.py      | 16 +++++++++-------
 tests/core/data/test_data_node.py          |  2 +-
 tests/core/data/test_excel_data_node.py    | 16 +++++++++-------
 tests/core/data/test_json_data_node.py     | 14 ++++++++------
 tests/core/data/test_parquet_data_node.py  | 16 +++++++++-------
 tests/core/data/test_pickle_data_node.py   | 12 +++++++-----
 11 files changed, 66 insertions(+), 38 deletions(-)

diff --git a/taipy/core/_entity/_properties.py b/taipy/core/_entity/_properties.py
index 5a641f09e0..66ba0040b8 100644
--- a/taipy/core/_entity/_properties.py
+++ b/taipy/core/_entity/_properties.py
@@ -13,6 +13,7 @@

 from taipy.common.config.common._template_handler import _TemplateHandler as _tpl

+from ..common._utils import _normalize_path
 from ..notification import EventOperation, Notifier, _make_event


@@ -26,6 +27,8 @@ def __init__(self, entity_owner, **kwargs):
         self._pending_deletions = set()

     def __setitem__(self, key, value):
+        if key == "path":
+            value = _normalize_path(value)
         super(_Properties, self).__setitem__(key, value)

         if hasattr(self, "_entity_owner"):
diff --git a/taipy/core/common/_utils.py b/taipy/core/common/_utils.py
index 14c3830781..72c0e98450 100644
--- a/taipy/core/common/_utils.py
+++ b/taipy/core/common/_utils.py
@@ -10,6 +10,7 @@
 # specific language governing permissions and limitations under the License.

 import functools
+import re
 import time
 from collections import namedtuple
 from importlib import import_module
@@ -79,4 +80,8 @@ def _fcts_to_dict(objs):
     return [d for obj in objs if (d := _fct_to_dict(obj)) is not None]


+def _normalize_path(path: str) -> str:
+    return re.sub(r"[\\]+", "/", path)
+
+
 _Subscriber = namedtuple("_Subscriber", "callback params")
diff --git a/taipy/core/config/data_node_config.py b/taipy/core/config/data_node_config.py
index 78cf5a636e..162f9bc5c2 100644
--- a/taipy/core/config/data_node_config.py
+++ b/taipy/core/config/data_node_config.py
@@ -21,6 +21,7 @@
 from taipy.common.config.common.scope import Scope
 from taipy.common.config.section import Section

+from ..common._utils import _normalize_path
 from ..common._warnings import _warn_deprecated
 from ..common.mongo_default_document import MongoDefaultDocument

@@ -271,6 +272,8 @@ def __init__(
         self._storage_type = storage_type
         self._scope = scope
         self._validity_period = validity_period
+        if "path" in properties:
+            properties["path"] = _normalize_path(properties["path"])
         super().__init__(id, **properties)

         # modin exposed type is deprecated since taipy 3.1.0
@@ -318,7 +321,7 @@ def scope(self, val) -> None:

     @property
     def validity_period(self) -> Optional[timedelta]:
-        """ The validity period of the data nodes instantiated from the data node config.
+        """The validity period of the data nodes instantiated from the data node config.

         It corresponds to the duration since the last edit date for which the data node
         can be considered valid. Once the validity period has passed, the data node is
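Note: the `_normalize_path` helper added above is a single regex substitution; any run of one or more backslashes collapses to a single forward slash. A minimal sketch of the observable behavior (the example paths are illustrative only):

    import re

    def _normalize_path(path: str) -> str:
        # A run of one or more backslashes becomes a single forward slash.
        return re.sub(r"[\\]+", "/", path)

    assert _normalize_path(r"data\file.csv") == "data/file.csv"
    assert _normalize_path("C:\\Users\\taipy\\data.csv") == "C:/Users/taipy/data.csv"
    assert _normalize_path("already/unix/style.csv") == "already/unix/style.csv"  # no-op
    assert _normalize_path(r"\\server\share\data.csv") == "/server/share/data.csv"  # a UNC prefix collapses too
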
diff --git a/taipy/core/data/_file_datanode_mixin.py b/taipy/core/data/_file_datanode_mixin.py
index be87316589..e0bd82f438 100644
--- a/taipy/core/data/_file_datanode_mixin.py
+++ b/taipy/core/data/_file_datanode_mixin.py
@@ -20,6 +20,7 @@
 from taipy.common.logger._taipy_logger import _TaipyLogger

 from .._entity._reload import _self_reload
+from ..common._utils import _normalize_path
 from ..reason import InvalidUploadFile, NoFileToDownload, NotAFile, ReasonCollection, UploadFileCanNotBeRead
 from .data_node import DataNode
 from .data_node_id import Edit
@@ -60,13 +61,14 @@ def is_generated(self) -> bool:
     @_self_reload(DataNode._MANAGER_NAME)
     def path(self) -> str:
         """The path to the file data of the data node."""
-        return self._path
+        return _normalize_path(self._path)

     @path.setter
     def path(self, value) -> None:
-        self._path = value
-        self.properties[self._PATH_KEY] = value
-        self.properties[self._IS_GENERATED_KEY] = False
+        _path = _normalize_path(value)
+        self._path = _path
+        self.properties[self._PATH_KEY] = _path  # type: ignore[attr-defined]
+        self.properties[self._IS_GENERATED_KEY] = False  # type: ignore[attr-defined]

     def is_downloadable(self) -> ReasonCollection:
         """Check if the data node is downloadable.
diff --git a/tests/core/config/test_data_node_config.py b/tests/core/config/test_data_node_config.py
index 999370876a..4c0a822925 100644
--- a/tests/core/config/test_data_node_config.py
+++ b/tests/core/config/test_data_node_config.py
@@ -405,3 +405,8 @@ def test_clean_config():
     assert dn1_config.validity_period is dn2_config.validity_period is None
     assert dn1_config.default_path is dn2_config.default_path is None
     assert dn1_config.properties == dn2_config.properties == {}
+
+
+def test_normalize_path():
+    data_node_config = Config.configure_data_node(id="data_nodes1", storage_type="csv", path=r"data\file.csv")
+    assert data_node_config.path == "data/file.csv"
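Note: taken together, the changes above normalize on both write (the `path` setter and `_Properties.__setitem__`) and read (the `path` getter), so a Windows-style path given at configuration time comes back in Unix form everywhere. A hypothetical usage sketch (not part of the patch; the config id and path are illustrative):

    from taipy import Config

    cfg = Config.configure_data_node(id="sales", storage_type="csv", path=r"data\sales.csv")
    assert cfg.path == "data/sales.csv"  # normalized once at configuration time
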
diff --git a/tests/core/data/test_csv_data_node.py b/tests/core/data/test_csv_data_node.py
index 72dc5ed740..95b78520c6 100644
--- a/tests/core/data/test_csv_data_node.py
+++ b/tests/core/data/test_csv_data_node.py
@@ -12,6 +12,7 @@
 import dataclasses
 import os
 import pathlib
+import re
 import uuid
 from datetime import datetime, timedelta
 from time import sleep
@@ -25,6 +26,7 @@
 from taipy.common.config import Config
 from taipy.common.config.common.scope import Scope
 from taipy.common.config.exceptions.exceptions import InvalidConfigurationId
+from taipy.core.common._utils import _normalize_path
 from taipy.core.data._data_manager import _DataManager
 from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.csv import CSVDataNode
@@ -129,7 +131,7 @@ def test_new_csv_data_node_with_existing_file_is_ready_for_reading(self):
     )
     def test_create_with_default_data(self, properties, exists):
         dn = CSVDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
-        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "csvs", dn.id + ".csv")
+        assert dn.path == f"{Config.core.storage_folder}csvs/{dn.id}.csv"
         assert os.path.exists(dn.path) is exists

     def test_set_path(self):
@@ -208,7 +210,7 @@ def test_is_not_downloadable_no_file(self):
         reasons = dn.is_downloadable()
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NoFileToDownload(path, dn.id)) in reasons.reasons
+        assert str(NoFileToDownload(_normalize_path(path), dn.id)) in reasons.reasons

     def test_is_not_downloadable_not_a_file(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample")
@@ -216,12 +218,12 @@ def test_is_not_downloadable_not_a_file(self):
         reasons = dn.is_downloadable()
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NotAFile(path, dn.id)) in reasons.reasons
+        assert str(NotAFile(_normalize_path(path), dn.id)) in reasons.reasons

     def test_get_downloadable_path(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.csv")
         dn = CSVDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
-        assert dn._get_downloadable_path() == path
+        assert re.split(r"[\\/]", dn._get_downloadable_path()) == re.split(r"[\\/]", path)

     def test_get_downloadable_path_with_not_existing_file(self):
         dn = CSVDataNode("foo", Scope.SCENARIO, properties={"path": "NOT_EXISTING.csv", "exposed_type": "pandas"})
@@ -247,7 +249,7 @@ def test_upload(self, csv_file, tmpdir_factory):

         assert_frame_equal(dn.read(), upload_content)  # The content of the dn should change to the uploaded content
         assert dn.last_edit_date > old_last_edit_date
-        assert dn.path == old_csv_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_csv_path)  # The path of the dn should not change

     def test_upload_with_upload_check_with_exception(self, csv_file, tmpdir_factory, caplog):
         old_csv_path = tmpdir_factory.mktemp("data").join("df.csv").strpath
@@ -304,7 +306,7 @@ def check_data_column(upload_path, upload_data):

         assert_frame_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
         assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
-        assert dn.path == old_csv_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_csv_path)  # The path of the dn should not change

         # The upload should succeed when check_data_column() return True
         assert dn._upload(csv_file, upload_checker=check_data_column)
@@ -354,7 +356,7 @@ def check_data_is_positive(upload_path, upload_data):

         np.array_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
         assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
-        assert dn.path == old_csv_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_csv_path)  # The path of the dn should not change

         # The upload should succeed when check_data_is_positive() return True
         assert dn._upload(new_csv_path, upload_checker=check_data_is_positive)
diff --git a/tests/core/data/test_data_node.py b/tests/core/data/test_data_node.py
index bb36a52ac8..108257a436 100644
--- a/tests/core/data/test_data_node.py
+++ b/tests/core/data/test_data_node.py
@@ -667,7 +667,7 @@ def test_path_populated_with_config_default_path(self):
         data_node.path = "baz.p"
         assert data_node.path == "baz.p"

-    def test_track_edit(self):
+    def test_edit_edit_tracking(self):
         dn_config = Config.configure_data_node("A")
         data_node = _DataManager._bulk_get_or_create([dn_config])[dn_config]
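Note: the `_get_downloadable_path` assertions above deliberately avoid comparing raw strings; splitting on either separator makes the comparison platform-neutral. A standalone sketch of the trick (paths are illustrative):

    import re

    a = r"C:\repo\tests\data_sample\example.csv"  # what os.path.join may yield on Windows
    b = "C:/repo/tests/data_sample/example.csv"   # what the data node now reports

    # Identical component lists, regardless of which separator was used.
    assert re.split(r"[\\/]", a) == re.split(r"[\\/]", b)
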
diff --git a/tests/core/data/test_excel_data_node.py b/tests/core/data/test_excel_data_node.py
index 01813a7a51..734f72b2d8 100644
--- a/tests/core/data/test_excel_data_node.py
+++ b/tests/core/data/test_excel_data_node.py
@@ -11,6 +11,7 @@

 import os
 import pathlib
+import re
 import uuid
 from datetime import datetime, timedelta
 from time import sleep
@@ -24,6 +25,7 @@

 from taipy.common.config import Config
 from taipy.common.config.common.scope import Scope
+from taipy.core.common._utils import _normalize_path
 from taipy.core.data._data_manager import _DataManager
 from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.data_node_id import DataNodeId
@@ -183,7 +185,7 @@ def test_set_path(self):
     )
     def test_create_with_default_data(self, properties, exists):
         dn = ExcelDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
-        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "excels", dn.id + ".xlsx")
+        assert dn.path == f"{Config.core.storage_folder}excels/{dn.id}.xlsx"
         assert os.path.exists(dn.path) is exists

     def test_read_write_after_modify_path(self):
@@ -423,7 +425,7 @@ def test_is_not_downloadable_no_file(self):
         reasons = dn.is_downloadable()
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NoFileToDownload(path, dn.id)) in reasons.reasons
+        assert str(NoFileToDownload(_normalize_path(path), dn.id)) in reasons.reasons

     def test_is_not_downloadable_not_a_file(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample")
@@ -431,12 +433,12 @@ def test_is_not_downloadable_not_a_file(self):
         reasons = dn.is_downloadable()
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NotAFile(path, dn.id)) in reasons.reasons
+        assert str(NotAFile(_normalize_path(path), dn.id)) in reasons.reasons

     def test_get_download_path(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.xlsx")
         dn = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
-        assert dn._get_downloadable_path() == path
+        assert re.split(r"[\\/]", dn._get_downloadable_path()) == re.split(r"[\\/]", path)

     def test_get_downloadable_path_with_not_existing_file(self):
         dn = ExcelDataNode("foo", Scope.SCENARIO, properties={"path": "NOT_EXISTING.xlsx", "exposed_type": "pandas"})
@@ -457,7 +459,7 @@ def test_upload(self, excel_file, tmpdir_factory):

         assert_frame_equal(dn.read()["Sheet1"], upload_content)  # The data of dn should change to the uploaded content
         assert dn.last_edit_date > old_last_edit_date
-        assert dn.path == old_xlsx_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_xlsx_path)  # The path of the dn should not change

     def test_upload_with_upload_check_pandas(self, excel_file, tmpdir_factory):
         old_xlsx_path = tmpdir_factory.mktemp("data").join("df.xlsx").strpath
@@ -503,7 +505,7 @@ def check_data_column(upload_path, upload_data):

         assert_frame_equal(dn.read()["Sheet1"], old_data)  # The content of the dn should not change when upload fails
         assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
-        assert dn.path == old_xlsx_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_xlsx_path)  # The path of the dn should not change

         # The upload should succeed when check_data_column() return True
         assert dn._upload(excel_file, upload_checker=check_data_column)
@@ -552,7 +554,7 @@ def check_data_is_positive(upload_path, upload_data):

         np.array_equal(dn.read()["Sheet1"], old_data)  # The content of the dn should not change when upload fails
         assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
-        assert dn.path == old_excel_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_excel_path)  # The path of the dn should not change

         # The upload should succeed when check_data_is_positive() return True
         assert dn._upload(new_excel_path, upload_checker=check_data_is_positive)
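Note: the upload tests wrap their expected values in `_normalize_path` because, on Windows, `tmpdir_factory` yields backslash paths while `dn.path` now always reports the forward-slash form. A sketch with illustrative values only (real tmpdir paths are machine-specific):

    from taipy.core.common._utils import _normalize_path

    old_xlsx_path = r"C:\pytest-tmp\data0\df.xlsx"  # a Windows-style tmpdir path
    assert _normalize_path(old_xlsx_path) == "C:/pytest-tmp/data0/df.xlsx"
    # `dn.path == old_xlsx_path` would fail on Windows; comparing against the
    # normalized value passes on every platform.
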
diff --git a/tests/core/data/test_json_data_node.py b/tests/core/data/test_json_data_node.py
index 6389985442..9319a65bf7 100644
--- a/tests/core/data/test_json_data_node.py
+++ b/tests/core/data/test_json_data_node.py
@@ -13,6 +13,7 @@
 import json
 import os
 import pathlib
+import re
 import uuid
 from dataclasses import dataclass
 from enum import Enum
@@ -26,6 +27,7 @@
 from taipy.common.config import Config
 from taipy.common.config.common.scope import Scope
 from taipy.common.config.exceptions.exceptions import InvalidConfigurationId
+from taipy.core.common._utils import _normalize_path
 from taipy.core.data._data_manager import _DataManager
 from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.data_node_id import DataNodeId
@@ -336,7 +338,7 @@ def test_filter(self, json_file):
     )
     def test_create_with_default_data(self, properties, exists):
         dn = JSONDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
-        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "jsons", dn.id + ".json")
+        assert dn.path == f"{Config.core.storage_folder}jsons/{dn.id}.json"
         assert os.path.exists(dn.path) is exists

     def test_set_path(self):
@@ -405,7 +407,7 @@ def test_is_not_downloadable_no_file(self):
         reasons = dn.is_downloadable()
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NoFileToDownload(path, dn.id)) in reasons.reasons
+        assert str(NoFileToDownload(_normalize_path(path), dn.id)) in reasons.reasons

     def is_not_downloadable_not_a_file(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/json")
@@ -413,12 +415,12 @@ def is_not_downloadable_not_a_file(self):
         reasons = dn.is_downloadable()
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NotAFile(path, dn.id)) in reasons.reasons
+        assert str(NotAFile(_normalize_path(path), dn.id)) in reasons.reasons

     def test_get_download_path(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/json/example_dict.json")
         dn = JSONDataNode("foo", Scope.SCENARIO, properties={"path": path})
-        assert dn._get_downloadable_path() == path
+        assert re.split(r"[\\/]", dn._get_downloadable_path()) == re.split(r"[\\/]", path)

     def test_get_download_path_with_not_existed_file(self):
         dn = JSONDataNode("foo", Scope.SCENARIO, properties={"path": "NOT_EXISTED.json"})
@@ -440,7 +442,7 @@ def test_upload(self, json_file, tmpdir_factory):

         assert dn.read() == upload_content  # The content of the dn should change to the uploaded content
         assert dn.last_edit_date > old_last_edit_date
-        assert dn.path == old_json_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_json_path)  # The path of the dn should not change

     def test_upload_with_upload_check(self, json_file, tmpdir_factory):
         old_json_path = tmpdir_factory.mktemp("data").join("df.json").strpath
@@ -486,7 +488,7 @@ def check_data_keys(upload_path, upload_data):

         assert dn.read() == old_data  # The content of the dn should not change when upload fails
         assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
-        assert dn.path == old_json_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_json_path)  # The path of the dn should not change

         # The upload should succeed when check_data_keys() return True
         assert dn._upload(json_file, upload_checker=check_data_keys)
diff --git a/tests/core/data/test_parquet_data_node.py b/tests/core/data/test_parquet_data_node.py
index 0dfcb46158..02dd247b69 100644
--- a/tests/core/data/test_parquet_data_node.py
+++ b/tests/core/data/test_parquet_data_node.py
@@ -11,6 +11,7 @@

 import os
 import pathlib
+import re
 import uuid
 from datetime import datetime, timedelta
 from importlib import util
@@ -25,6 +26,7 @@
 from taipy.common.config import Config
 from taipy.common.config.common.scope import Scope
 from taipy.common.config.exceptions.exceptions import InvalidConfigurationId
+from taipy.core.common._utils import _normalize_path
 from taipy.core.data._data_manager import _DataManager
 from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.data_node_id import DataNodeId
@@ -142,7 +144,7 @@ def test_new_parquet_data_node_with_existing_file_is_ready_for_reading(self, par
     )
     def test_create_with_default_data(self, properties, exists):
         dn = ParquetDataNode("foo", Scope.SCENARIO, DataNodeId(f"dn_id_{uuid.uuid4()}"), properties=properties)
-        assert dn.path == os.path.join(Config.core.storage_folder.strip("/"), "parquets", dn.id + ".parquet")
+        assert dn.path == f"{Config.core.storage_folder}parquets/{dn.id}.parquet"
         assert os.path.exists(dn.path) is exists

     @pytest.mark.parametrize("engine", __engine)
@@ -248,7 +250,7 @@ def test_is_not_downloadable_no_file(self):
         reasons = dn.is_downloadable()
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NoFileToDownload(path, dn.id)) in reasons.reasons
+        assert str(NoFileToDownload(_normalize_path(path), dn.id)) in reasons.reasons

     def test_is_not_downloadable_not_a_file(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample")
@@ -256,12 +258,12 @@ def test_is_not_downloadable_not_a_file(self):
         reasons = dn.is_downloadable()
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NotAFile(path, dn.id)) in reasons.reasons
+        assert str(NotAFile(_normalize_path(path), dn.id)) in reasons.reasons

     def test_get_downloadable_path(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.parquet")
         dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": path, "exposed_type": "pandas"})
-        assert dn._get_downloadable_path() == path
+        assert re.split(r"[\\/]", dn._get_downloadable_path()) == re.split(r"[\\/]", path)

     def test_get_downloadable_path_with_not_existing_file(self):
         dn = ParquetDataNode("foo", Scope.SCENARIO, properties={"path": "NOT_EXISTING.parquet"})
@@ -287,7 +289,7 @@ def test_upload(self, parquet_file_path, tmpdir_factory):

         assert_frame_equal(dn.read(), upload_content)  # The content of the dn should change to the uploaded content
         assert dn.last_edit_date > old_last_edit_date
-        assert dn.path == old_parquet_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_parquet_path)  # The path of the dn should not change

     def test_upload_with_upload_check_pandas(self, parquet_file_path, tmpdir_factory):
         old_parquet_path = tmpdir_factory.mktemp("data").join("df.parquet").strpath
@@ -332,7 +334,7 @@ def check_data_column(upload_path, upload_data):

         assert_frame_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
         assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
-        assert dn.path == old_parquet_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_parquet_path)  # The path of the dn should not change

         # The upload should succeed when check_data_column() return True
         assert dn._upload(parquet_file_path, upload_checker=check_data_column)
@@ -382,7 +384,7 @@ def check_data_is_positive(upload_path, upload_data):

         np.array_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
         assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
-        assert dn.path == old_parquet_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_parquet_path)  # The path of the dn should not change

         # The upload should succeed when check_data_is_positive() return True
         assert dn._upload(new_parquet_path, upload_checker=check_data_is_positive)
diff --git a/tests/core/data/test_pickle_data_node.py b/tests/core/data/test_pickle_data_node.py
index 9cacdb47ce..57d4259e28 100644
--- a/tests/core/data/test_pickle_data_node.py
+++ b/tests/core/data/test_pickle_data_node.py
@@ -12,6 +12,7 @@
 import os
 import pathlib
 import pickle
+import re
 from datetime import datetime, timedelta
 from time import sleep

@@ -23,6 +24,7 @@
 from taipy.common.config import Config
 from taipy.common.config.common.scope import Scope
 from taipy.common.config.exceptions.exceptions import InvalidConfigurationId
+from taipy.core.common._utils import _normalize_path
 from taipy.core.data._data_manager import _DataManager
 from taipy.core.data._data_manager_factory import _DataManagerFactory
 from taipy.core.data.pickle import PickleDataNode
@@ -220,7 +222,7 @@ def test_is_not_downloadable_no_file(self):
         assert not reasons
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NoFileToDownload(path, dn.id)) in reasons.reasons
+        assert str(NoFileToDownload(_normalize_path(path), dn.id)) in reasons.reasons

     def test_is_not_downloadable_not_a_file(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample")
@@ -228,12 +230,12 @@ def test_is_not_downloadable_not_a_file(self):
         reasons = dn.is_downloadable()
         assert not reasons
         assert len(reasons._reasons) == 1
-        assert str(NotAFile(path, dn.id)) in reasons.reasons
+        assert str(NotAFile(_normalize_path(path), dn.id)) in reasons.reasons

     def test_get_download_path(self):
         path = os.path.join(pathlib.Path(__file__).parent.resolve(), "data_sample/example.p")
         dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": path})
-        assert dn._get_downloadable_path() == path
+        assert re.split(r"[\\/]", dn._get_downloadable_path()) == re.split(r"[\\/]", path)

     def test_get_download_path_with_not_existed_file(self):
         dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": "NOT_EXISTED.p"})
@@ -254,7 +256,7 @@ def test_upload(self, pickle_file_path, tmpdir_factory):

         assert_frame_equal(dn.read(), upload_content)  # The content of the dn should change to the uploaded content
         assert dn.last_edit_date > old_last_edit_date
-        assert dn.path == old_pickle_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_pickle_path)  # The path of the dn should not change

     def test_upload_with_upload_check(self, pickle_file_path, tmpdir_factory):
         old_pickle_path = tmpdir_factory.mktemp("data").join("df.p").strpath
@@ -299,7 +301,7 @@ def check_data_column(upload_path, upload_data):

         assert_frame_equal(dn.read(), old_data)  # The content of the dn should not change when upload fails
         assert dn.last_edit_date == old_last_edit_date  # The last edit date should not change when upload fails
-        assert dn.path == old_pickle_path  # The path of the dn should not change
+        assert dn.path == _normalize_path(old_pickle_path)  # The path of the dn should not change

         # The upload should succeed when check_data_column() return True
         assert dn._upload(pickle_file_path, upload_checker=check_data_column)
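Note: an end-to-end sketch of the behavior this patch pins down (illustrative only; it assumes a configured Taipy Core environment and reuses the constructor form from the tests above):

    from taipy.common.config.common.scope import Scope
    from taipy.core.data.pickle import PickleDataNode

    dn = PickleDataNode("foo", Scope.SCENARIO, properties={"path": r"out\model.p"})
    assert dn.path == "out/model.p"      # the getter normalizes on read
    dn.path = r"out\retrained\model.p"   # the setter normalizes on write
    assert dn.path == "out/retrained/model.p"
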