Skip to content

Commit

Permalink
Add test for query datasets
Browse files Browse the repository at this point in the history
  • Loading branch information
CamDavidsonPilon committed Nov 28, 2024
1 parent d8e9cb5 commit d7971f4
Show file tree
Hide file tree
Showing 2 changed files with 51 additions and 2 deletions.
4 changes: 2 additions & 2 deletions pioreactor/actions/leader/export_experiment_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
# See create_tables.sql for all tables
from __future__ import annotations

from base64 import b64decode
from contextlib import closing
from contextlib import ExitStack
from datetime import datetime
Expand All @@ -12,7 +13,6 @@
import click
from msgspec import DecodeError
from msgspec import ValidationError
from msgspec.json import decode as json_decode
from msgspec.yaml import decode as yaml_decode

from pioreactor.config import config
Expand Down Expand Up @@ -49,7 +49,7 @@ def load_exportable_datasets() -> dict[str, Dataset]:


def decode_base64(string: str) -> str:
    """Decode a base64-encoded string and return its content as UTF-8 text."""
    raw_bytes = b64decode(string)
    return raw_bytes.decode("utf-8")


def validate_dataset_information(dataset: Dataset, cursor) -> None:
Expand Down
49 changes: 49 additions & 0 deletions pioreactor/tests/test_export_experiment_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,15 @@ def mock_load_exportable_datasets():
default_order_by="timestamp",
timestamp_columns=["timestamp"],
),
"test_base64": Dataset(
dataset_name="test_base64",
display_name="Test Table",
query="SELECT id, BASE64(data) as data FROM test_base64",
has_unit=False,
has_experiment=False,
description="",
default_order_by=None,
),
}
with patch(
"pioreactor.actions.leader.export_experiment_data.load_exportable_datasets",
Expand Down Expand Up @@ -111,6 +120,46 @@ def test_export_experiment_data(temp_zipfile, mock_load_exportable_datasets) ->
) # can't compare exactly since it uses datetime(ts, 'locatime') in sqlite3, and the localtime will vary between CI servers.


def test_export_experiment_data_with_base64_data(temp_zipfile, mock_load_exportable_datasets) -> None:
    """Export a dataset whose query BASE64-encodes a BLOB column and verify the CSV contents."""
    # Build an in-memory SQLite database holding a single base64-encoded JSON payload.
    connection = sqlite3.connect(":memory:")
    connection.execute("CREATE TABLE test_base64 (id INTEGER, data BLOB)")
    connection.execute(
        "INSERT INTO test_base64 (id, data) VALUES (1, 'eyJ2b2x1bWUiOjAuNSwiZHVyYXRpb24iOjIwLjAsInN0YXRlIjoiaW5pdCJ9')"
    )
    connection.commit()

    # Route the exporter's sqlite3.connect call to our in-memory database.
    with patch("sqlite3.connect") as mock_connect:
        mock_connect.return_value = connection

        export_experiment_data(
            experiments=[],
            output=temp_zipfile.strpath,
            partition_by_unit=False,
            dataset_names=["test_base64"],
        )

    # The exported CSV carries a timestamp in its name, so locate it by pattern.
    with zipfile.ZipFile(temp_zipfile.strpath, mode="r") as archive:
        name_pattern = r"test_base64-all_experiments-all_units-\d{14}\.csv"
        csv_name = next(
            (member for member in archive.namelist() if re.match(name_pattern, member)),
            None,
        )
        assert csv_name is not None, "CSV file not found in the zipfile"

        with archive.open(csv_name) as exported:
            body = exported.read().decode("utf-8").strip()

    # One header line and one data row, CRLF-separated per the csv writer.
    header_line, data_line = body.split("\r\n")
    assert header_line == "id,data"
    # Split only on the first comma: the decoded JSON payload itself contains commas.
    id_field, data_field = data_line.split(",", maxsplit=1)
    assert id_field == "1"
    # Decoded JSON is quoted by the csv writer, with embedded quotes doubled.
    assert data_field == '"{""volume"":0.5,""duration"":20.0,""state"":""init""}"'


def test_export_experiment_data_with_experiment(temp_zipfile, mock_load_exportable_datasets) -> None:
# Set up a temporary SQLite database with sample data
conn = sqlite3.connect(":memory:")
Expand Down

0 comments on commit d7971f4

Please sign in to comment.