Merge pull request #191 from Aiven-Open/joelynch/fix-for-aiven-core
tests: use asyncio_mode = auto [DDB-901]
kmichel-aiven authored Mar 12, 2024
2 parents 350b51e + 05c8668 commit 2712e4f
Showing 26 changed files with 19 additions and 158 deletions.
@@ -1,4 +1,4 @@
name: Test Suite (Maximal)
name: Test

on:
pull_request:
30 changes: 0 additions & 30 deletions .github/workflows/tests.yml

This file was deleted.

1 change: 1 addition & 0 deletions pytest.ini
@@ -5,6 +5,7 @@ filterwarnings =
markers =
clickhouse
x86_64: mark test as x86-only
asyncio_mode = auto

# asynctest is dependency of pytest, I think, but we don't have
# control over it anyway.
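
As a sketch of what this setting changes (assuming pytest-asyncio 0.21.x, as pinned below): with asyncio_mode = auto, pytest-asyncio treats every bare async def test it collects as an asyncio test and provides the event loop itself, which is why the per-test @pytest.mark.asyncio markers are deleted throughout this commit. A hypothetical test module illustrating the effect:

# test_example.py -- illustrative only, not part of this commit
import asyncio

async def test_sleep_completes() -> None:
    # No @pytest.mark.asyncio needed: auto mode runs this coroutine on its own event loop.
    assert await asyncio.sleep(0) is None
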
4 changes: 3 additions & 1 deletion requirements.testing.txt
@@ -4,7 +4,9 @@ pre-commit>=2.20.0
# pre-commit tasks in Makefile need these
anyio==3.5.0
pylint==2.17.4
pytest-asyncio==0.14.0
# Note breaking change in 0.23.
# https://github.com/pytest-dev/pytest-asyncio/issues/706
pytest-asyncio==0.21.1
pytest-cov==3.0.0
pytest-mock==3.10.0
pytest-order==1.0.0
@@ -2,6 +2,7 @@
Copyright (c) 2021 Aiven Ltd
See LICENSE for details
"""

from .conftest import ClickHouseCommand, create_clickhouse_service, get_clickhouse_client
from astacus.coordinator.plugins.clickhouse.client import ClickHouseClientQueryError
from collections.abc import Sequence
@@ -17,21 +18,18 @@
]


@pytest.mark.asyncio
async def test_client_execute(clickhouse: Service) -> None:
client = get_clickhouse_client(clickhouse)
response = cast(Sequence[list[str]], await client.execute(b"SHOW DATABASES"))
assert sorted(list(response)) == [["INFORMATION_SCHEMA"], ["default"], ["information_schema"], ["system"]]


@pytest.mark.asyncio
async def test_client_execute_on_system_database(clickhouse: Service) -> None:
client = get_clickhouse_client(clickhouse)
response = await client.execute(b"SELECT currentDatabase()")
assert response == [["system"]]


@pytest.mark.asyncio
async def test_client_execute_with_empty_response(clickhouse: Service) -> None:
# In that case, ClickHouse http protocol doesn't bother with replying with
# an empty json dict and instead replies with an empty string.
@@ -41,7 +39,6 @@ async def test_client_execute_with_empty_response(clickhouse: Service) -> None:
assert response == []


@pytest.mark.asyncio
async def test_client_execute_bounded_connection_failure_time(ports: Ports, clickhouse_command: ClickHouseCommand) -> None:
async with create_clickhouse_service(ports, clickhouse_command) as clickhouse:
client = get_clickhouse_client(clickhouse, timeout=1.0)
@@ -53,7 +50,6 @@ async def test_client_execute_bounded_connection_failure_time(ports: Ports, clic
assert elapsed_time < 10.0


@pytest.mark.asyncio
async def test_client_execute_bounded_query_time(clickhouse: Service) -> None:
client = get_clickhouse_client(clickhouse, timeout=1.0)
start_time = time.monotonic()
@@ -63,7 +59,6 @@ async def test_client_execute_bounded_query_time(clickhouse: Service) -> None:
assert 1.0 <= elapsed_time < 3.0


@pytest.mark.asyncio
async def test_client_execute_timeout_can_be_customized_per_query(clickhouse: Service) -> None:
client = get_clickhouse_client(clickhouse, timeout=10.0)
start_time = time.monotonic()
@@ -74,7 +69,6 @@ async def test_client_execute_timeout_can_be_customized_per_query(clickhouse: Se
assert 1.0 <= elapsed_time < 3.0


@pytest.mark.asyncio
async def test_client_execute_error_returns_status_and_exception_code(clickhouse: Service) -> None:
unknown_table_exception_code = 60
client = get_clickhouse_client(clickhouse)
14 changes: 0 additions & 14 deletions tests/integration/coordinator/plugins/clickhouse/test_plugin.py
@@ -241,7 +241,6 @@ async def setup_cluster_users(clients: Sequence[HttpClickHouseClient]) -> None:
await clients[0].execute(b"CREATE USER `z_\\x80_enjoyer`")


@pytest.mark.asyncio
async def test_restores_access_entities(restored_cluster: Sequence[ClickHouseClient]) -> None:
for client in restored_cluster:
assert await client.execute(
@@ -263,7 +262,6 @@ async def test_restores_access_entities(restored_cluster: Sequence[ClickHouseCli
assert await client.execute(b"SELECT name FROM system.settings_profiles WHERE storage = 'replicated'") == [["érin"]]


@pytest.mark.asyncio
async def test_restores_replicated_merge_tree_tables_data(restored_cluster: Sequence[ClickHouseClient]) -> None:
s1_data = [[123, "foo"], [456, "bar"]]
s2_data = [[789, "baz"]]
@@ -273,7 +271,6 @@ async def test_restores_replicated_merge_tree_tables_data(restored_cluster: Sequ
assert response == expected_data


@pytest.mark.asyncio
async def test_restores_table_with_experimental_types(restored_cluster: Sequence[ClickHouseClient]) -> None:
# The JSON type merges the keys in the response,
# hence the extra zero-valued entries we don't see in the insert queries.
@@ -285,14 +282,12 @@ async def test_restores_table_with_experimental_types(restored_cluster: Sequence
assert response == expected_data


@pytest.mark.asyncio
async def test_restores_table_with_nullable_key(restored_cluster: Sequence[ClickHouseClient]) -> None:
for client in restored_cluster:
response = await client.execute(b"SELECT thekey, thedata FROM default.with_nullable_key ORDER BY thekey")
assert response == []


@pytest.mark.asyncio
async def test_restores_table_with_nested_fields(restored_cluster: Sequence[ClickHouseClient]) -> None:
client = restored_cluster[0]
response = await client.execute(b"SELECT thekey, thedata FROM default.nested_not_flatten ORDER BY thekey")
@@ -305,7 +300,6 @@ async def test_restores_table_with_nested_fields(restored_cluster: Sequence[Clic
assert response == [[123, [4], [5]]]


@pytest.mark.asyncio
async def test_restores_function_table(restored_cluster: Sequence[ClickHouseClient]) -> None:
client = restored_cluster[0]
response = await client.execute(b"SELECT * FROM default.from_function_table")
@@ -321,19 +315,16 @@ async def check_object_storage_data(cluster: Sequence[ClickHouseClient]) -> None
assert response == expected_data


@pytest.mark.asyncio
async def test_restores_url_engine_table(restored_cluster: Sequence[ClickHouseClient]) -> None:
for client in restored_cluster:
response = await client.execute(b"SELECT create_table_query FROM system.tables WHERE name = 'url_engine_table'")
assert response[0][0] == SAMPLE_URL_ENGINE_DDL


@pytest.mark.asyncio
async def test_restores_object_storage_data(restored_cluster: Sequence[ClickHouseClient]) -> None:
await check_object_storage_data(restored_cluster)


@pytest.mark.asyncio
async def test_restores_simple_view(restored_cluster: Sequence[ClickHouseClient]) -> None:
s1_data = [[123 * 2], [456 * 2]]
s2_data = [[789 * 2]]
@@ -343,7 +334,6 @@ async def test_restores_simple_view(restored_cluster: Sequence[ClickHouseClient]
assert response == expected_data


@pytest.mark.asyncio
async def test_restores_materialized_view_data(restored_cluster: Sequence[ClickHouseClient]) -> None:
s1_data = [[123 * 3], [456 * 3]]
s2_data = [[789 * 3]]
@@ -353,21 +343,18 @@ async def test_restores_materialized_view_data(restored_cluster: Sequence[ClickH
assert response == expected_data


@pytest.mark.asyncio
async def test_restores_connectivity_between_distributed_servers(restored_cluster: Sequence[ClickHouseClient]) -> None:
# This only works if each node can connect to all nodes of the cluster named after the Distributed database
for client in restored_cluster:
assert await client.execute(b"SELECT * FROM clusterAllReplicas('default', system.one) ") == [[0], [0], [0]]


@pytest.mark.asyncio
async def test_does_not_restore_log_tables_data(restored_cluster: Sequence[ClickHouseClient]) -> None:
# We restored the table structure but not the data
for client in restored_cluster:
assert await client.execute(b"SELECT thekey, thedata FROM default.memory") == []


@pytest.mark.asyncio
async def test_cleanup_does_not_break_object_storage_disk_files(
ports: Ports,
clickhouse_command: ClickHouseCommand,
@@ -404,7 +391,6 @@ async def test_cleanup_does_not_break_object_storage_disk_files(
await check_object_storage_data(clients)


@pytest.mark.asyncio
async def test_restores_integration_tables(restored_cluster: Sequence[ClickHouseClient]) -> None:
for client in restored_cluster:
assert await table_exists(client, "default.postgresql")
@@ -2,6 +2,7 @@
Copyright (c) 2022 Aiven Ltd
See LICENSE for details
"""

from astacus.coordinator.plugins.clickhouse.replication import get_shard_and_replica
from tests.integration.conftest import create_zookeeper, Ports
from tests.integration.coordinator.plugins.clickhouse.conftest import (
@@ -19,7 +20,6 @@
]


@pytest.mark.asyncio
async def test_get_shard_and_replica(ports: Ports, clickhouse_command: ClickHouseCommand, minio_bucket: MinioBucket) -> None:
async with create_zookeeper(ports) as zookeeper:
async with create_clickhouse_cluster(
@@ -2,6 +2,7 @@
Copyright (c) 2021 Aiven Ltd
See LICENSE for details
"""

from .conftest import ClickHouseCommand, create_clickhouse_cluster, get_clickhouse_client, MinioBucket
from astacus.coordinator.cluster import Cluster
from astacus.coordinator.plugins.base import StepsContext
@@ -21,7 +22,6 @@
]


@pytest.mark.asyncio
async def test_retrieve_tables(ports: Ports, clickhouse_command: ClickHouseCommand, minio_bucket: MinioBucket) -> None:
async with create_zookeeper(ports) as zookeeper:
# We need a "real" cluster to be able to use Replicated databases
@@ -2,6 +2,7 @@
Copyright (c) 2021 Aiven Ltd
See LICENSE for details
"""

from astacus.coordinator.plugins.zookeeper import (
KazooZooKeeperClient,
NodeExistsError,
@@ -42,62 +43,53 @@ def fixture_znode(zookeeper: Service) -> ZNode:
return znode


@pytest.mark.asyncio
async def test_kazoo_zookeeper_client_get(zookeeper_client: KazooZooKeeperClient, znode: ZNode):
async with zookeeper_client.connect() as connection:
assert await connection.get(znode.path) == znode.content


@pytest.mark.asyncio
async def test_kazoo_zookeeper_client_get_missing_node_fails(zookeeper_client: KazooZooKeeperClient) -> None:
async with zookeeper_client.connect() as connection:
with pytest.raises(NoNodeError):
assert await connection.get("/does/not/exist")


@pytest.mark.asyncio
async def test_kazoo_zookeeper_client_get_children(zookeeper_client: KazooZooKeeperClient) -> None:
async with zookeeper_client.connect() as connection:
assert await connection.get_children("/zookeeper") == ["config", "quota"]


@pytest.mark.asyncio
async def test_kazoo_zookeeper_client_get_children_of_missing_node_fails(zookeeper_client: KazooZooKeeperClient) -> None:
async with zookeeper_client.connect() as connection:
with pytest.raises(NoNodeError):
assert await connection.get_children("/does/not/exists")


@pytest.mark.asyncio
async def test_kazoo_zookeeper_client_try_create(zookeeper_client: KazooZooKeeperClient) -> None:
async with zookeeper_client.connect() as connection:
assert await connection.try_create("/new/try_create", b"new_content") is True
assert await connection.get("/new/try_create") == b"new_content"


@pytest.mark.asyncio
async def test_kazoo_zookeeper_client_try_create_failure(zookeeper_client: KazooZooKeeperClient) -> None:
async with zookeeper_client.connect() as connection:
await connection.create("/new/try_create_failure", b"content")
assert await connection.try_create("/new/try_create_failure", b"new_content") is False
assert await connection.get("/new/try_create_failure") == b"content"


@pytest.mark.asyncio
async def test_kazoo_zookeeper_client_create(zookeeper_client: KazooZooKeeperClient) -> None:
async with zookeeper_client.connect() as connection:
await connection.create("/new/create", b"content")
assert await connection.get("/new/create") == b"content"


@pytest.mark.asyncio
async def test_kazoo_zookeeper_client_create_existing_node_fails(zookeeper_client: KazooZooKeeperClient) -> None:
async with zookeeper_client.connect() as connection:
with pytest.raises(NodeExistsError):
await connection.create("/zookeeper", b"content")


@pytest.mark.asyncio
async def test_kazoo_zookeeper_transaction(zookeeper_client: KazooZooKeeperClient) -> None:
async with zookeeper_client.connect() as connection:
transaction = connection.transaction()
@@ -108,7 +100,6 @@ async def test_kazoo_zookeeper_transaction(zookeeper_client: KazooZooKeeperClien
assert await connection.get("/transaction_2") == b"content"


@pytest.mark.asyncio
async def test_kazoo_zookeeper_failing_transaction(zookeeper_client: KazooZooKeeperClient) -> None:
async with zookeeper_client.connect() as connection:
await connection.create("/failing_transaction_2", b"old_content")
@@ -123,7 +114,6 @@ async def test_kazoo_zookeeper_failing_transaction(zookeeper_client: KazooZooKee
assert isinstance(raised.value.results[2], RuntimeInconsistency)


@pytest.mark.asyncio
async def test_kazoo_zookeeper_client_bounded_failure_time(ports: Ports) -> None:
async with create_zookeeper(ports) as zookeeper:
zookeeper_client = KazooZooKeeperClient(hosts=[get_kazoo_host(zookeeper)], user=None, timeout=1)
4 changes: 1 addition & 3 deletions tests/integration/coordinator/plugins/flink/test_steps.py
@@ -2,17 +2,15 @@
Copyright (c) 2022 Aiven Ltd
See LICENSE for details
"""

from astacus.coordinator.plugins.base import StepsContext
from astacus.coordinator.plugins.flink.manifest import FlinkManifest
from astacus.coordinator.plugins.flink.steps import FlinkManifestStep, RestoreDataStep, RetrieveDataStep
from astacus.coordinator.plugins.zookeeper import KazooZooKeeperClient
from unittest.mock import Mock
from uuid import uuid4

import pytest


@pytest.mark.asyncio
async def test_restore_data(zookeeper_client: KazooZooKeeperClient) -> None:
table_id1 = str(uuid4()).partition("-")[0]
table_id2 = str(uuid4()).partition("-")[0]
1 change: 0 additions & 1 deletion tests/system/test_config_reload.py
@@ -11,7 +11,6 @@


@pytest.mark.order("last")
@pytest.mark.asyncio
def test_reload_config(tmpdir, rootdir: str, astacus1: TestNode, astacus2: TestNode, astacus3: TestNode) -> None:
# Update the root_globs config of the first node
create_astacus_config(tmpdir=tmpdir, node=astacus1, plugin_config={"root_globs": ["*.foo"]})
1 change: 0 additions & 1 deletion tests/unit/common/cassandra/test_client.py
@@ -54,7 +54,6 @@ def test_cassandra_client(mocker: MockFixture, ssl: bool) -> None:
assert isinstance(session, client_module.CassandraSession)


@pytest.mark.asyncio
async def test_cassandra_client_run(mocker: MockFixture):
client = create_client(mocker)

2 changes: 0 additions & 2 deletions tests/unit/common/test_limiter.py
@@ -18,7 +18,6 @@
(3, ["s1", "s2", "s3", "e2", "e3", "e1"]),
],
)
@pytest.mark.asyncio
async def test_limiter(limit: int, expected_trace: Sequence[str]) -> None:
trace = []

@@ -44,7 +43,6 @@ async def add_trace(start: str, sleep: float, stop: str):
(3, ["s1", "s2", "s3", "e2", "e3", "e1"]),
],
)
@pytest.mark.asyncio
async def test_gather_limited(limit: int, expected_trace: Sequence[str]) -> None:
trace = []
