Added has_subtests field to the perfcompare endpoint (#8113)
beatrice-acasandrei authored Jul 10, 2024
1 parent 8368be4 commit 611d99b
Showing 3 changed files with 80 additions and 31 deletions.
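
This commit renames the single parent_signature query parameter and response field into per-side base_parent_signature and new_parent_signature, splits signature_id into base_signature_id and new_signature_id, and adds a has_subtests flag to every comparison row. The sketch below shows how a client might exercise the updated endpoint; the host, the /api/perfcompare/results/ path, and the placeholder revisions and framework id are assumptions (routing is not part of this diff), while the query parameters and row fields come from the changes that follow.

import requests

# Hypothetical request against the perfcompare results endpoint. The URL is an
# assumption; the parameters and the new row fields are taken from this commit.
PERFCOMPARE_URL = "https://treeherder.mozilla.org/api/perfcompare/results/"

params = {
    "base_repository": "try",
    "new_repository": "mozilla-central",
    "base_revision": "0000000000000000000000000000000000000000",  # placeholder revisions
    "new_revision": "1111111111111111111111111111111111111111",
    "framework": 1,  # placeholder framework id
}
response = requests.get(PERFCOMPARE_URL, params=params)
response.raise_for_status()

for row in response.json():
    # Fields renamed or added by this commit.
    print(
        row["base_signature_id"],
        row["new_signature_id"],
        row["base_parent_signature"],
        row["new_parent_signature"],
        row["has_subtests"],
    )
    if row["has_subtests"]:
        # The subtests test below drills down by filtering on the base side;
        # passing the new-side filter as well is assumed to be symmetric.
        subtest_rows = requests.get(
            PERFCOMPARE_URL,
            params={
                **params,
                "base_parent_signature": row["base_signature_id"],
                "new_parent_signature": row["new_signature_id"],
            },
        ).json()
        print(len(subtest_rows), "subtest rows")
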
66 changes: 51 additions & 15 deletions tests/webapp/api/test_perfcompare_api.py
@@ -98,7 +98,12 @@ def test_perfcompare_results_against_no_base(
        perf_datum.push.save()

    response = get_expected(
-        base_sig, extra_options, test_option_collection, new_perf_data_values, base_perf_data_values
+        base_sig,
+        new_sig,
+        extra_options,
+        test_option_collection,
+        new_perf_data_values,
+        base_perf_data_values,
    )

    expected = [
@@ -150,8 +155,11 @@ def test_perfcompare_results_against_no_base(
            "is_improvement": response["is_improvement"],
            "is_regression": response["is_regression"],
            "is_meaningful": response["is_meaningful"],
-            "parent_signature": response["parent_signature"],
-            "signature_id": response["signature_id"],
+            "base_parent_signature": response["base_parent_signature"],
+            "new_parent_signature": response["new_parent_signature"],
+            "base_signature_id": response["base_signature_id"],
+            "new_signature_id": response["new_signature_id"],
+            "has_subtests": response["has_subtests"],
        },
    ]

@@ -170,7 +178,8 @@ def test_perfcompare_results_against_no_base(

    assert response.status_code == 200
    assert expected[0] == response.json()[0]
-    assert response.json()[0]["parent_signature"] is None
+    assert response.json()[0]["base_parent_signature"] is None
+    assert response.json()[0]["new_parent_signature"] is None


def test_perfcompare_results_with_only_one_run_and_diff_repo(
@@ -257,7 +266,12 @@ def test_perfcompare_results_with_only_one_run_and_diff_repo(
        perf_datum.push.save()

    response = get_expected(
-        base_sig, extra_options, test_option_collection, new_perf_data_values, base_perf_data_values
+        base_sig,
+        new_sig,
+        extra_options,
+        test_option_collection,
+        new_perf_data_values,
+        base_perf_data_values,
    )

    expected = [
@@ -309,8 +323,11 @@ def test_perfcompare_results_with_only_one_run_and_diff_repo(
            "is_improvement": response["is_improvement"],
            "is_regression": response["is_regression"],
            "is_meaningful": response["is_meaningful"],
-            "parent_signature": response["parent_signature"],
-            "signature_id": response["signature_id"],
+            "base_parent_signature": response["base_parent_signature"],
+            "new_parent_signature": response["new_parent_signature"],
+            "base_signature_id": response["base_signature_id"],
+            "new_signature_id": response["new_signature_id"],
+            "has_subtests": response["has_subtests"],
        },
    ]

@@ -420,7 +437,12 @@ def test_perfcompare_results_subtests_support(
        perf_datum.push.save()

    response = get_expected(
-        base_sig, extra_options, test_option_collection, new_perf_data_values, base_perf_data_values
+        base_sig,
+        new_sig,
+        extra_options,
+        test_option_collection,
+        new_perf_data_values,
+        base_perf_data_values,
    )

    expected = [
@@ -472,14 +494,17 @@ def test_perfcompare_results_subtests_support(
            "is_improvement": response["is_improvement"],
            "is_regression": response["is_regression"],
            "is_meaningful": response["is_meaningful"],
-            "parent_signature": response["parent_signature"],
-            "signature_id": response["signature_id"],
+            "base_parent_signature": response["base_parent_signature"],
+            "new_parent_signature": response["new_parent_signature"],
+            "base_signature_id": response["base_signature_id"],
+            "new_signature_id": response["new_signature_id"],
+            "has_subtests": response["has_subtests"],
        },
    ]

    query_params = (
        "?base_repository={}&new_repository={}&base_revision={}&new_revision={}&framework={"
-        "}&parent_signature={}".format(
+        "}&base_parent_signature={}".format(
            try_repository.name,
            test_repository.name,
            test_perfcomp_push.revision,
@@ -493,7 +518,8 @@ def test_perfcompare_results_subtests_support(

    assert response.status_code == 200
    assert expected[0] == response.json()[0]
-    assert response.json()[0]["parent_signature"] == test_perf_signature_2.id
+    assert response.json()[0]["base_parent_signature"] == test_perf_signature_2.id
+    assert response.json()[0]["new_parent_signature"] == test_perf_signature_2.id


@skip("test is frequently failing in CI, needs to be fixed, see bug 1809467")
@@ -757,7 +783,12 @@ def test_interval_is_required_when_comparing_without_base(


def get_expected(
-    base_sig, extra_options, test_option_collection, new_perf_data_values, base_perf_data_values
+    base_sig,
+    new_sig,
+    extra_options,
+    test_option_collection,
+    new_perf_data_values,
+    base_perf_data_values,
):
    response = {"option_name": test_option_collection.get(base_sig.option_collection_id, "")}
    test_suite = perfcompare_utils.get_test_suite(base_sig.suite, base_sig.test)
@@ -815,8 +846,13 @@ def get_expected(
    response["is_improvement"] = class_name == "success"
    response["is_regression"] = class_name == "danger"
    response["is_meaningful"] = class_name == ""
-    response["parent_signature"] = (
+    response["base_parent_signature"] = (
        base_sig.parent_signature.id if base_sig.parent_signature else None
    )
-    response["signature_id"] = base_sig.id
+    response["new_parent_signature"] = (
+        new_sig.parent_signature.id if base_sig.parent_signature else None
+    )
+    response["base_signature_id"] = base_sig.id
+    response["new_signature_id"] = new_sig.id
+    response["has_subtests"] = base_sig.has_subtests or new_sig.has_subtests
    return response
28 changes: 17 additions & 11 deletions treeherder/webapp/api/performance_data.py
@@ -883,7 +883,8 @@ def list(self, request):
        interval = query_params.validated_data["interval"]
        framework = query_params.validated_data["framework"]
        no_subtests = query_params.validated_data["no_subtests"]
-        parent_signature = query_params.validated_data["parent_signature"]
+        base_parent_signature = query_params.validated_data["base_parent_signature"]
+        new_parent_signature = query_params.validated_data["new_parent_signature"]

        try:
            new_push = models.Push.objects.get(revision=new_rev, repository__name=new_repo_name)
@@ -917,11 +918,11 @@ def list(self, request):
        push_timestamp = self._get_push_timestamp(base_push, new_push)

        base_signatures = self._get_signatures(
-            base_repo_name, framework, parent_signature, interval, no_subtests
+            base_repo_name, framework, base_parent_signature, interval, no_subtests
        )

        new_signatures = self._get_signatures(
-            new_repo_name, framework, parent_signature, interval, no_subtests
+            new_repo_name, framework, new_parent_signature, interval, no_subtests
        )

        base_perf_data = self._get_perf_data(
@@ -956,7 +957,9 @@ def list(self, request):
            new_sig = new_signatures_map.get(sig_identifier, {})
            new_sig_id = new_sig.get("id", "")
            lower_is_better = base_sig.get("lower_is_better", "")
-            is_empty = not (base_sig and new_sig)
+            is_empty = not (
+                base_sig and new_sig
+            )  # ensures there are signatures for base and new
            if is_empty:
                continue
            base_perf_data_values = base_grouped_values.get(base_sig_id, [])
@@ -1058,12 +1061,14 @@ def list(self, request):
                "is_improvement": is_improvement,
                "is_regression": is_regression,
                "is_meaningful": is_meaningful,
-                "parent_signature": parent_signature
-                if parent_signature
-                else base_sig.get("parent_signature_id", None),
-                "signature_id": base_sig_id,
+                "base_parent_signature": base_sig.get("parent_signature_id", None),
+                "new_parent_signature": new_sig.get("parent_signature_id", None),
+                "base_signature_id": base_sig_id,
                "new_signature_id": new_sig_id,
+                "has_subtests": (
+                    base_sig.get("has_subtests", None) or new_sig.get("has_subtests", None)
+                ),
            }

            self.queryset.append(row_result)

        serializer = self.get_serializer(self.queryset, many=True)
@@ -1133,16 +1138,17 @@ def _get_signatures(repository_name, framework, parent_signature, interval, no_subtests):
        signatures = signatures.values(
            "framework_id",
            "id",
+            "lower_is_better",
+            "has_subtests",
            "extra_options",
            "suite",
+            "signature_hash",
            "platform__platform",
            "test",
            "option_collection_id",
            "parent_signature_id",
            "repository_id",
            "measurement_unit",
-            "lower_is_better",
-            "signature_hash",
            "application",
        )
        return signatures
17 changes: 12 additions & 5 deletions treeherder/webapp/api/performance_serializers.py
@@ -476,7 +476,8 @@ class PerfCompareResultsQueryParamsSerializer(serializers.Serializer):
    framework = serializers.IntegerField(required=False, allow_null=True, default=None)
    interval = serializers.IntegerField(required=False, allow_null=True, default=None)
    no_subtests = serializers.BooleanField(required=False)
-    parent_signature = serializers.CharField(required=False, allow_null=True, default=None)
+    base_parent_signature = serializers.CharField(required=False, allow_null=True, default=None)
+    new_parent_signature = serializers.CharField(required=False, allow_null=True, default=None)

    def validate(self, data):
        if data["base_revision"] is None and data["interval"] is None:
@@ -545,8 +546,11 @@ class PerfCompareResultsSerializer(serializers.ModelSerializer):
    is_improvement = serializers.BooleanField(required=False)
    is_regression = serializers.BooleanField(required=False)
    is_meaningful = serializers.BooleanField(required=False)
-    parent_signature = serializers.IntegerField()
-    signature_id = serializers.IntegerField()
+    base_parent_signature = serializers.IntegerField()
+    new_parent_signature = serializers.IntegerField()
+    base_signature_id = serializers.IntegerField()
+    new_signature_id = serializers.IntegerField()
+    has_subtests = serializers.BooleanField()

    class Meta:
        model = PerformanceSignature
@@ -594,8 +598,11 @@ class Meta:
            "is_improvement",
            "is_regression",
            "is_meaningful",
-            "parent_signature",
-            "signature_id",
+            "base_parent_signature",
+            "new_parent_signature",
+            "base_signature_id",
+            "new_signature_id",
+            "has_subtests",
        ]


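For completeness, here is a minimal sketch (not part of the commit) of how the renamed query parameters validate through the serializer above. It is meant to run inside the Treeherder Django environment (e.g. a Django shell) and assumes the repository/revision fields exercised by the tests are also declared on PerfCompareResultsQueryParamsSerializer; the only fields confirmed by this diff are framework, interval, no_subtests, base_parent_signature, and new_parent_signature.

from treeherder.webapp.api.performance_serializers import (
    PerfCompareResultsQueryParamsSerializer,
)

query = PerfCompareResultsQueryParamsSerializer(
    data={
        "base_repository": "try",
        "new_repository": "mozilla-central",
        "base_revision": "0000000000000000000000000000000000000000",  # placeholder
        "new_revision": "1111111111111111111111111111111111111111",  # placeholder
        "framework": 1,  # placeholder framework id
        # The two CharFields added in this commit, replacing parent_signature.
        "base_parent_signature": "52",
        "new_parent_signature": "53",
    }
)
# Expected to validate because base_revision is supplied; per the validate()
# rule above (and test_interval_is_required_when_comparing_without_base),
# omitting both base_revision and interval would be rejected instead.
print(query.is_valid(), query.errors)
print(query.validated_data.get("base_parent_signature"))  # "52"
print(query.validated_data.get("new_parent_signature"))   # "53"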
