diff --git a/playwright_tests/core/utilities.py b/playwright_tests/core/utilities.py index 046201a2041..155f9b291ee 100644 --- a/playwright_tests/core/utilities.py +++ b/playwright_tests/core/utilities.py @@ -1,3 +1,5 @@ +from typing import Any, Union + import requests import time import re @@ -5,11 +7,13 @@ import random import os from datetime import datetime + +from nltk import SnowballStemmer from playwright.sync_api import Page from playwright_tests.messages.homepage_messages import HomepageMessages from requests.exceptions import HTTPError - from playwright_tests.pages.top_navbar import TopNavbar +from playwright_tests.test_data.search_synonym import SearchSynonyms class Utilities: @@ -302,3 +306,116 @@ def extract_date_to_digit_format(self, date_str: str) -> int: """ date = datetime.strptime(date_str, "%b %d, %Y") return int(date.strftime("%m%d%Y")) + + def tokenize_string(self, text: str) -> list[str]: + """ + This helper function tokenizes the text into individual words and removes any non + alphanumeric characters. + """ + return re.findall(r'\b\w+\b', text.lower()) + + def stem_tokens(self, tokens: list[str], search_term_locale: str): + """ + This helper function stems each token and returns the list of stemmed tokens. + """ + stemmer = SnowballStemmer(search_term_locale) + return [stemmer.stem(token) for token in tokens] + + def search_result_check(self, search_result, search_term, search_term_locale: str, + exact_phrase: bool): + """ + Checks if the search result contains: + 1. Any variation of the provided keyword. + 2. The search term or any of its synonyms. + 3. The exact phrase or any component of the phrase. + 4. Variations of the search term by stemming. + 5. Variations of the search term by stemming for non-US words. + """ + + search_term_split = search_term.lower().split() + search_results_lower = search_result.lower() + + # Check if searching for exact phrase. + if exact_phrase: + return self._exact_phrase_check(search_result, search_term) + + # Check if keyword variations + if self._contains_keyword_variation(search_results_lower, search_term_split): + print(f"The {search_term} was found in search result variation.") + return True + + # Check synonyms of split terms and the whole term + match_found, matching_synonym = self._contains_synonym(search_results_lower, search_term, + search_term_split) + if match_found: + print(f"Search result for {search_term} found in synonym: {matching_synonym}") + return True + + # Check if exact phrase match + if ' '.join(search_term_split) in search_results_lower: + print(f"Search results for {search_term} found in exact match") + return True + + # Check each term component + if any(term in search_results_lower for term in search_term_split): + print(f"Search result for {search_term} found in a component of the search result") + return True + + # Check stemming in search results. 
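For reference, a minimal sketch (not part of the patch) of what `tokenize_string` and `stem_tokens` produce together. `SnowballStemmer` is purely algorithmic, so it should not need an `nltk.download()` step; the sketch assumes `search_term_locale` is passed as a Snowball language name such as `"english"`.

```python
import re
from nltk import SnowballStemmer

# Mirrors tokenize_string + stem_tokens from the patch, outside the page object.
stemmer = SnowballStemmer("english")
tokens = re.findall(r'\b\w+\b', "Clearing cookies in Firefox".lower())
print(tokens)                             # ['clearing', 'cookies', 'in', 'firefox']
print([stemmer.stem(t) for t in tokens])  # roughly ['clear', 'cooki', 'in', 'firefox']
```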
+ stemmed_tokens = self.stem_tokens(self.tokenize_string(search_result), search_term_locale) + stemmed_search_term = self.stem_tokens(self.tokenize_string(search_term), + search_term_locale) + + if any(term in stemmed_tokens for term in stemmed_search_term): + print(f"Search result for {search_term} found in stemmed word") + return True + + if self._contains_synonym(search_results_lower, stemmed_search_term, search_term_split)[0]: + print(f"Search result for {search_term} found in stemmed word synonym") + return True + + print("Search result not found!") + return False + + def _contains_synonym(self, search_result_lower, search_term: Union[str, list[str]], + search_term_split) -> [bool, Any]: + """ + This helper function checks if any synonyms of a given search term or its components + (split term) are present in the search result. + """ + synonyms = None + + if isinstance(search_term, list): + for term in search_term: + synonyms = SearchSynonyms.synonym_dict.get(term.lower(), []) + else: + synonyms = SearchSynonyms.synonym_dict.get(search_term.lower(), []) + + for term in search_term_split: + synonyms.extend(SearchSynonyms.synonym_dict.get(term, [])) + + for synonym in synonyms: + if synonym.lower() in search_result_lower: + return True, synonym.lower() + return False, None + + def _contains_keyword_variation(self, search_result_lower, search_term_split): + """ + This helper function checks if any variation of the keyword (components of the search term) + are present in the search results. This includes different cases (lowercase or uppercase) + and simple stemmed forms (by removing the last character). + """ + keyword_variations = [ + variation + for term in search_term_split + for variation in [term, term.capitalize(), term.upper(), term[:-1], + term[:-1].capitalize()] + ] + return any(variation in search_result_lower for variation in keyword_variations) + + def _exact_phrase_check(self, search_result: str, search_term: str) -> bool: + search_term = search_term.replace('"', '').lower() + print(f"Search term is: {search_term}") + search_result = search_result.lower() + print(f"Search result is: {search_result}") + return search_term in search_result diff --git a/playwright_tests/pages/search/search_page.py b/playwright_tests/pages/search/search_page.py index 8f07fe3f312..37c49b05aec 100644 --- a/playwright_tests/pages/search/search_page.py +++ b/playwright_tests/pages/search/search_page.py @@ -3,48 +3,131 @@ class SearchPage(BasePage): - __search_bar = "//form[@id='support-search-masthead']/input[@id='search-q']" - __search_bar_button = "//form[@id='support-search-masthead']/button[@class='search-button']" + """ + Locators belonging to the searchbar. 
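A quick illustration (not part of the patch, made-up strings) of the variation list `_contains_keyword_variation` builds: dropping the last character lets a plural search term match its singular form without involving the stemmer.

```python
search_result_lower = "how to use the bookmark manager"
search_term_split = ["bookmarks"]

# Same list comprehension as _contains_keyword_variation above.
keyword_variations = [
    variation
    for term in search_term_split
    for variation in [term, term.capitalize(), term.upper(), term[:-1],
                      term[:-1].capitalize()]
]
print(keyword_variations)
# ['bookmarks', 'Bookmarks', 'BOOKMARKS', 'bookmark', 'Bookmark']
print(any(v in search_result_lower for v in keyword_variations))  # True ('bookmark')
```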
+ """ + __searchbar = "//form[@id='support-search-masthead']/input[@id='search-q']" + __searchbar_search_button = "//form[@id='support-search-masthead']/button" + __search_results_header = "//div[@class='home-search-section--content']/h2" __popular_searches = "//p[@class='popular-searches']/a" - __search_results_article_titles = "//h3[@class='sumo-card-heading']/a" + + """ + Locators belonging to the search results filter + """ + __view_all_filter = "//span[text()='View All']/..[0]" + __help_articles_only_filter = "//span[text()='Help Articles Only']/..[0]" + __community_discussions_only_filter = "//span[text()='Community Discussion Only']/..[0]" + + """ + Locators belonging to the search results + """ + __search_results_titles = "//section[@class='topic-list content-box']//a[@class='title']" __search_results_articles_summary = "//div[@class='topic-article--text']/p" + __search_results_content = "//section[@class='topic-list content-box']" + __all_bolded_article_content = ("//h3[@class='sumo-card-heading']/a/../following-sibling::p/" + "strong") + + """ + Locators belonging to the side navbar + """ + __search_results_side_nav_header = "//h3[@class='sidebar-subheading']" + __search_results_side_nav_selected_item = "//ul[@id='product-filter']//li[@class='selected']/a" + __search_results_side_nav_elements = "//ul[@id='product-filter']//a" + + """ + General locators + """ + __page_header = "//h1[@class='sumo-page-heading-xl']" def __init__(self, page: Page): super().__init__(page) - def _get_text_of_searchbar_field(self) -> str: - return super()._get_text_of_element(self.__search_bar) + """ + Actions against the search results + """ + def click_on_a_particular_popular_search(self, popular_search_option: str): + self._click(f"//p[@class='popular-searches']/a[text()='{popular_search_option}']") + + def get_search_result_summary_text_of_a_particular_article(self, article_title) -> str: + return self._get_text_of_element(f"//h3[@class='sumo-card-heading']/" + f"a[normalize-space(text())='{article_title}']/../" + f"../p") + + def is_a_particular_article_visible(self, article_title: str) -> bool: + return self._is_element_visible(f"//h3[@class='sumo-card-heading']/" + f"a[normalize-space(text())='{article_title}']") + + def click_on_a_particular_article(self, article_title: str): + self._click(f"//h3[@class='sumo-card-heading']/" + f"a[normalize-space(text())='{article_title}']") + + def get_all_bolded_content(self) -> list[str]: + return self._get_text_of_elements(self.__all_bolded_article_content) + + def get_all_search_results_article_bolded_content(self, article_title: str) -> list[str]: + if "'" in article_title: + parts = article_title.split("'") + if len(parts) > 1: + # Construct XPath using concat function + xpath = (f"//h3[@class='sumo-card-heading']/a[normalize-space(text())=concat(" + f"'{parts[0]}', \"'\", '{parts[1]}')]/../following-sibling::p/strong") + else: + # Handle the case where the text ends with a single quote + xpath = (f"//h3[@class='sumo-card-heading']/a[normalize-space(text())=concat(" + f"'{parts[0]}', \"'\")]/../following-sibling::p/strong") + else: + # Construct XPath without concat for texts without single quotes + + xpath = (f"//h3[@class='sumo-card-heading']/a[normalize-space(text()" + f")='{article_title}']/../following-sibling::p/strong") + return self._get_text_of_elements(xpath) + + def get_all_search_results_article_titles(self) -> list[str]: + return self._get_text_of_elements(self.__search_results_titles) + + def get_all_search_results_articles_summary(self) -> 
list[str]: + return self._get_text_of_elements(self.__search_results_articles_summary) + + def get_locator_of_a_particular_article(self, article_title: str) -> Locator: + return self._get_element_locator(f"//h3[@class='sumo-card-heading']/" + f"a[normalize-space(text())='{article_title}']") + + def is_search_content_section_displayed(self) -> bool: + return self._is_element_visible(self.__search_results_content) - def _type_into_searchbar(self, text: str): - super()._type(self.__search_bar, text, 200) + """ + Actions against the search bar + """ - def _clear_the_searchbar(self): - super()._clear_field(self.__search_bar) + def get_text_of_searchbar_field(self) -> str: + return self._get_element_input_value(self.__searchbar) - def _click_on_search_button(self): - super()._click(self.__search_bar_button) + def fill_into_searchbar(self, text: str): + self._fill(self.__searchbar, text) - def _get_list_of_popular_searches(self) -> list[str]: - return super()._get_text_of_elements(self.__popular_searches) + def clear_the_searchbar(self): + self._clear_field(self.__searchbar) - def _click_on_a_particular_popular_search(self, popular_search_option: str): - super()._click(f"//p[@class='popular-searches']/a[text()='{popular_search_option}']") + def click_on_search_button(self): + self._click(self.__searchbar_search_button) - def _get_search_result_summary_text_of_a_particular_article(self, article_title) -> str: - return super()._get_text_of_element(f"//h3[@class='sumo-card-heading']/" - f"a[normalize-space(text())='{article_title}']/../" - f"../p") + def get_list_of_popular_searches(self) -> list[str]: + return self._get_text_of_elements(self.__popular_searches) - def _click_on_a_particular_article(self, article_title): - super()._click(f"//h3[@class='sumo-card-heading']/" - f"a[normalize-space(text())='{article_title}']") + def click_on_a_popular_search(self, popular_search_name: str): + self._click(f"//p[@class='popular-searches']/a[text()='{popular_search_name}']") - def _get_all_search_results_article_titles(self) -> list[str]: - return super()._get_text_of_elements(self.__search_results_article_titles) + """ + Actions against the side navbar + """ + def get_the_highlighted_side_nav_item(self) -> str: + return self._get_text_of_element(self.__search_results_side_nav_selected_item) - def _get_all_search_results_articles_summary(self) -> list[str]: - return super()._get_text_of_elements(self.__search_results_articles_summary) + def click_on_a_particular_side_nav_item(self, product_name: str): + self._click(f"//ul[@id='product-filter']//a[normalize-space(text())='{product_name}']") - def _get_locator_of_a_particular_article(self, article_title: str) -> Locator: - return super()._get_element_locator(f"//h3[@class='sumo-card-heading']/" - f"a[normalize-space(text())='{article_title}']") + """ + General page actions + """ + def get_search_results_header(self) -> str: + return self._get_text_of_element(self.__search_results_header) diff --git a/playwright_tests/test_data/search_synonym.py b/playwright_tests/test_data/search_synonym.py new file mode 100644 index 00000000000..c010a08e8e2 --- /dev/null +++ b/playwright_tests/test_data/search_synonym.py @@ -0,0 +1,152 @@ +class SearchSynonyms: + + synonym_dict = { + # English dialects + 'favorite': ['favourite'], + 'favourite': ['favorite'], + + # Media + 'multimedia': ['media', 'audio', 'sound', 'voice', 'music', 'mp3', 'song', 'picture', + 'photo', 'image', 'graphic', 'video', 'movie', 'film'], + 'media': ['multimedia', 'audio', 'sound', 'voice', 
'music', 'mp3', 'song', 'picture', + 'photo', 'image', 'graphic', 'video', 'movie', 'film'], + 'audio': ['media', 'sound', 'voice', 'music', 'mp3', 'song'], + 'sound': ['media', 'audio', 'voice', 'music', 'mp3', 'song'], + 'voice': ['media', 'audio', 'sound'], + 'music': ['media', 'audio', 'sound', 'mp3', 'song'], + 'mp3': ['media', 'audio', 'sound', 'music', 'song'], + 'song': ['media', 'audio', 'sound', 'music', 'mp3'], + 'picture': ['media', 'photo', 'image', 'graphic'], + 'photo': ['media', 'picture', 'image', 'graphic'], + 'image': ['media', 'picture', 'photo', 'graphic'], + 'graphic': ['media', 'picture', 'photo', 'image'], + 'video': ['media', 'movie', 'film'], + 'movie': ['media', 'video', 'film'], + 'film': ['media', 'video', 'movie'], + + # Technical terms + 'site': ['website', 'web site', 'page', 'web page', 'webpage'], + 'website': ['site', 'web site', 'page', 'web page', 'webpage'], + 'web site': ['site', 'website', 'page', 'web page', 'webpage'], + 'page': ['site', 'website', 'web site', 'web page', 'webpage'], + 'web page': ['site', 'website', 'web site', 'page', 'webpage'], + 'webpage': ['site', 'website', 'web site', 'page', 'web page'], + 'url': ['uri', 'link', 'hyperlink', 'web address', 'address'], + 'uri': ['url', 'link', 'hyperlink', 'web address', 'address'], + 'link': ['url', 'uri', 'hyperlink', 'web address', 'address'], + 'hyperlink': ['url', 'uri', 'link', 'web address', 'address'], + 'web address': ['url', 'uri', 'link', 'hyperlink', 'address'], + 'address': ['url', 'uri', 'link', 'hyperlink', 'web address'], + 'cache': ['cash', 'cookies'], + 'cash': ['cache', 'cookies'], + 'cookies': ['cache', 'cash'], + 'popup': ['pop up'], + 'pop up': ['popup'], + 'popups': ['pop-up', 'pop-ups', 'pop ups'], + 'virus': ['malware'], + 'malware': ['virus'], + 'mouse': ['cursor'], + 'cursor': ['mouse'], + 'vpn': ['virtual private network'], + 'virtual private network': ['vpn'], + 'mobile': ['phone', 'smartphone', 'smart phone'], + 'phone': ['mobile', 'smartphone', 'smart phone'], + 'smartphone': ['mobile', 'phone', 'smart phone'], + 'smart phone': ['mobile', 'phone', 'smartphone'], + + # Actions + 'delete': ['clear', 'remove'], + 'clear': ['delete', 'remove', 'deleting'], + 'remove': ['delete', 'clear'], + 'start': ['open', 'run'], + 'open': ['start', 'run'], + 'run': ['start', 'open'], + 'change': ['set'], + 'set': ['change'], + 'reset': ['refresh'], + 'refresh': ['reset'], + 'disable': ['block', 'turn off', 'deactivate', 'block'], + 'block': ['disable', 'turn off', 'deactivate'], + 'turn off': ['disable', 'block', 'deactivate'], + 'deactivate': ['disable', 'block', 'turn off'], + 'enable': ['activate', 'allow', 'turn on'], + 'activate': ['enable', 'allow', 'turn on'], + 'allow': ['enable', 'activate', 'turn on'], + 'turn on': ['enable', 'activate', 'allow'], + 'update': ['upgrade'], + 'upgrade': ['update'], + 'signin': ['signup', 'sign up', 'login'], + 'signup': ['signin', 'sign up', 'login'], + 'sign up': ['signin', 'signup', 'login'], + 'login': ['signin', 'signup', 'sign up'], + + # Brands + 'browser': ['firefox'], + 'firefox': ['browser'], + 'social': ['facebook', 'face book', 'twitter', 'myspace', 'reddit', 'instagram'], + 'facebook': ['social', 'face book'], + 'face book': ['social', 'facebook'], + 'twitter': ['social'], + 'myspace': ['social'], + 'reddit': ['social'], + 'instagram': ['social'], + 'modzilla': ['mozilla'], + 'mozzila': ['mozilla'], + 'mozzilla': ['mozilla'], + 'mozila': ['mozilla'], + 'ios': ['ipad', 'iphone', 'ipod'], + 'ipad': ['ios', 'iphone', 'ipod'], 
+ 'iphone': ['ios', 'ipad', 'ipod'], + 'ipod': ['ios', 'ipad', 'iphone'], + + + # Product features + 'addon': ['extension', 'theme'], + 'add-on': ['extension', 'theme'], + 'add-ons': ['extensions', 'themes', "addon"], + 'extension': ['addon', 'theme'], + 'theme': ['addon', 'extension'], + 'awesome bar': ['address bar', 'url bar', 'location bar', 'location field', 'url field'], + 'address bar': ['awesome bar', 'url bar', 'location bar', 'location field', 'url field'], + 'url bar': ['awesome bar', 'address bar', 'location bar', 'location field', 'url field'], + 'location bar': ['awesome bar', 'address bar', 'url bar', 'location field', 'url field'], + 'location field': ['awesome bar', 'address bar', 'url bar', 'location bar', 'url field'], + 'url field': ['awesome bar', 'address bar', 'url bar', 'location bar', 'location field'], + 'bookmarks bar': ['bookmark bar', 'bookmarks toolbar', 'bookmark toolbar'], + 'bookmark bar': ['bookmarks bar', 'bookmarks toolbar', 'bookmark toolbar'], + 'bookmarks toolbar': ['bookmarks bar', 'bookmark bar', 'bookmark toolbar'], + 'bookmark toolbar': ['bookmarks bar', 'bookmark bar', 'bookmarks toolbar'], + 'home page': ['homepage', 'home screen', 'homescreen', 'awesome screen', 'firefox hub', + 'start screen'], + 'homepage': ['home page', 'home screen', 'homescreen', 'awesome screen', 'firefox hub', + 'start screen'], + 'home screen': ['home page', 'homepage', 'homescreen', 'awesome screen', 'firefox hub', + 'start screen'], + 'homescreen': ['home page', 'homepage', 'home screen', 'awesome screen', 'firefox hub', + 'start screen'], + 'awesome screen': ['home page', 'homepage', 'home screen', 'homescreen', 'firefox hub', + 'start screen'], + 'firefox hub': ['home page', 'homepage', 'home screen', 'homescreen', 'awesome screen', + 'start screen'], + 'start screen': ['home page', 'homepage', 'home screen', 'homescreen', 'awesome screen', + 'firefox hub'], + 'search bar': ['search field', 'search strip', 'search box'], + 'search field': ['search bar', 'search strip', 'search box'], + 'search strip': ['search bar', 'search field', 'search box'], + 'search box': ['search bar', 'search field', 'search strip'], + 'search engine': ['search provider'], + 'search provider': ['search engine'], + 'e10s': ['multiprocess', 'multi process'], + 'multiprocess': ['e10s', 'multi process'], + 'multi process': ['e10s', 'multiprocess'], + 'two step': ['two factor', '2fa', 'authentication'], + 'two factor': ['two step', '2fa', 'authentication'], + '2fa': ['two step', 'two factor', 'authentication'], + 'authentication': ['two step', 'two factor', '2fa'], + 'private': ['inprivate', 'incognito'], + 'inprivate': ['private', 'incognito'], + 'incognito': ['private', 'inprivate'], + 'etp': ['tracking protection', 'content blocking'], + 'tracking protection': ['etp', 'content blocking'], + 'content blocking': ['etp', 'tracking protection'] + } diff --git a/playwright_tests/tests/explore_help_articles_tests/articles/test_kb_article_creation_and_access.py b/playwright_tests/tests/explore_help_articles_tests/articles/test_kb_article_creation_and_access.py index 18448b783f7..43fc5c774f8 100644 --- a/playwright_tests/tests/explore_help_articles_tests/articles/test_kb_article_creation_and_access.py +++ b/playwright_tests/tests/explore_help_articles_tests/articles/test_kb_article_creation_and_access.py @@ -710,38 +710,38 @@ def test_kb_article_keywords_and_summary(page: Page, username): with allure.step("Typing the article keyword inside the search field and verifying that " "the article is 
displayed inside the search results"): - sumo_pages.search_page._type_into_searchbar(article_details['keyword']) + sumo_pages.search_page.fill_into_searchbar(article_details['keyword']) expect( - sumo_pages.search_page._get_locator_of_a_particular_article( + sumo_pages.search_page.get_locator_of_a_particular_article( article_details['article_title'] ) ).to_be_visible() with check, allure.step("Verifying that the correct kb summary is displayed inside the " "search results"): - assert (sumo_pages.search_page._get_search_result_summary_text_of_a_particular_article( + assert (sumo_pages.search_page.get_search_result_summary_text_of_a_particular_article( article_details['article_title'] )) == article_details['search_results_summary'] with allure.step("Clearing the searchbar, typing the article summary inside the search " "field and verifying that the article is displayed inside the search " "results"): - sumo_pages.search_page._clear_the_searchbar() - sumo_pages.search_page._type_into_searchbar(article_details['search_results_summary']) + sumo_pages.search_page.clear_the_searchbar() + sumo_pages.search_page.fill_into_searchbar(article_details['search_results_summary']) expect( - sumo_pages.search_page._get_locator_of_a_particular_article( + sumo_pages.search_page.get_locator_of_a_particular_article( article_details['article_title'] ) ).to_be_visible() with allure.step("Verifying that the correct kb summary is displayed inside the search " "results"): - assert (sumo_pages.search_page._get_search_result_summary_text_of_a_particular_article( + assert (sumo_pages.search_page.get_search_result_summary_text_of_a_particular_article( article_details['article_title'])) == article_details['search_results_summary'] with check, allure.step("Clicking on the article and verifying that the user is " "redirected to the kb article"): - sumo_pages.search_page._click_on_a_particular_article(article_details['article_title']) + sumo_pages.search_page.click_on_a_particular_article(article_details['article_title']) assert sumo_pages.kb_article_page.get_text_of_article_title( ) == article_details['article_title'] @@ -855,13 +855,13 @@ def test_kb_article_keyword_and_summary_update(page: Page): utilities.delete_cookies() with allure.step("Typing the article keyword inside the search field"): - sumo_pages.search_page._type_into_searchbar( + sumo_pages.search_page.fill_into_searchbar( utilities.kb_article_test_data['updated_keywords'] ) with allure.step("Verifying that the article is displayed inside the search results"): expect( - sumo_pages.search_page._get_locator_of_a_particular_article( + sumo_pages.search_page.get_locator_of_a_particular_article( article_details['article_title'] ) ).to_be_visible() @@ -869,23 +869,23 @@ def test_kb_article_keyword_and_summary_update(page: Page): with check, allure.step("Verifying that the correct kb summary is displayed inside the " "search results"): check.equal( - sumo_pages.search_page._get_search_result_summary_text_of_a_particular_article( + sumo_pages.search_page.get_search_result_summary_text_of_a_particular_article( article_details['article_title'] ), utilities.kb_article_test_data['updated_search_result_summary'] ) with allure.step("Clearing the searchbar"): - sumo_pages.search_page._clear_the_searchbar() + sumo_pages.search_page.clear_the_searchbar() with allure.step("Typing the article summary inside the search field"): - sumo_pages.search_page._type_into_searchbar( + sumo_pages.search_page.fill_into_searchbar( 
utilities.kb_article_test_data['updated_search_result_summary'] ) with allure.step("Verifying that the article is displayed inside the search results"): expect( - sumo_pages.search_page._get_locator_of_a_particular_article( + sumo_pages.search_page.get_locator_of_a_particular_article( article_details['article_title'] ) ).to_be_visible() @@ -893,7 +893,7 @@ def test_kb_article_keyword_and_summary_update(page: Page): with check, allure.step("Verifying that the correct kb summary is displayed inside the " "search results"): check.equal( - sumo_pages.search_page._get_search_result_summary_text_of_a_particular_article( + sumo_pages.search_page.get_search_result_summary_text_of_a_particular_article( article_details['article_title'] ), utilities.kb_article_test_data['updated_search_result_summary'] @@ -901,7 +901,7 @@ def test_kb_article_keyword_and_summary_update(page: Page): with check, allure.step("Clicking on the article and verifying that the user is " "redirected to the kb article"): - sumo_pages.search_page._click_on_a_particular_article(article_details['article_title']) + sumo_pages.search_page.click_on_a_particular_article(article_details['article_title']) check.equal( sumo_pages.kb_article_page.get_text_of_article_title(), article_details['article_title'] diff --git a/playwright_tests/tests/explore_help_articles_tests/articles/test_kb_restricted_visibility.py b/playwright_tests/tests/explore_help_articles_tests/articles/test_kb_restricted_visibility.py index a2f76edef47..fef27d62767 100644 --- a/playwright_tests/tests/explore_help_articles_tests/articles/test_kb_restricted_visibility.py +++ b/playwright_tests/tests/explore_help_articles_tests/articles/test_kb_restricted_visibility.py @@ -182,9 +182,9 @@ def test_restricted_visibility_in_search_results(page: Page, create_delete_artic with check, allure.step("Verifying that the article is not displayed inside the search " "results"): - sumo_pages.search_page._type_into_searchbar(article_details['article_title']) + sumo_pages.search_page.fill_into_searchbar(article_details['article_title']) expect( - sumo_pages.search_page._get_locator_of_a_particular_article( + sumo_pages.search_page.get_locator_of_a_particular_article( article_details['article_title']) ).to_be_hidden() @@ -195,8 +195,8 @@ def test_restricted_visibility_in_search_results(page: Page, create_delete_artic with check, allure.step("Verifying that the article is not inside the search " "results"): - sumo_pages.search_page._type_into_searchbar(article_details['article_title']) - expect(sumo_pages.search_page._get_locator_of_a_particular_article( + sumo_pages.search_page.fill_into_searchbar(article_details['article_title']) + expect(sumo_pages.search_page.get_locator_of_a_particular_article( article_details['article_title'])).to_be_hidden() with allure.step("Signing in with an account that is not part of that whitelisted group"): @@ -206,16 +206,16 @@ def test_restricted_visibility_in_search_results(page: Page, create_delete_artic with check, allure.step("Verifying that the article is not displayed inside the search " "results"): - sumo_pages.search_page._type_into_searchbar(article_details['article_title']) - expect(sumo_pages.search_page._get_locator_of_a_particular_article( + sumo_pages.search_page.fill_into_searchbar(article_details['article_title']) + expect(sumo_pages.search_page.get_locator_of_a_particular_article( article_details['article_title'])).to_be_hidden() with allure.step("Deleting the user session"): utilities.delete_cookies() with allure.step("Verifying that the 
article is not displayed inside the search results"): - sumo_pages.search_page._type_into_searchbar(article_details['article_title']) - expect(sumo_pages.search_page._get_locator_of_a_particular_article( + sumo_pages.search_page.fill_into_searchbar(article_details['article_title']) + expect(sumo_pages.search_page.get_locator_of_a_particular_article( article_details['article_title'])).to_be_hidden() diff --git a/poetry.lock b/poetry.lock index 2e90f1d796b..ccd67686cbf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2302,6 +2302,17 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + [[package]] name = "josepy" version = "1.14.0" @@ -2833,6 +2844,31 @@ files = [ [package.extras] infinite-tracing = ["grpcio", "protobuf"] +[[package]] +name = "nltk" +version = "3.9.1" +description = "Natural Language Toolkit" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, + {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + [[package]] name = "nodeenv" version = "1.8.0" @@ -4752,6 +4788,26 @@ files = [ {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, ] +[[package]] +name = "tqdm" +version = "4.66.5" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "traitlets" version = "5.14.1" @@ -5306,4 +5362,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "fea53028c4e4ddabf04699afcfa6c7a578b206abbe24032297429a3129e5c3d8" +content-hash = "e53420ab41b77037f210be0d958e7f4d8a4642f76445f2f56afce3547784bb0d" diff --git a/pyproject.toml b/pyproject.toml index db610479749..7ac00968efa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -125,6 +125,7 @@ pytest-check = "^2.3.1" allure-pytest = "^2.13.2" pytest-playwright = "^0.5.1" playwright = "^1.47.0" +nltk = "^3.9.1" [build-system] requires = ["poetry-core>=1.0.0"]
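An end-to-end sketch (not part of the patch, made-up strings) of `Utilities.search_result_check` taking the synonym branch: "remove" is absent from the result title, but `SearchSynonyms.synonym_dict` maps it to `['delete', 'clear']` and "delete" is present.

```python
from playwright_tests.core.utilities import Utilities

# Assumes `page` is the current Playwright page, as provided by the existing tests.
utilities = Utilities(page)

title = "Delete browsing, search and download history on Firefox"
assert utilities.search_result_check(title, "remove", "english", exact_phrase=False)
# Logs: "Search result for remove found in synonym: delete"
```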
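And a sketch of how the renamed public `SearchPage` methods and `search_result_check` are meant to be combined in a test. The search term and locale are made up, and `page`, `sumo_pages` and `utilities` are assumed to be wired up the same way the existing tests in this patch wire them; only the method names come from the diff.

```python
from playwright.sync_api import Page

def test_search_results_relevancy_sketch(page: Page, sumo_pages, utilities):
    sumo_pages.search_page.fill_into_searchbar("clear cookies")
    sumo_pages.search_page.click_on_search_button()

    # Every returned title should match the query via keyword, synonym or stem.
    for title in sumo_pages.search_page.get_all_search_results_article_titles():
        assert utilities.search_result_check(title, "clear cookies", "english",
                                             exact_phrase=False)
```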