Commit

wip: filmweb critics
suchencjusz committed Sep 17, 2024
1 parent edad238 commit 384dbe7
Showing 3 changed files with 20 additions and 10 deletions.
23 changes: 19 additions & 4 deletions src/filman_crawler/tasks/scrap_movie.py
@@ -18,6 +18,7 @@
 # TODO make it more robust and add
 # https://www.filmweb.pl/api/v1/film/628/critics/rating
 
+
 class Scraper:
     def __init__(self, headers=None, movie_id=None, endpoint_url=None):
         self.headers = headers
@@ -28,20 +29,32 @@ def __init__(self, headers=None, movie_id=None, endpoint_url=None):
     def scrap(self, task: Task):
         info_url = f"https://www.filmweb.pl/api/v1/title/{task.task_job}/info"
         rating_url = f"https://www.filmweb.pl/api/v1/film/{task.task_job}/rating"
+        crictics_url = f"https://www.filmweb.pl/api/v1/film/{task.task_job}/critics/rating"
 
         info_data = self.fetch(info_url)
         rating_data = self.fetch(rating_url)
+        crictics_data = self.fetch(crictics_url)
 
+        crictics_rate = None
+
-        if info_data is None or rating_data is None:
+        if info_data is not None and rating_data is not None:
+            info_data = ujson.loads(info_data)
+            rating_data = ujson.loads(rating_data)
+        else:
             return False
 
-        info_data = ujson.loads(info_data)
-        rating_data = ujson.loads(rating_data)
+        if crictics_data is not None:
+            crictics_data = ujson.loads(crictics_data)
+        else:
+            crictics_rate = None
+
+        # TODO critics_rate typo fix
 
         title = info_data.get("title", None)
         year = int(info_data.get("year", None))
         poster_url = info_data.get("posterPath", "https://vectorified.com/images/no-data-icon-23.png")
         community_rate = rating_data.get("rate", None)
+        critics_rate = crictics_data.get("rate", None) if crictics_data is not None else None
 
         if title is None or year is None or poster_url is None:
             return False
@@ -52,6 +65,7 @@ def scrap(self, task: Task):
             year,
             poster_url,
             community_rate,
+            critics_rate,
             task.task_id,
         )
 
@@ -60,7 +74,7 @@ def scrap(self, task: Task):
 
         return update
 
-    def update_data(self, movie_id, title, year, poster_url, community_rate, task_id):
+    def update_data(self, movie_id, title, year, poster_url, community_rate, critics_rate, task_id):
         try:
             filmweb = FilmWeb(self.headers, self.endpoint_url)
             filmweb.update_movie(
@@ -70,6 +84,7 @@ def update_data(self, movie_id, title, year, poster_url, community_rate, task_id
                     year=year,
                     poster_url=poster_url,
                     community_rate=community_rate,
+                    critics_rate=critics_rate,
                 )
             )
 
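The TODO above asks for a more robust critics fetch. A minimal sketch of one way to do that, kept outside the Scraper class; it assumes the critics endpoint returns JSON with a "rate" field (mirroring how the community rating endpoint is read above) and that the requests library is available:

from typing import Optional

import requests


def fetch_critics_rate(movie_id: int, timeout: float = 5.0) -> Optional[float]:
    # Endpoint taken from the TODO above; the response shape is assumed to
    # mirror the community rating endpoint (a JSON object with a "rate" field).
    url = f"https://www.filmweb.pl/api/v1/film/{movie_id}/critics/rating"
    try:
        response = requests.get(url, timeout=timeout)
        response.raise_for_status()
    except requests.RequestException:
        # Network error or 404 (film has no critics rating): treat as no data.
        return None
    try:
        rate = response.json().get("rate")
    except ValueError:
        # Body was not valid JSON.
        return None
    return float(rate) if rate is not None else None

When this returns None the caller can simply leave critics_rate empty, which matches how scrap() above already handles a missing critics response.
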
1 change: 1 addition & 0 deletions src/filman_crawler/tasks/utils.py
@@ -124,6 +124,7 @@ def update_movie(self, movie: FilmWebMovie):
                 "year": int(movie.year),
                 "poster_url": str(movie.poster_url),
                 "community_rate": float(movie.community_rate),
+                "critics_rate": float(movie.critics_rate),
             },
         )
 
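One caveat with the new payload line: float(movie.critics_rate) raises TypeError when critics_rate is None, which is exactly what the crawler passes when the critics endpoint returns nothing. A small sketch of a guard, assuming the receiving endpoint accepts a null critics_rate:

from typing import Optional


def optional_float(value) -> Optional[float]:
    # float(None) raises TypeError, so map a missing rating straight to None.
    return float(value) if value is not None else None


# e.g. "critics_rate": optional_float(movie.critics_rate) in the payload above.
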
6 changes: 0 additions & 6 deletions src/filman_discord/main.py
@@ -6,12 +6,6 @@
 import lightbulb
 from lightbulb.ext import tasks
 
-# bot = lightbulb.BotApp(
-#     "MTE4NDUzMTQxMjY2MjEwNDA5NQ.GDmZof.QH06crcIcS3vdiFeH5JhLkkCv-pz2GcccB8360",
-#     intents=hikari.Intents.ALL,
-#     banner=None,
-# )
-
 bot = lightbulb.BotApp(
     os.environ.get("DISCORD_TOKEN"),
     intents=hikari.Intents.ALL,
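
With the hard-coded token gone, os.environ.get("DISCORD_TOKEN") silently returns None when the variable is unset, and the failure only surfaces once the bot tries to log in. A sketch of failing fast instead, assuming the same DISCORD_TOKEN variable name used in the diff above:

import os

import hikari
import lightbulb

token = os.environ.get("DISCORD_TOKEN")
if not token:
    # Surface the misconfiguration immediately instead of passing None to BotApp.
    raise RuntimeError("DISCORD_TOKEN environment variable is not set")

bot = lightbulb.BotApp(token, intents=hikari.Intents.ALL)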
