diff --git a/.gitignore b/.gitignore
index 86d542d..a26cefb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,7 +4,7 @@
# https://www.atlassian.com/git/tutorials/saving-changes/gitignore
.env
-data/*
+data/.ipynb_checkpoints/
# Node artifact files
@@ -53,7 +53,7 @@ Thumbs.db
*.wmv
*.pyc
-notebooks/.ipynb_checkpoints
+notebooks/**/.ipynb_checkpoints/
.env
.env
.venv
diff --git a/climateguard/gdelt_scrapper.py b/climateguard/gdelt_scrapper.py
new file mode 100644
index 0000000..f871ca9
--- /dev/null
+++ b/climateguard/gdelt_scrapper.py
@@ -0,0 +1,76 @@
+from urllib.request import urlopen
+import pandas as pd
+import gdeltdoc as gdelt
+import functools
+import itertools
+from pathlib import Path
+
+class GDELTScrapper:
+ THEMES_URL = "http://data.gdeltproject.org/api/v2/guides/LOOKUP-GKGTHEMES.TXT"
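+    # LOOKUP-GKGTHEMES.TXT is a plain-text lookup of tab-separated "<theme>\t<article count>" rows.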
+
+ @functools.cached_property
+ def themes_df(self) -> pd.DataFrame:
+ # Fetch the content using urllib
+ with urlopen(self.THEMES_URL) as response:
+ data = response.read().decode()
+
+ # Split the data into lines
+ lines = data.strip().split("\n")
+
+ # Split each line into key-value pairs
+ rows = [line.split("\t") for line in lines]
+
+ # Create a DataFrame from the rows
+ df = pd.DataFrame(rows, columns=['theme', 'count'])
+ df['count'] = df['count'].astype(int)
+
+ return df
+
+ def find_themes_related_to_keyword(self, keyword: str) -> list[str]:
+ return self.themes_df[self.themes_df["theme"].str.contains(keyword, case=False)]["theme"].to_list()
+
+ def find_articles(self, themes: list[str], years: list[int]) -> pd.DataFrame:
+ partial_articles_dfs = []
+
+ gd = gdelt.GdeltDoc()
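+        # Query one (theme, year) pair at a time: the GDELT DOC API caps each
+        # query at 250 records, so narrower queries lose fewer articles.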
+ for theme, year in itertools.product(themes, years):
+ f = gdelt.Filters(
+ #keyword = "climate change",
+ start_date=f"{year}-01-01",
+ end_date=f"{year}-12-31",
+ theme=theme,
+ country="LG", # Latvia
+ )
+
+ partial_articles_df = gd.article_search(f)
+ print(f"{len(partial_articles_df)} articles found for theme {theme}, in {year}")
+ partial_articles_dfs.append(partial_articles_df)
+
+ articles_df = pd.concat(partial_articles_dfs)
+
+ articles_df = articles_df[articles_df["language"] == "Latvian"]
+ articles_df["seendate"] = pd.to_datetime(articles_df["seendate"])
+
+        print(f"Deleting {articles_df['url'].duplicated().sum()} duplicates")
+ articles_df = articles_df.drop_duplicates("url")
+ print(f"{len(articles_df)} unique articles found")
+ return articles_df
+
+
+# Usage example:
+if __name__ == "__main__":
+ scraper = GDELTScrapper()
+
+ # Find themes related to climate
+ themes = scraper.find_themes_related_to_keyword("CLIMATE")
+ print(f"Themes related to climate: {themes}")
+
+ # Find articles for these themes and year range
+ articles_df = scraper.find_articles(themes=themes, years=[2022, 2023, 2024])
+
+ # This can be used as input for NewsScraper
+ article_urls = articles_df["url"].to_list()
+
+ # Save dataframe to a csv file
+ file_path = Path(__file__).parent.parent / "data/latvian_article_links.csv"
+ articles_df.to_csv(file_path)
\ No newline at end of file
diff --git a/climateguard/news_scrapper.py b/climateguard/news_scrapper.py
index 54054a1..d1789df 100644
--- a/climateguard/news_scrapper.py
+++ b/climateguard/news_scrapper.py
@@ -1,116 +1,310 @@
+from urllib.parse import urlparse
import requests
from bs4 import BeautifulSoup
-import re
-from datetime import datetime
import json
from models import Article
from newspaper import Article as NewspaperArticle
-from urllib.parse import urlparse
+from newspaper.article import ArticleException
+import multiprocessing
+from functools import partial
+
class NewsScraper:
def __init__(self):
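+        # Browser-like User-Agent: some news sites reject the default python-requests agent.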
self.headers = {
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
}
def scrape_article(self, url):
- # Try NewspaperArticle first
- newspaper_article = NewspaperArticle(url)
- newspaper_article.download()
- newspaper_article.parse()
+ try:
+ # Try NewspaperArticle first
+ newspaper_article = NewspaperArticle(url)
+ try:
+ newspaper_article.download()
+ newspaper_article.parse()
- if newspaper_article.text:
- return Article(
- title=newspaper_article.title,
- content=newspaper_article.text,
- url=url,
- date=str(newspaper_article.publish_date) if newspaper_article.publish_date else '',
- topic='', # NewspaperArticle doesn't provide a topic
- source=url
- )
-
- # If NewspaperArticle fails to extract text, use custom scrapers
- response = requests.get(url, headers=self.headers)
- soup = BeautifulSoup(response.content, 'html.parser')
-
- if 'lsm.lv' in url:
- return self._scrape_lsm(soup, url)
- elif 'delfi.lv' in url:
- return self._scrape_delfi(soup, url)
- elif 'nra.lv' in url:
- return self._scrape_nra(soup, url)
- else:
- raise ValueError("Unsupported website")
+ if newspaper_article.text:
+ return Article(
+ title=newspaper_article.title,
+ content=newspaper_article.text,
+ url=url,
+ date=(
+ str(newspaper_article.publish_date)
+ if newspaper_article.publish_date
+ else ""
+ ),
+ topic="", # NewspaperArticle doesn't provide a topic
+ source=url,
+ )
+ except ArticleException:
+ print(f"NewspaperArticle failed for {url}. Falling back to custom scraper.")
+
+ # If NewspaperArticle fails, use custom scrapers
+ response = requests.get(url, headers=self.headers)
+ soup = BeautifulSoup(response.content, "html.parser")
+ domain = urlparse(url).netloc
+
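+            # Dispatch on the host only, so a site name appearing in a URL path cannot select the wrong scraper.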
+ if 'lsm.lv' in domain:
+ return self._scrape_lsm(soup, url)
+ elif 'delfi.lv' in domain:
+ return self._scrape_delfi(soup, url)
+ elif 'nra.lv' in domain:
+ return self._scrape_nra(soup, url)
+ elif 'la.lv' in domain:
+ return self._scrape_la(soup, url)
+ elif 'diena.lv' in domain:
+ return self._scrape_diena(soup, url)
+ elif 'ventasbalss.lv' in domain:
+ return self._scrape_ventasbalss(soup, url)
+ elif 'reitingi.lv' in domain:
+ return self._scrape_reitingi(soup, url)
+            elif any(d in domain for d in ('bnn.lv', 'ir.lv', 'latgaleslaiks.lv')):
+                return self._scrape_bnn_or_ir_or_latgaleslaiks(soup, url)
+            elif any(d in domain for d in ('president.lv', 'ogrenet.lv')):
+                print("Unsupported website")
+                return None
+ else:
+ raise ValueError("Unsupported website")
+ except Exception as e:
+ print(f"Error scraping {url}: {str(e)}")
+ return None
def _scrape_lsm(self, soup, url):
- content = ' '.join([p.text for p in soup.find_all('p')])
- title = soup.find('h1').text.strip() if soup.find('h1') else ''
- topic = soup.find('meta', {'property': 'article:section'})['content'] if soup.find('meta', {'property': 'article:section'}) else ''
- date = soup.find('meta', {'property': 'article:published_time'})['content'] if soup.find('meta', {'property': 'article:published_time'}) else ''
-
- return Article(
- title=title,
- content=content,
- url=url,
- date=date,
- topic=topic,
- source=url
+ content = " ".join([p.text for p in soup.find_all("p")])
+ title = soup.find("h1").text.strip() if soup.find("h1") else ""
+ topic = (
+ soup.find("meta", {"property": "article:section"})["content"]
+ if soup.find("meta", {"property": "article:section"})
+ else ""
+ )
+ date = (
+ soup.find("meta", {"property": "article:published_time"})["content"]
+ if soup.find("meta", {"property": "article:published_time"})
+ else ""
+ )
+
+ return Article(title=title, content=content, url=url, date=date, topic=topic, source=url)
+
+ def _scrape_bnn_or_ir_or_latgaleslaiks(self, soup, url):
+
+ # Extracting title
+ title = soup.find("h1").text.strip() if soup.find("h1") else ""
+
+ # Extracting content
+ content = " ".join([p.text for p in soup.find_all("p")])
+
+ # Extracting topic (usually under a breadcrumb or meta tag)
+ topic = (
+ soup.find("meta", {"property": "article:section"})["content"]
+ if soup.find("meta", {"property": "article:section"})
+ else ""
+ )
+
+ # Extracting date (if available)
+ date = (
+ soup.find("time", {"class": "entry-date published updated"}).text.strip()
+ if soup.find("time", {"class": "entry-date published updated"})
+ else ""
)
+ return Article(title=title, content=content, url=url, date=date, topic=topic, source=url)
+
+ def _scrape_reitingi(self, soup, url):
+ title = soup.find("h1").text.strip() if soup.find("h1") else ""
+ content = " ".join([p.text for p in soup.find_all("p")])
+ topic = soup.find("meta", {"name": "news_keywords"})["content"] if soup.find("meta", {"name": "news_keywords"}) else ""
+ date = soup.find("div", class_="date").text.strip() if soup.find("div", class_="date") else ""
+
+ return Article(title=title, content=content, url=url, date=date, topic=topic, source=url)
+
+
def _scrape_delfi(self, soup, url):
- content = ' '.join([p.text for p in soup.find_all('p', class_='C-article-body__paragraph')])
- title = soup.find('h1', class_='C-article-headline').text.strip() if soup.find('h1', class_='C-article-headline') else ''
- topic = soup.find('a', class_='C-article-info__category').text.strip() if soup.find('a', class_='C-article-info__category') else ''
- date = soup.find('time', class_='C-article-info__time')['datetime'] if soup.find('time', class_='C-article-info__time') else ''
-
- return Article(
- title=title,
- content=content,
- url=url,
- date=date,
- topic=topic,
- source=url
+ content = " ".join([p.text for p in soup.find_all("p", class_="C-article-body__paragraph")])
+ title = (
+ soup.find("h1", class_="C-article-headline").text.strip()
+ if soup.find("h1", class_="C-article-headline")
+ else ""
+ )
+ topic = (
+ soup.find("a", class_="C-article-info__category").text.strip()
+ if soup.find("a", class_="C-article-info__category")
+ else ""
)
+ date = (
+ soup.find("time", class_="C-article-info__time")["datetime"]
+ if soup.find("time", class_="C-article-info__time")
+ else ""
+ )
+ if not content:
+ content_div = soup.find_all("article", class_="mt-4")
+ if content_div:
+                content = " ".join(p.text for p in content_div[0].find_all("section"))
+                title = " ".join(p.text for p in content_div[0].find_all("h1"))
+
+ return Article(title=title, content=content, url=url, date=date, topic=topic, source=url)
def _scrape_nra(self, soup, url):
- content = ' '.join([p.text for p in soup.find_all('p', class_='article-text')])
- title = soup.find('h1', class_='article-title').text.strip() if soup.find('h1', class_='article-title') else ''
- topic = soup.find('span', class_='article-category').text.strip() if soup.find('span', class_='article-category') else ''
- date = soup.find('time', class_='article-date')['datetime'] if soup.find('time', class_='article-date') else ''
-
- return Article(
- title=title,
- content=content,
- url=url,
- date=date,
- topic=topic,
- source=url
+ content = " ".join([p.text for p in soup.find_all("p", class_="article-text")])
+ title = (
+ soup.find("h1", class_="article-title").text.strip()
+ if soup.find("h1", class_="article-title")
+ else ""
+ )
+ topic = (
+ soup.find("span", class_="article-category").text.strip()
+ if soup.find("span", class_="article-category")
+ else ""
)
+ date = (
+ soup.find("time", class_="article-date")["datetime"]
+ if soup.find("time", class_="article-date")
+ else ""
+ )
+ if not content:
+ content_div = soup.find_all("div", class_="text-bl-wrap")
+ if content_div:
+                content = " ".join(p.text for p in content_div[0].find_all("p"))
+
+ title_div = soup.find_all("div", class_="section-title")
+ if title_div:
+                title = " ".join(p.text for p in title_div[0].find_all("h1"))
+
+ return Article(title=title, content=content, url=url, date=date, topic=topic, source=url)
+
+
+ def _scrape_ventasbalss(self, soup, url):
+ try:
+
+ title = soup.find('h1', class_='article-title').text.strip() if soup.find('h1', class_='article-title') else ''
+ content_div = soup.find('div', class_='article-content')
+ paragraphs = content_div.find_all(['p', 'h2', 'h3']) if content_div else []
+ content = ' '.join([p.text.strip() for p in paragraphs])
+
+ return Article(
+ title=title,
+ content=content,
+ url=url,
+ date="",
+ topic="",
+ source=url
+ )
+ except Exception as e:
+ print(f"Error scraping ventasbalss.lv: {str(e)}")
+ return None
+
+
+ def _scrape_la(self, soup, url):
+ try:
+ title = soup.find('article').find('h1', class_='article-title').text.strip() if soup.find('article').find('h1', class_='article-title') else ''
+
+ date_str = soup.find('article').find('div', class_='article-date').text.strip() if soup.find('article').find('div', class_='article-date') else ''
+
+ content_div = soup.find('article', class_='article-content-block')
+
+
+ # Extract content from p tags
+ paragraphs = content_div.find_all('p') if content_div else []
+ content = ' '.join([p.text.strip() for p in paragraphs])
+
+ topic = soup.find('div', class_='article-breadcrumbs').find_all('a')[-1].text.strip() if soup.find('div', class_='article-breadcrumbs').find_all('a') else ''
+
+ return Article(
+ title=title,
+ content=content,
+ url=url,
+ date=date_str,
+ topic=topic,
+ source=url
+ )
+ except AttributeError as e:
+ print(f"Error scraping la.lv: {str(e)}")
+ return None
+
+ def _parse_la_date(self, date_str):
+ # Convert Latvian month names to numbers
+ lv_months = {
+ 'janvāris': '01', 'februāris': '02', 'marts': '03', 'aprīlis': '04',
+ 'maijs': '05', 'jūnijs': '06', 'jūlijs': '07', 'augusts': '08',
+ 'septembris': '09', 'oktobris': '10', 'novembris': '11', 'decembris': '12'
+ }
+
+ # Split the date string
+ day, month, year = date_str.split()
+
+ # Convert month to number
+ month_num = lv_months[month.lower()]
+
+ # Format the date as YYYY-MM-DD
+ return f"{year}-{month_num}-{day.zfill(2)}"
+
+ def _scrape_diena(self, soup, url):
+ try:
+ title = soup.find('h1', class_='article-headline').text.strip() if soup.find('h1', class_='article-headline') else ''
+ content = ' '.join([p.text for p in soup.find('div', class_='article-body').find_all('p')]) if soup.find('div', class_='article-body') else ''
+ date = soup.find('time', class_='article-date')['datetime'] if soup.find('time', class_='article-date') else ''
+ topic = soup.find('a', class_='article-category').text.strip() if soup.find('a', class_='article-category') else ''
+
+ return Article(
+ title=title,
+ content=content,
+ url=url,
+ date=date,
+ topic=topic,
+ source=url
+ )
+ except AttributeError as e:
+ print(f"Error scraping diena.lv: {str(e)}")
+ return None
+
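+# Pool worker: returns (article, None) when the scraped article has more than
+# 100 characters of content, otherwise (None, url) so the failure can be recorded.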
+def scrape_single_article(scraper, url):
+ article = scraper.scrape_article(url)
+ if article:
+ if len(article.content) > 100:
+ print(f"Scraped: {article.title}")
+ print(f"Content length: {len(article.content)}")
+ print(f"Date: {article.date}")
+ print("---")
+ return article, None
+ else:
+ print(f"Skipped: {article.title} - Content length too short")
+ return None, url
+ else:
+ print(f"Failed to scrape: {url}")
+ print("---")
+ return None, url
-# Usage example:
if __name__ == "__main__":
scraper = NewsScraper()
- urls = [
- "https://www.lsm.lv/raksts/dzive--stils/vide-un-dzivnieki/03.10.2024-zinojums-lidz-gadsimta-beigam-latvija-prognozeta-krasta-linijas-atkapsanas-par-47-72-metriem.a571093/",
- "https://www.delfi.lv/bizness/56234200/eiropas-zinas/120042670/zinam-problemu-un-neizmantojam-risinajumus-ko-latvijas-iedzivotaji-doma-par-klimata-parmainam",
- "https://www.delfi.lv/bizness/56234200/eiropas-zinas/120042670/kutri-izmantojam-dzerama-udens-kranus-kapec-iedzivotajiem-trukst-pamudinajuma-dzivot-zalak",
- "https://nra.lv/pasaule/465572-sliktas-zinas-baltvina-cienitajiem.htm",
- "https://www.lsm.lv/raksts/dzive--stils/vide-un-dzivnieki/20.09.2024-par-zalaku-rigu-spriedis-piecas-sestdienas-ko-sagaida-no-pirmas-iedzivotaju-klimata-asamblejas.a569637/"
- ]
-
- articles = []
-
- for url in urls:
- article = scraper.scrape_article(url)
- articles.append(article)
- print(f"Scraped: {article.title}")
- print(f"Content length: {len(article.content)}")
- print(f"Date: {article.date}")
- print("---")
- # Save to JSON
- output_file = 'scraped_articles.json'
+ # Read URLs from the all_urls.json file
+ with open('data/all_urls.json', 'r', encoding='utf-8') as f:
+ urls = json.load(f)
+
+ # Create a partial function with the scraper object
+ scrape_func = partial(scrape_single_article, scraper)
+
+    # Get the number of CPU cores
+    num_cores = multiprocessing.cpu_count()
+
+    # Create a multiprocessing pool sized to the machine
+    with multiprocessing.Pool(processes=num_cores) as pool:
+ # Map the scraping function to the URLs in parallel
+ results = pool.map(scrape_func, urls)
+
+ # Separate the results into scraped articles and failed URLs
+ scraped_articles = [article for article, _ in results if article]
+ failed_urls = [url for _, url in results if url]
+
+ # Save successfully scraped articles to JSON
+ output_file = 'data/scraped_articles_2.json'
with open(output_file, 'w', encoding='utf-8') as f:
- json.dump([article.dict() for article in articles], f, ensure_ascii=False, indent=4)
+ json.dump([article.dict() for article in scraped_articles], f, ensure_ascii=False, indent=4)
+
+ print(f"\nSuccessfully scraped articles saved to {output_file}")
+
+ # Save failed URLs to a separate file
+ failed_file = 'data/failed_urls_2.json'
+ with open(failed_file, 'w', encoding='utf-8') as f:
+ json.dump(failed_urls, f, ensure_ascii=False, indent=4)
- print(f"\nArticles saved to {output_file}")
\ No newline at end of file
+ print(f"Failed URLs saved to {failed_file}")
\ No newline at end of file
diff --git a/climateguard/pipeline.py b/climateguard/pipeline.py
new file mode 100644
index 0000000..3df135b
--- /dev/null
+++ b/climateguard/pipeline.py
@@ -0,0 +1,97 @@
+from gdelt_scrapper import GDELTScrapper
+from news_scrapper import NewsScraper
+from pathlib import Path
+import json
+import multiprocessing
+from functools import partial
+
+class Pipeline:
+ def __init__(self):
+ self.gdelt_scraper = GDELTScrapper()
+ self.news_scraper = NewsScraper()
+
+ def run(self, keyword: str, years: list[int], output_dir: Path):
+ # Step 1: Find themes related to the keyword
+ themes = self.gdelt_scraper.find_themes_related_to_keyword(keyword)
+ print(f"Themes related to {keyword}: {themes}")
+
+ # Step 2: Find articles for these themes and years
+ articles_df = self.gdelt_scraper.find_articles(themes=themes, years=years)
+
+ # Step 3: Extract URLs from the DataFrame
+ urls = articles_df["url"].tolist()
+
+ # Save the list of URLs to a separate file
+ self._save_urls(urls, output_dir)
+
+ # Step 4: Scrape each URL using multiprocessing
+ scraped_articles, failed_urls = self._scrape_urls_parallel(urls)
+
+ # Step 5: Save results
+ self._save_results(scraped_articles, failed_urls, output_dir)
+
+ def _save_urls(self, urls: list, output_dir: Path):
+ output_dir.mkdir(parents=True, exist_ok=True)
+ urls_file = output_dir / 'all_urls.json'
+ with open(urls_file, 'w', encoding='utf-8') as f:
+ json.dump(urls, f, ensure_ascii=False, indent=4)
+ print(f"All URLs saved to {urls_file}")
+
+ def _scrape_urls_parallel(self, urls):
+ # Create a partial function with self.news_scraper
+ scrape_func = partial(self._scrape_single_url, news_scraper=self.news_scraper)
+
+ # Use all available cores
+ num_cores = multiprocessing.cpu_count()
+
+ # Create a multiprocessing pool
+ with multiprocessing.Pool(num_cores) as pool:
+ results = pool.map(scrape_func, urls)
+
+ # Process results
+ scraped_articles = []
+ failed_urls = []
+ for result in results:
+ if result['success']:
+ article = result['article']
+ scraped_articles.append(article)
+ print(f"Scraped: {article.title}")
+ print(f"Content length: {len(article.content)}")
+ print(f"Date: {article.date}")
+ print("---")
+ else:
+ failed_urls.append(result['url'])
+ print(f"Failed to scrape: {result['url']}")
+ print("---")
+
+ return scraped_articles, failed_urls
+
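+    # A staticmethod (bound via functools.partial) can be pickled and shipped to
+    # Pool workers, unlike a lambda or a function defined inside a method.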
+ @staticmethod
+ def _scrape_single_url(url, news_scraper):
+ article = news_scraper.scrape_article(url)
+ if article:
+ return {'success': True, 'article': article}
+ else:
+ return {'success': False, 'url': url}
+
+ def _save_results(self, scraped_articles, failed_urls, output_dir):
+ output_dir.mkdir(parents=True, exist_ok=True)
+
+ # Save successfully scraped articles to JSON
+ output_file = output_dir / 'scraped_articles.json'
+ with open(output_file, 'w', encoding='utf-8') as f:
+ json.dump([article.dict() for article in scraped_articles], f, ensure_ascii=False, indent=4)
+
+ print(f"\nSuccessfully scraped articles saved to {output_file}")
+
+ # Save failed URLs to a separate file
+ failed_file = output_dir / 'failed_urls.json'
+ with open(failed_file, 'w', encoding='utf-8') as f:
+ json.dump(failed_urls, f, ensure_ascii=False, indent=4)
+
+ print(f"Failed URLs saved to {failed_file}")
+
+if __name__ == "__main__":
+ pipeline = Pipeline()
+ output_dir = Path(__file__).parent.parent / "data"
+ pipeline.run(keyword="CLIMATE", years=[2022, 2023, 2024], output_dir=output_dir)
diff --git a/climateguard/test.py b/climateguard/test.py
new file mode 100644
index 0000000..9a6fefd
--- /dev/null
+++ b/climateguard/test.py
@@ -0,0 +1,58 @@
+import requests
+from bs4 import BeautifulSoup
+from dataclasses import dataclass
+from datetime import datetime
+import re
+
+@dataclass
+class Article:
+ title: str
+ content: str
+ url: str
+ date: str
+ topic: str
+ source: str
+
+class VentasbalsScraper:
+ def _scrape_presidentlv(self, url):
+ print(url)
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
+ }
+ response = requests.get(url, headers=headers)
+ soup = BeautifulSoup(response.content, 'html.parser')
+
+ # Extracting title
+ title = soup.find("h1").text.strip() if soup.find("h1") else ""
+
+ # Extracting content
+ content = " ".join([p.text for p in soup.find_all("p")])
+
+ # Extracting topic (if available)
+ topic = (
+ soup.find("meta", {"property": "og:title"})["content"]
+ if soup.find("meta", {"property": "og:title"})
+ else ""
+ )
+
+ # Extracting date (if available)
+ date = (
+ soup.find("time", {"class": "entry-date published"}).text.strip()
+ if soup.find("time", {"class": "entry-date published"})
+ else ""
+ )
+
+ return Article(title=title, content=content, url=url, date=date, topic=topic, source=url)
+
+# Usage example
+if __name__ == "__main__":
+ scraper = VentasbalsScraper()
+ url = "https://www.president.lv/lv/jaunums/valsts-prezidents-18-novembri-rigas-pili-pasniedz-augstakos-latvijas-valsts-apbalvojumus-90-izcilam-personibam"
+ article = scraper._scrape_presidentlv(url)
+ if article:
+ print(f"Title: {article.title}")
+ print(f"Date: {article.date}")
+ print(f"Topic: {article.topic}")
+ print(f"Content preview: {article.content[:200]}...")
+ else:
+ print("Failed to scrape the article.")
diff --git a/notebooks/latvia/scrape GDELT api.ipynb b/notebooks/latvia/scrape GDELT api.ipynb
new file mode 100644
index 0000000..7e64037
--- /dev/null
+++ b/notebooks/latvia/scrape GDELT api.ipynb
@@ -0,0 +1,456 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "id": "C2H7hlVkt_ep"
+ },
+ "outputs": [],
+ "source": [
+ "from urllib.request import urlopen\n",
+ "import pandas as pd\n",
+ "from gdeltdoc import GdeltDoc, Filters"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "## Get list of all GDELT themes\n",
+    "We need the list of predefined themes to be able to filter articles by theme"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ " theme count\n",
+ "0 TAX_FNCACT 999601552\n",
+ "1 TAX_ETHNICITY 410780218\n",
+ "2 EPU_POLICY 384818230\n",
+ "3 CRISISLEX_CRISISLEXREC 373229208\n",
+ "4 TAX_WORLDLANGUAGES 348186680\n",
+ "... ... ...\n",
+ "59310 TAX_WORLDLANGUAGES_PUNAPA 1\n",
+ "59311 TAX_WORLDBIRDS_SWALLOWTAILED_HUMMINGBIRDS 1\n",
+ "59312 TAX_WORLDMAMMALS_PACIFIC_DEGU 1\n",
+ "59313 TAX_WORLDBIRDS_FLAMECRESTED_TANAGER 1\n",
+ "59314 TAX_WORLDLANGUAGES_BOROAS 1\n",
+ "\n",
+ "[59315 rows x 2 columns]"
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "THEMES_URL = \"http://data.gdeltproject.org/api/v2/guides/LOOKUP-GKGTHEMES.TXT\"\n",
+ "\n",
+ "\n",
+ "def get_themes(url: str) -> pd.DataFrame: \n",
+ " # Fetch the content using urllib\n",
+ " with urlopen(url) as response:\n",
+ " data = response.read().decode()\n",
+ " \n",
+ " # Split the data into lines\n",
+ " lines = data.strip().split(\"\\n\")\n",
+ " \n",
+ " # Split each line into key-value pairs\n",
+ " rows = [line.split(\"\\t\") for line in lines]\n",
+ " \n",
+ " # Create a DataFrame from the rows\n",
+ " df = pd.DataFrame(rows, columns=['theme', 'count'])\n",
+ " df['count'] = df['count'].astype(int)\n",
+ " \n",
+ " return df\n",
+ "\n",
+ "def get_climate_themes(themes_df) -> list[str] : \n",
+ " return themes_df[themes_df[\"theme\"].str.contains(\"CLIMATE\")][\"theme\"].to_list()\n",
+ "\n",
+ "themes_df = get_themes(THEMES_URL)\n",
+ "themes_df"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "['WB_405_BUSINESS_CLIMATE',\n",
+ " 'WB_567_CLIMATE_CHANGE',\n",
+ " 'ENV_CLIMATECHANGE',\n",
+ " 'UNGP_CLIMATE_CHANGE_ACTION',\n",
+ " 'WB_1949_CLIMATE_SMART_AGRICULTURE',\n",
+ " 'WB_568_CLIMATE_SERVICES',\n",
+ " 'WB_579_CLIMATE_CHANGE_MITIGATION',\n",
+ " 'WB_571_CLIMATE_SCIENCE',\n",
+ " 'WB_1841_SHORT_LIVED_CLIMATE_POLLUTANTS',\n",
+ " 'WB_1844_MARKET_BASED_CLIMATE_CHANGE_MITIGATION',\n",
+ " 'WB_1773_CLIMATE_CHANGE_IMPACTS',\n",
+ " 'WB_1847_CLIMATE_FINANCE',\n",
+ " 'WB_574_CLIMATE_CHANGE_ADAPTATION',\n",
+ " 'WB_959_CLIMATE_CHANGE_LAW',\n",
+ " 'WB_747_SOCIAL_RESILIENCE_AND_CLIMATE_CHANGE',\n",
+ " 'WB_1774_CLIMATE_FORECASTING',\n",
+ " 'WB_2673_JOBS_AND_CLIMATE_CHANGE',\n",
+ " 'TAX_AIDGROUPS_CLIMATE_ACTION_NETWORK',\n",
+ " 'WB_572_CLIMATE_RESILIENT_DEVELOPMENT',\n",
+ " 'WB_2639_CLIMATE_EFFICIENT_INDUSTRIES',\n",
+ " 'WB_573_CLIMATE_RISK_MANAGEMENT',\n",
+ " 'WB_1849_PUBLIC_CLIMATE_FINANCE',\n",
+ " 'WB_1838_CLIMATE_RISK_SCREENING',\n",
+ " 'WB_1850_PRIVATE_CLIMATE_FINANCE',\n",
+ " 'WB_1839_OZONE_LAYER_DEPLETION_AND_CLIMATE_CHANGE',\n",
+ " 'WB_575_COMMUNITY_BASED_CLIMATE_ADAPTATION',\n",
+ " 'WB_1750_CLIMATE_CHANGE_ADAPTATION_IMPACTS']"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "climate_themes = get_climate_themes(themes_df)\n",
+ "climate_themes"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "## Scrape the GDELT API for Latvian climate articles\n",
+    "We will use this wrapper around the GDELT API: https://github.com/alex9smith/gdelt-doc-api"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "250 articles found for theme WB_405_BUSINESS_CLIMATE, in 2022\n",
+ "250 articles found for theme WB_405_BUSINESS_CLIMATE, in 2023\n",
+ "250 articles found for theme WB_405_BUSINESS_CLIMATE, in 2024\n",
+ "250 articles found for theme WB_567_CLIMATE_CHANGE, in 2022\n",
+ "205 articles found for theme WB_567_CLIMATE_CHANGE, in 2023\n",
+ "250 articles found for theme WB_567_CLIMATE_CHANGE, in 2024\n",
+ "250 articles found for theme ENV_CLIMATECHANGE, in 2022\n",
+ "151 articles found for theme ENV_CLIMATECHANGE, in 2023\n",
+ "150 articles found for theme ENV_CLIMATECHANGE, in 2024\n",
+ "250 articles found for theme UNGP_CLIMATE_CHANGE_ACTION, in 2022\n",
+ "156 articles found for theme UNGP_CLIMATE_CHANGE_ACTION, in 2023\n",
+ "159 articles found for theme UNGP_CLIMATE_CHANGE_ACTION, in 2024\n",
+ "149 articles found for theme WB_1949_CLIMATE_SMART_AGRICULTURE, in 2022\n",
+ "73 articles found for theme WB_1949_CLIMATE_SMART_AGRICULTURE, in 2023\n",
+ "78 articles found for theme WB_1949_CLIMATE_SMART_AGRICULTURE, in 2024\n",
+ "126 articles found for theme WB_568_CLIMATE_SERVICES, in 2022\n",
+ "70 articles found for theme WB_568_CLIMATE_SERVICES, in 2023\n",
+ "124 articles found for theme WB_568_CLIMATE_SERVICES, in 2024\n",
+ "26 articles found for theme WB_579_CLIMATE_CHANGE_MITIGATION, in 2022\n",
+ "9 articles found for theme WB_579_CLIMATE_CHANGE_MITIGATION, in 2023\n",
+ "7 articles found for theme WB_579_CLIMATE_CHANGE_MITIGATION, in 2024\n",
+ "7 articles found for theme WB_571_CLIMATE_SCIENCE, in 2022\n",
+ "14 articles found for theme WB_571_CLIMATE_SCIENCE, in 2023\n",
+ "6 articles found for theme WB_571_CLIMATE_SCIENCE, in 2024\n",
+ "10 articles found for theme WB_1841_SHORT_LIVED_CLIMATE_POLLUTANTS, in 2022\n",
+ "4 articles found for theme WB_1841_SHORT_LIVED_CLIMATE_POLLUTANTS, in 2023\n",
+ "3 articles found for theme WB_1841_SHORT_LIVED_CLIMATE_POLLUTANTS, in 2024\n",
+ "7 articles found for theme WB_1844_MARKET_BASED_CLIMATE_CHANGE_MITIGATION, in 2022\n",
+ "2 articles found for theme WB_1844_MARKET_BASED_CLIMATE_CHANGE_MITIGATION, in 2023\n",
+ "0 articles found for theme WB_1844_MARKET_BASED_CLIMATE_CHANGE_MITIGATION, in 2024\n",
+ "6 articles found for theme WB_1773_CLIMATE_CHANGE_IMPACTS, in 2022\n",
+ "12 articles found for theme WB_1773_CLIMATE_CHANGE_IMPACTS, in 2023\n",
+ "5 articles found for theme WB_1773_CLIMATE_CHANGE_IMPACTS, in 2024\n",
+ "4 articles found for theme WB_1847_CLIMATE_FINANCE, in 2022\n",
+ "1 articles found for theme WB_1847_CLIMATE_FINANCE, in 2023\n",
+ "0 articles found for theme WB_1847_CLIMATE_FINANCE, in 2024\n",
+ "5 articles found for theme WB_574_CLIMATE_CHANGE_ADAPTATION, in 2022\n",
+ "2 articles found for theme WB_574_CLIMATE_CHANGE_ADAPTATION, in 2023\n",
+ "0 articles found for theme WB_574_CLIMATE_CHANGE_ADAPTATION, in 2024\n",
+ "0 articles found for theme WB_959_CLIMATE_CHANGE_LAW, in 2022\n",
+ "2 articles found for theme WB_959_CLIMATE_CHANGE_LAW, in 2023\n",
+ "0 articles found for theme WB_959_CLIMATE_CHANGE_LAW, in 2024\n",
+ "1 articles found for theme WB_747_SOCIAL_RESILIENCE_AND_CLIMATE_CHANGE, in 2022\n",
+ "0 articles found for theme WB_747_SOCIAL_RESILIENCE_AND_CLIMATE_CHANGE, in 2023\n",
+ "1 articles found for theme WB_747_SOCIAL_RESILIENCE_AND_CLIMATE_CHANGE, in 2024\n",
+ "0 articles found for theme WB_1774_CLIMATE_FORECASTING, in 2022\n",
+ "1 articles found for theme WB_1774_CLIMATE_FORECASTING, in 2023\n",
+ "0 articles found for theme WB_1774_CLIMATE_FORECASTING, in 2024\n",
+ "2 articles found for theme WB_2673_JOBS_AND_CLIMATE_CHANGE, in 2022\n",
+ "0 articles found for theme WB_2673_JOBS_AND_CLIMATE_CHANGE, in 2023\n",
+ "0 articles found for theme WB_2673_JOBS_AND_CLIMATE_CHANGE, in 2024\n",
+ "2 articles found for theme TAX_AIDGROUPS_CLIMATE_ACTION_NETWORK, in 2022\n",
+ "0 articles found for theme TAX_AIDGROUPS_CLIMATE_ACTION_NETWORK, in 2023\n",
+ "0 articles found for theme TAX_AIDGROUPS_CLIMATE_ACTION_NETWORK, in 2024\n",
+ "0 articles found for theme WB_572_CLIMATE_RESILIENT_DEVELOPMENT, in 2022\n",
+ "0 articles found for theme WB_572_CLIMATE_RESILIENT_DEVELOPMENT, in 2023\n",
+ "0 articles found for theme WB_572_CLIMATE_RESILIENT_DEVELOPMENT, in 2024\n",
+ "0 articles found for theme WB_2639_CLIMATE_EFFICIENT_INDUSTRIES, in 2022\n",
+ "0 articles found for theme WB_2639_CLIMATE_EFFICIENT_INDUSTRIES, in 2023\n",
+ "0 articles found for theme WB_2639_CLIMATE_EFFICIENT_INDUSTRIES, in 2024\n",
+ "0 articles found for theme WB_573_CLIMATE_RISK_MANAGEMENT, in 2022\n",
+ "0 articles found for theme WB_573_CLIMATE_RISK_MANAGEMENT, in 2023\n",
+ "0 articles found for theme WB_573_CLIMATE_RISK_MANAGEMENT, in 2024\n",
+ "2 articles found for theme WB_1849_PUBLIC_CLIMATE_FINANCE, in 2022\n",
+ "0 articles found for theme WB_1849_PUBLIC_CLIMATE_FINANCE, in 2023\n",
+ "0 articles found for theme WB_1849_PUBLIC_CLIMATE_FINANCE, in 2024\n",
+ "0 articles found for theme WB_1838_CLIMATE_RISK_SCREENING, in 2022\n",
+ "0 articles found for theme WB_1838_CLIMATE_RISK_SCREENING, in 2023\n",
+ "0 articles found for theme WB_1838_CLIMATE_RISK_SCREENING, in 2024\n",
+ "0 articles found for theme WB_1850_PRIVATE_CLIMATE_FINANCE, in 2022\n",
+ "0 articles found for theme WB_1850_PRIVATE_CLIMATE_FINANCE, in 2023\n",
+ "0 articles found for theme WB_1850_PRIVATE_CLIMATE_FINANCE, in 2024\n",
+ "0 articles found for theme WB_1839_OZONE_LAYER_DEPLETION_AND_CLIMATE_CHANGE, in 2022\n",
+ "0 articles found for theme WB_1839_OZONE_LAYER_DEPLETION_AND_CLIMATE_CHANGE, in 2023\n",
+ "0 articles found for theme WB_1839_OZONE_LAYER_DEPLETION_AND_CLIMATE_CHANGE, in 2024\n",
+ "0 articles found for theme WB_575_COMMUNITY_BASED_CLIMATE_ADAPTATION, in 2022\n",
+ "0 articles found for theme WB_575_COMMUNITY_BASED_CLIMATE_ADAPTATION, in 2023\n",
+ "0 articles found for theme WB_575_COMMUNITY_BASED_CLIMATE_ADAPTATION, in 2024\n",
+ "0 articles found for theme WB_1750_CLIMATE_CHANGE_ADAPTATION_IMPACTS, in 2022\n",
+ "0 articles found for theme WB_1750_CLIMATE_CHANGE_ADAPTATION_IMPACTS, in 2023\n",
+ "0 articles found for theme WB_1750_CLIMATE_CHANGE_ADAPTATION_IMPACTS, in 2024\n"
+ ]
+ }
+ ],
+ "source": [
+ "partial_articles_dfs = []\n",
+ "\n",
+ "for theme in climate_themes: \n",
+ " for year in [2022, 2023, 2024]: \n",
+ " f = Filters(\n",
+ " #keyword = \"climate change\",\n",
+ " start_date = f\"{year}-01-01\",\n",
+ " end_date = f\"{year}-12-31\", \n",
+ " theme = theme, \n",
+ " country = \"LG\", \n",
+ " )\n",
+ " \n",
+ " gd = GdeltDoc()\n",
+ " \n",
+ " # Search for articles matching the filters\n",
+ " partial_articles_df = gd.article_search(f)\n",
+ " print(f\"{len(partial_articles_df)} articles found for theme {theme}, in {year}\")\n",
+ " if partial_articles_df.empty: \n",
+ " continue\n",
+ " partial_articles_dfs.append(partial_articles_df)\n",
+ "\n",
+ "articles_df = pd.concat(partial_articles_dfs)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Deleting 1191 duplicates\n",
+ "1683 unique articles found\n"
+ ]
+ }
+ ],
+ "source": [
+ "articles_df = articles_df[articles_df[\"language\"] == \"Latvian\"]\n",
+ "articles_df[\"seendate\"] = pd.to_datetime(articles_df[\"seendate\"])\n",
+ "\n",
+    "print(f\"Deleting {articles_df['url'].duplicated().sum()} duplicates\")\n",
+ "articles_df = articles_df.drop_duplicates(\"url\")\n",
+ "print(f\"{len(articles_df)} unique articles found\")\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "domain\n",
+ "nra.lv 459\n",
+ "lsm.lv 388\n",
+ "delfi.lv 351\n",
+ "la.lv 225\n",
+ "diena.lv 65\n",
+ "reitingi.lv 60\n",
+ "ogrenet.lv 30\n",
+ "bnn.lv 20\n",
+ "tvnet.lv 19\n",
+ "ventasbalss.lv 19\n",
+ "ir.lv 17\n",
+ "mfa.gov.lv 13\n",
+ "ntz.lv 5\n",
+ "president.lv 5\n",
+ "latgaleslaiks.lv 3\n",
+ "vm.gov.lv 2\n",
+ "220.lv 1\n",
+ "brivalatvija.lv 1\n",
+ "Name: count, dtype: int64"
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "articles_df[\"domain\"].value_counts()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "articles_df.to_csv(\"../data/latvian_article_links.csv\")"
+ ]
+  }
+ ],
+ "metadata": {
+ "colab": {
+ "provenance": []
+ },
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/pyproject.toml b/pyproject.toml
index edcdbb8..9c093e0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,6 +15,10 @@ dependencies = [
"ipykernel>=6.29.5",
"newspaper3k>=0.2.8",
"lxml[html-clean]>=5.3.0",
+ "gdeltdoc>=1.5.0",
+ "lxml-html-clean>=0.2.2",
+ "jupyter>=1.1.1",
]
requires-python = ">=3.12"
diff --git a/scraped_articles.json b/scraped_articles.json
deleted file mode 100644
index 6540fba..0000000
--- a/scraped_articles.json
+++ /dev/null
@@ -1,42 +0,0 @@
-[
- {
- "title": "Ziņojums: Līdz gadsimta beigām Latvijā prognozēta krasta līnijas atkāpšanās par 47–72 metriem",
- "content": "Piektdiena, 4. oktobris \nVārda dienas: Modra, Francis, Dmitrijs Vairāk Līdzšinējo datu analīze norāda uz relatīvi nelielu jūras līmeņa pieaugumu – salīdzinot 1961.–1990. un 1991.–2020. gada klimatiskās normas periodu vidējās vērtības, jūras līmenis Latvijā ir kāpis par aptuveni pusotru centimetru. Ņemot vērā gaidāmo klimata pārmaiņu paātrināšanos 21. gadsimtā, paredzams, ka jūras līmeņa izmaiņas kļūs straujākas. Prognozēts, ka vidējais jūras līmenis Latvijas teritorijā līdz 2100. gadam varētu kāpt par 32,6 centimetriem nelielu klimata pārmaiņu gadījumā un par 50,3 centimetriem būtisku pārmaiņu gadījumā. Visi klimata pārmaiņu modeļi norāda uz krasta līnijas atkāpšanos, atšķirīgs ir tikai prognozētais atkāpšanās apjoms. Dažādās Latvijas vietās paredzamas dažāda mēroga krasta izmaiņas, piemēram, pie būtiskām klimata pārmaiņām Saulkrastu un Mazirbes pusē prognozēta krasta līnijas atkāpšanās pat par 113 metriem, savukārt pie Klapkalnciema – par 40 metriem. Krasta līnijas izmaiņas ietekmē piekrastes zemūdens nogāzes un pludmales stāvums, tādēļ vietās, kur vēsturiski novērota erozija, ne vienmēr prognozētas lielākās izmaiņas nākotnē. Piemēram, Jūrkalnes apkārtnē, ņemot vērā stāvo nogāzes leņķi un pludmales stāvkrastu, krasta līnijas izmaiņas nākotnē varētu nepārsniegt 21–33 metrus. Atbilstoši nelielām klimata pārmaiņām 2100. gadā Latvijas piekraste varētu atkāpties par vidēji 47 metriem, atbilstoši vidējām klimata pārmaiņām – par 61 metru, savukārt būtisku klimata pārmaiņu gadījumā – par 72 metriem. Ziņojuma autori Viesturs Zandersons, Valters Žeizis, Jānis Lapinskis un Andris Vīksna uzsver, ka nākotnes prognozes uzskatāmas par indikatīvām. Precīzāku prognožu izveidei nepieciešami plašāki hidrodinamiskie sanešu plūsmu modeļi, kas spētu labāk raksturot pludmaļu attīstību, ņemot vērā ietekmi no tādiem mainīgiem faktoriem kā ūdens līmenis, sanešu plūsmas, viļņi, piekrastes veģetācija un arī dažādi cilvēku īstenoti krasta aizsardzības pasākumi. Laika posmā no 2017. līdz 2022. gadam vairāk nekā 55% Latvijas piekrastes – 270 kilometru garumā – novērota krasta atkāpšanās ar vidējo atkāpšanās ātrumu 1,46 metri gadā. Atlikušajā piekrastē, vairāk nekā 220 kilometros, novērota krasta uzskalošana ar vidējo ātrumu 1,41 metri gadā. Lielākajā daļā Latvijas pludmaļu vērojamas nelielas krasta izmaiņas – mazākas par diviem metriem gadā. Īpaši straujas izmaiņas, kas pārsniedz piecus metrus gadā, notiek tikai 13 kilometru garumā, septiņos no tiem vērojama strauja krasta atkāpšanās, savukārt sešu kilometru garumā notiek strauja krasta uzvirzīšanās. Jaunākie dati par piekrastes izmaiņām pieejami klimata pārmaiņu analīzes rīkā šeit. Iezīmējiet tekstu un spiediet Ctrl+Enter, lai nosūtītu labojamo teksta fragmentu redaktoram! Iezīmējiet tekstu un spiediet uz Ziņot par kļūdu pogas, lai nosūtītu labojamo teksta fragmentu redaktoram! Vairāk \nVairāk\n \n © 2024, Jebkādu materiālu pilnīga vai daļēja izmantošana atļauta tikai ar lsm.lv redakcijas atļauju. ",
- "url": "https://www.lsm.lv/raksts/dzive--stils/vide-un-dzivnieki/03.10.2024-zinojums-lidz-gadsimta-beigam-latvija-prognozeta-krasta-linijas-atkapsanas-par-47-72-metriem.a571093/",
- "date": "",
- "topic": "",
- "source": "https://www.lsm.lv/raksts/dzive--stils/vide-un-dzivnieki/03.10.2024-zinojums-lidz-gadsimta-beigam-latvija-prognozeta-krasta-linijas-atkapsanas-par-47-72-metriem.a571093/"
- },
- {
- "title": "Kūtri izmantojam dzeramā ūdens krānus. Kāpēc iedzīvotājiem trūkst pamudinājuma dzīvot \"zaļāk\"?",
- "content": "Pieaugot klimata pārmaiņu radītajām sekām, organizācijas Eiropas Savienībā (ES) un arī Latvijā arvien vairāk domā, kā šīs sekas mazināt. Lai gan Latvijas iedzīvotāji ir labi informēti par klimata pārmaiņu cēloņiem, viņiem trūkst zināšanu tieši par praktiskiem risinājumiem to radīto seku mazināšanā. Tomēr arī tad, kad risinājumi ir zināmi, rīcība klimata pārmaiņu seku mazināšanai atpaliek – cilvēki salīdzinoši kūtri izmanto, piemēram, tādus pašvaldībās pieejamos resursus kā dzeramā ūdens uzpildes vietas.\n\n\"Delfi Bizness\" skaidro, kādas ir Latvijas iedzīvotāju zināšanas par klimata pārmaiņām un kāpēc tās ir vienas no zemākajām ES.",
- "url": "https://www.delfi.lv/bizness/56234200/eiropas-zinas/120042670/zinam-problemu-un-neizmantojam-risinajumus-ko-latvijas-iedzivotaji-doma-par-klimata-parmainam",
- "date": "",
- "topic": "",
- "source": "https://www.delfi.lv/bizness/56234200/eiropas-zinas/120042670/zinam-problemu-un-neizmantojam-risinajumus-ko-latvijas-iedzivotaji-doma-par-klimata-parmainam"
- },
- {
- "title": "Kūtri izmantojam dzeramā ūdens krānus. Kāpēc iedzīvotājiem trūkst pamudinājuma dzīvot \"zaļāk\"?",
- "content": "Pieaugot klimata pārmaiņu radītajām sekām, organizācijas Eiropas Savienībā (ES) un arī Latvijā arvien vairāk domā, kā šīs sekas mazināt. Lai gan Latvijas iedzīvotāji ir labi informēti par klimata pārmaiņu cēloņiem, viņiem trūkst zināšanu tieši par praktiskiem risinājumiem to radīto seku mazināšanā. Tomēr arī tad, kad risinājumi ir zināmi, rīcība klimata pārmaiņu seku mazināšanai atpaliek – cilvēki salīdzinoši kūtri izmanto, piemēram, tādus pašvaldībās pieejamos resursus kā dzeramā ūdens uzpildes vietas.\n\n\"Delfi Bizness\" skaidro, kādas ir Latvijas iedzīvotāju zināšanas par klimata pārmaiņām un kāpēc tās ir vienas no zemākajām ES.",
- "url": "https://www.delfi.lv/bizness/56234200/eiropas-zinas/120042670/kutri-izmantojam-dzerama-udens-kranus-kapec-iedzivotajiem-trukst-pamudinajuma-dzivot-zalak",
- "date": "",
- "topic": "",
- "source": "https://www.delfi.lv/bizness/56234200/eiropas-zinas/120042670/kutri-izmantojam-dzerama-udens-kranus-kapec-iedzivotajiem-trukst-pamudinajuma-dzivot-zalak"
- },
- {
- "title": "",
- "content": "",
- "url": "https://nra.lv/pasaule/465572-sliktas-zinas-baltvina-cienitajiem.htm",
- "date": "",
- "topic": "",
- "source": "https://nra.lv/pasaule/465572-sliktas-zinas-baltvina-cienitajiem.htm"
- },
- {
- "title": "Par zaļāku Rīgu spriedīs piecas sestdienas. Ko sagaida no pirmās iedzīvotāju klimata asamblejas",
-        "content": "Organizatori uzskata, ka šī metode ir ļoti piemērota tādu jautājumu risināšanā, kas ir sarežģīti, nonākuši politiskā strupceļā vai polarizē sabiedrību, kādi ir ar klimatu un vides problēmu risināšanu saistītie jautājumi. Asamblejas laikā dalībnieki izstrādās konkrētus priekšlikumus Rīgas pilsētvides zaļināšanas plānam. Ko varam sagaidīt no Latvijā pirmās iedzīvotāju asamblejas, kā tā noritēs un kādus jautājumus risinās?\n\nSanita Rībena: Ingrīda, es saprotu, ka ideja par iedzīvotāju klimata asambleju nāk no \"Zaļās brīvības\". Kā tā radās un kā jūs nonācāt līdz tam, ka tas patiešām notiks?\n\n\n\nIngrīda Strazdiņa: Hronoloģija droši vien ir saistīta ar to, ka mēs esam \"ļoti iekšā\" klimata tēmās. Mēs ļoti daudz par to runājam, izglītojam sabiedrību, bet arvien biežāk mums bija sajūta, ka vajag sasaistīt šīs divas puses kopā – pašvaldību un cilvēkus. Mēs iesaistījāmies projektā, kur izstrādā vajadzīgo metodoloģiju, kā klimatu asambleju rīkot, un sākām to organizēt. Esam vieni no retajiem, kas iniciējuši klimata asambleju, parasti to rīko pašvaldības vai valsts pārvaldes institūcijas. Mēs esam mazliet celmlauži. Taču esam arī ļoti priecīgi, ka pašvaldība ir ar mums kopā, ka sadarbojas ar mums.\n\n\n\nSanita Rībena: Aktīvisti visu ko var lemt savā nodabā un izspriest savās virtuvēs, bet bieži vien pašvaldības un valsts institūcijas ir tās, kas daudzas lietas padara reālas, jo tām ir vara lemt. Kā jūs par to uzzinājāt? Un kas bija jūsu galvenā motivācija?\n\n\n\nJānis Ušča: Mūs uzrunāja Ingrīda ar kolēģiem no \"Zaļās brīvības\". Tā mums ir aktuāla tēma, jo tieši šobrīd strādājam pie Rīgas zaļināšanas plāna.\n\nIngrīda Strazdiņa un Jānis Ušča. Foto: Latvijas Radio\n\nIngrīda, vai tu varētu mazliet izstāstīt par metodes būtību? Ir skaidrs, ka ir nianses katrā valstī, bet kaut kas tomēr strukturāli ir nemainīgs.\n\nIngrīda Strazdiņa: Strukturāli nemainīgs ir tas, ka asambleja aicina kopā cilvēkus, kuri nav eksperti. Mēs esam pieraduši, ka par zaļām tēmām, par zaļināšanu vai par klimata pārmaiņu mazināšanu runā eksperti. Šinī gadījumā tie būs 35 cilvēki – visdažādākā vecumā, ar visdažādāko izglītību – tāda mini Rīgas publika. Vēl ir būtiski, ka tas nebūs vienas dienas pasākums. Rīgas klimata asambleja notiks piecas sestdienas. Sākumā dažādi eksperti viņiem palīdzēs, sniegs nepieciešamās zināšanas. 21. septembris ir pirmā reize, un tad katra otrā sestdiena līdz 23. novembrim.\n\nAktivitātes ir ieplānotas tā, lai cilvēki gan mācītos, gan sadraudzētos.\n\nIr iecerēts, ka piecu sestdienu laikā viņi sagatavos rekomendācijas, kuras pēc tam Rīgas dome ņems vērā, izstrādājot pilsētas zaļināšanas plānu.\n\n\n\nZinu, ka jūs esat izvēlējušies četras tēmas, uz ko fokusēties, lai tā nebūtu tikai tāda papļāpāšana, bet lai tas būtu patiešām lietderīgi. Varbūt jūs varat īsumā ieskicēt, kas tās ir par tēmām un kāpēc tieši tādas izvēlējāties?\n\n\n\nJānis Ušča: Te droši vien ir jāskatās uz zaļināšanas jautājumu kā tādu, ko mēs ar to saprotam. Primāri mēs skatāmies tieši klimata pārmaiņu kontekstā. Mērķis mums ir sistemātiski attīstīt zaļo infrastruktūru pilsētā, it īpaši koncentrējoties uz dabā balstītiem risinājumiem, kas reizē pilda arī kādas sociālas, ekonomiskas un arī ekoloģiskas funkcijas. Piemēram, mēs varam runāt par dažādiem ilgtspējīgiem lietusūdens risinājumiem.\n\nPavisam nesen pēc lielajām lietavām, manuprāt, reti kurš rīdzinieks nepamanīja, ka vietām mēs kļūstam par tādu Venēciju.\n\nJānis Ušča: Galvenais, to var arī risināt ar daļēji dabiskiem risinājumiem. Tas ir centrālais, par ko mēs šeit runājam – ir tāda tipa risinājumi, bet ne tikai saistībā ar lietus ūdeni.\n\nJa skatāmies nedaudz plašāk, tad esam definējuši četrus virzienus. Šajā plānā numur viens ir lietus ūdens risinājumi, numur divi – karstumsalas mazināšana, numur trīs ir bioloģiskās daudzveidības veicināšana un saglabāšana, un numur četri ir zaļo teritoriju pieejamība iedzīvotājiem. Ja mēs skatāmies uz pirmajiem trim, kurus es nosaucu, tie visi ir saistīti arī ar klimata pārmaiņām. Tā kā šis daļēji ir arī klimata adaptācijas plāns, kur mēs varam risināt kaut kādas problēmas, ar kurām mēs nākotnē varam saskarties arvien biežāk.\n\nPirms dažiem gadiem iedzīvotāju klimata asambleja pirmoreiz notika arī Francijā. Toreiz [prezidents Emanuels] Makrons ierosināja, ka kaut ko tādu vajadzētu. Francijā iedzīvotājiem bija pieejami dažādi konsultanti, kas viņus izglītoja, kā arī sniedza atbalstu, jo skaidrs, ka par jebkuru tēmu, lai varētu sniegt kaut cik jēdzīgas un vērtīgas rekomendācijas, vismaz mazliet tomēr ir jāizglītojas. Arī Rīgas klimata asamblejā cilvēki tiks izglītoti, pirms no viņiem gaidīs kaut kādus ieteikumus?\n\nIngrīda Strazdiņa: Cilvēki noteikti tiks izglītoti. Mums būs zinātnieks, kurš stāstīs par klimata pārmaiņām. Mums ir ieplānots stāstījums par to, kas ir zaļināšanas plāns un kas tajā ir ietverts. Mums būs pat ainavu arhitekte. Mums būs pārstāvis no Rīgas mežiem, meža terapeite. Būs arī Cēsu pašvaldības pārstāvji, kas padalīsies ar savu pieredzi, jo viņi ir daudzas foršas lietas Cēsīs izdarījuši. Mums būs arī brīnišķīga eksperte, kas stāstīs par pilsētu. Mēs arī iesim ārā. Pirmajā dienā mums būs ekspedīcija, mēs paskatīsimies uz pilsētu citādāk.\n\nVai visās tikšanās reizēs būs vieni un tie paši dalībnieki, vai viņi mainīsies?\n\n\n\nIngrīda Strazdiņa: Dalībnieki nemainīsies, viņi paliks visās reizēs vieni un tie paši.\n\nVai dalībnieki klimata asamblejas laikā uzzinās, vai un kad Rīgas pašvaldība plāno īstenot viņu idejas?\n\nJānis Ušča: Sākotnējo atgriezenisko saiti ir paredzēts sniegt jau pašas klimata asamblejas noslēgumā. Bet, protams, mums ir ļoti būtiski radītās idejas nedaudz apskatīties no praktiskās puses – ko mēs varam, ko nevaram, cik ilgs laiks īstenošanai vajadzīgs. Ir jāatrisina daži praktiskas dabas jautājumi. Tā mums būs pirmā šāda veida pieredze. Tā ir ne tikai \"Zaļā brīvība\", kas pirmo reizi kaut ko tādu īsteno, arī mēs tam ejam pirmo reizi cauri. Esam ļoti atvērti šādai pieejai, un, domāju, tur varētu būt praktisks pienesums.\n\nIgaunijā ir jau notikušas četras iedzīvotāju asamblejas, un katrai no tām bija cits fokuss. Arī jūs esat izvēlējušies četras tēmas, uz ko fokusēties. Kas noteica šo tēmu izvēli, vai izvēlējāties tādas, kur ir visvairāk nepieciešama palīdzība un atbalsts?\n\nJānis Ušča: Šīs tēmas ir balstītas kopējā problēmā, ko mēs gribam risināt.\n\nZaļināšanas plāns – tā ir tēma, ar ko mēs nosacīti nesen esam sākuši strādāt, taču turpināsim ar šo jautājumu strādāt vēl vienu vai divus gadus.\n\nČetras tēmas, ar ko strādāsim asamblejā, izriet no kopējā ietvara, kur mēs esam definējuši Rīgas zaļās infrastruktūras attīstību. Šīs tēmas nav izdomātas speciāli asamblejai, mēs ar tām strādātu tāpat. Asambleja sniedz papildu iespēju, izmantojot jaunu metodi, iegūt strukturētu iedzīvotāju skatījumu un, galvenais, konkrētus priekšlikumus, kurus potenciāli arī varam iekļaut zaļināšanas plānā.\n\nIespējams, ir mazāki risinājumi, kurus pati kopiena var īstenot, piemēram, apzaļumot savu pagalmu vai, piemēram, iekopt dobes pie savas ēkas, bet ir kaut kādi iemesli, kāpēc tas nenotiek. Ja mēs ar iedzīvotāju asamblejas metodi varētu iegūt šāda veida idejas, identificēt problēmas, riskus, tas mums ļoti noderētu.\n\n\n\nIespējamie ieguvumi no iedzīvotāju asamblejas varētu būt divi: pirmkārt, atgriezeniskā saite, kas vispār ir iedzīvotāju uzmanības fokusā, kur viņi redz problēmu saistībā ar šīm tēmām un, otrkārt, rekomendācijas. Respektīvi, labums būs jebkurā gadījumā – vai nu pirmajā, vai otrajā līmenī.\n\nJānis Ušča: Ideālais variants būtu saraksts tādiem pārdomātiem, praktiskiem ieteikumiem.\n\nParunāsim mazliet par izvēlētajām četrām tēmām. Lietus ūdens risinājumi ir ļoti saprotama tēma. Vai par pārējām tēmām arī varētu sniegt mazliet plašāku kontekstu?\n\nJānis Ušča: Otrs virziens ir karstumsalas efekta mazināšana. Mēs zinām, ka pilsētas uzkarst nedaudz vairāk nekā piepilsētas. Karstumsalas efekts, kā tas tiek definēts arī grāmatās, ir tas, ka apbūvētā teritorijā ir siltāk nekā apkārt pilsētā esošās teritorijās. Mēs visi to zinām – atgriežoties no laukiem pilsētā, te mēdz būt siltāks, un lielā mērā tas notiek tāpēc, ka saules enerģiju uzņem gan asfalts, gan ēkas. Kaut kādos brīžos, varbūt agrā rudenī, tas pat var būt patīkami, bet vasaras vidū, kad pieredzam lielos karstuma viļņus, tas mēdz būt traucējoši.\n\nPamatdoma ir tāda, ka zaļās teritorijas – zāle, koki, arī ūdens virsma – uzkarst mazāk nekā, piemēram, asfalts.\n\nJa skatāmies uz šo problēmu no dabā balstīto risinājumu perspektīvas, mēs varam šo efektu mazināt, piemēram, apzaļumojot pilsētvidi. Piemēram, koki ielās, puķu dobes – tas viss var mazināt kopējo siltuma efektu.\n\nZaļināšanas plāna ietvaros mums jau ir veikta datu izpēte un interpretācija – mēs zinām, kur pilsētā ir karstumsalas. Tas, piemēram, ir Rīgas vēsturiskais centrs, Vidzemes tirgus apkārtne.\n\nUn nākamā tēma – bioloģiskā daudzveidība?\n\nJānis Ušča: Ja skatāmies klimata pārmaiņu kontekstā, ir prognozes, ka, mainoties klimatam, nomainās arī sugas.\n\nAttiecīgi var samazināties bioloģiskā daudzveidība kā tāda. Saistībā ar to redzam tādus risinājumus, kā, piemēram, pilsētas pļavas, kas, iespējams, var palīdzēt. Mēs varam runāt arī par kādām mazākām iniciatīvām. Piemēram, kukaiņu mājas, sikspārņu mājas, bišu stropi – jebkas, kas veicina dabisko daudzveidību plašākajā mērogā. Mēs varam uzstādīt putnu dzirdinātavas, putnu barotavas un tā tālāk – tas pastarpinātā veidā var veicināt bioloģisko daudzveidību.\n\nUn ceturtā tēma – zaļo teritoriju pieejamība.\n\nJānis Ušča: Mēs skatāmies uz to no pilsētas iedzīvotāju perspektīvas – vai viņiem ir pieejamas zaļās teritorijas? Ne tikai labiekārtotas teritorijas, kuras sniedz rekreācijas funkciju.\n\nMēs runājam par jebkādām zaļām teritorijām, kuras pilda arī ekoloģisko, arī psihoemocionālo funkciju. Tas arī noteikti ir viens no mūsu mērķiem – veicināt šādu teritoriju pieejamību.\n\nKāda jūsu ideālajā vīzijā būs Rīga pēc 20 vai 30 gadiem?\n\nIngrīda Strazdiņa: Man liekas, ka cilvēcība un savstarpēja sarunāšanās ir ļoti svarīga. Arī tad, ja tu domā savādāk un es domāju savādāk, bet mēs varam sarunāties, tad mēs kopā varam izdarīt labas lietas. Manuprāt, tas ir svarīgāk par to, uz kura stūra varētu būt vēl kāds parks vai cik puķudobes, bet drīzāk, ka tā mēs protam sarunāties.\n\nJānis Ušča: Man tomēr zaļuma prasītos nedaudz vairāk, it īpaši, ja runājam par centrālo pilsētas daļu. Protams, manā vīzijā Rīgas ielas būtu nedaudz mierīgākas, klusākas. Arī to sniedz apzaļumojums – iespēju mazināt gan trokšņus, gan arī iepriekš pieminēto karstuma efektu. Ja mēs runājam par zaļo Rīgu plašākā kontekstā, tad man gribētos redzēt ilgtspējīgāku resursu izmantošanu.\n\nIngrīda Strazdiņa: Atcerējos vēl vienu lietu par nākotnes Rīgu. Mums ir arī neapbūvētas teritorijas Rīgā, un tiek uzskatīts, ka tas ir milzīgs trūkums, kas par visām varītēm ir jālabo. Bet varbūt var ieraudzīt citādāk to neapbūvētu teritoriju – varbūt tur tiešām var būt pļava, koki. Atstāt arī kādas vietas brīvas – tas nākotnes Rīgai arī ļoti piestāvētu. Lai mums nav viss blīvi apbūvēts, bet ir arī tādas kā atelpas vietas.",
- "url": "https://www.lsm.lv/raksts/dzive--stils/vide-un-dzivnieki/20.09.2024-par-zalaku-rigu-spriedis-piecas-sestdienas-ko-sagaida-no-pirmas-iedzivotaju-klimata-asamblejas.a569637/",
- "date": "",
- "topic": "",
- "source": "https://www.lsm.lv/raksts/dzive--stils/vide-un-dzivnieki/20.09.2024-par-zalaku-rigu-spriedis-piecas-sestdienas-ko-sagaida-no-pirmas-iedzivotaju-klimata-asamblejas.a569637/"
- }
-]
\ No newline at end of file
diff --git a/uv.lock b/uv.lock
index 7eb02a5..5354083 100644
--- a/uv.lock
+++ b/uv.lock
@@ -64,6 +64,52 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321 },
]
+[[package]]
+name = "argon2-cffi"
+version = "23.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "argon2-cffi-bindings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/31/fa/57ec2c6d16ecd2ba0cf15f3c7d1c3c2e7b5fcb83555ff56d7ab10888ec8f/argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08", size = 42798 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a4/6a/e8a041599e78b6b3752da48000b14c8d1e8a04ded09c88c714ba047f34f5/argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea", size = 15124 },
+]
+
+[[package]]
+name = "argon2-cffi-bindings"
+version = "21.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cffi" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b9/e9/184b8ccce6683b0aa2fbb7ba5683ea4b9c5763f1356347f1312c32e3c66e/argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3", size = 1779911 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d4/13/838ce2620025e9666aa8f686431f67a29052241692a3dd1ae9d3692a89d3/argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367", size = 29658 },
+ { url = "https://files.pythonhosted.org/packages/b3/02/f7f7bb6b6af6031edb11037639c697b912e1dea2db94d436e681aea2f495/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d", size = 80583 },
+ { url = "https://files.pythonhosted.org/packages/ec/f7/378254e6dd7ae6f31fe40c8649eea7d4832a42243acaf0f1fff9083b2bed/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae", size = 86168 },
+ { url = "https://files.pythonhosted.org/packages/74/f6/4a34a37a98311ed73bb80efe422fed95f2ac25a4cacc5ae1d7ae6a144505/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c", size = 82709 },
+ { url = "https://files.pythonhosted.org/packages/74/2b/73d767bfdaab25484f7e7901379d5f8793cccbb86c6e0cbc4c1b96f63896/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86", size = 83613 },
+ { url = "https://files.pythonhosted.org/packages/4f/fd/37f86deef67ff57c76f137a67181949c2d408077e2e3dd70c6c42912c9bf/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f", size = 84583 },
+ { url = "https://files.pythonhosted.org/packages/6f/52/5a60085a3dae8fded8327a4f564223029f5f54b0cb0455a31131b5363a01/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e", size = 88475 },
+ { url = "https://files.pythonhosted.org/packages/8b/95/143cd64feb24a15fa4b189a3e1e7efbaeeb00f39a51e99b26fc62fbacabd/argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082", size = 27698 },
+ { url = "https://files.pythonhosted.org/packages/37/2c/e34e47c7dee97ba6f01a6203e0383e15b60fb85d78ac9a15cd066f6fe28b/argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f", size = 30817 },
+ { url = "https://files.pythonhosted.org/packages/5a/e4/bf8034d25edaa495da3c8a3405627d2e35758e44ff6eaa7948092646fdcc/argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93", size = 53104 },
+]
+
+[[package]]
+name = "arrow"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "python-dateutil" },
+ { name = "types-python-dateutil" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2e/00/0f6e8fcdb23ea632c866620cc872729ff43ed91d284c866b515c6342b173/arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85", size = 131960 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f8/ed/e97229a566617f2ae958a6b13e7cc0f585470eac730a73e9e82c32a3cdd2/arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80", size = 66419 },
+]
+
[[package]]
name = "asttokens"
version = "2.4.1"
@@ -76,6 +122,33 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/45/86/4736ac618d82a20d87d2f92ae19441ebc7ac9e7a581d7e58bbe79233b24a/asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24", size = 27764 },
]
+[[package]]
+name = "async-lru"
+version = "2.0.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/80/e2/2b4651eff771f6fd900d233e175ddc5e2be502c7eb62c0c42f975c6d36cd/async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627", size = 10019 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fa/9f/3c3503693386c4b0f245eaf5ca6198e3b28879ca0a40bde6b0e319793453/async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224", size = 6111 },
+]
+
+[[package]]
+name = "attrs"
+version = "24.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fc/0f/aafca9af9315aee06a89ffde799a10a582fe8de76c563ee80bbcdc08b3fb/attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346", size = 792678 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6a/21/5b6702a7f963e95456c0de2d495f67bf5fd62840ac655dc451586d23d39a/attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2", size = 63001 },
+]
+
+[[package]]
+name = "babel"
+version = "2.16.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/2a/74/f1bc80f23eeba13393b7222b11d95ca3af2c1e28edca18af487137eefed9/babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316", size = 9348104 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 },
+]
+
[[package]]
name = "beautifulsoup4"
version = "4.12.3"
@@ -88,6 +161,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 },
]
+[[package]]
+name = "bleach"
+version = "6.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+ { name = "webencodings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6d/10/77f32b088738f40d4f5be801daa5f327879eadd4562f36a2b5ab975ae571/bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe", size = 202119 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ea/63/da7237f805089ecc28a3f36bca6a21c31fcbc2eb380f3b8f1be3312abd14/bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6", size = 162750 },
+]
+
[[package]]
name = "certifi"
version = "2024.8.30"
@@ -173,9 +259,12 @@ source = { editable = "." }
dependencies = [
{ name = "anthropic" },
{ name = "beautifulsoup4" },
+ { name = "gdeltdoc" },
{ name = "gradio" },
{ name = "ipykernel" },
+ { name = "jupyter" },
{ name = "lxml", extra = ["html-clean"] },
+ { name = "lxml-html-clean" },
{ name = "newspaper3k" },
{ name = "openai" },
{ name = "python-dotenv" },
@@ -186,9 +275,12 @@ dependencies = [
requires-dist = [
{ name = "anthropic", specifier = ">=0.34.2" },
{ name = "beautifulsoup4", specifier = ">=4.10.0" },
+ { name = "gdeltdoc", specifier = ">=1.5.0" },
{ name = "gradio", specifier = ">=4.44.1" },
{ name = "ipykernel", specifier = ">=6.29.5" },
+ { name = "jupyter", specifier = ">=1.1.1" },
{ name = "lxml", extras = ["html-clean"], specifier = ">=5.3.0" },
+ { name = "lxml-html-clean", specifier = ">=0.2.2" },
{ name = "newspaper3k", specifier = ">=0.2.8" },
{ name = "openai", specifier = ">=1.51.0" },
{ name = "python-dotenv", specifier = ">=1.0.1" },
@@ -295,6 +387,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 },
]
+[[package]]
+name = "defusedxml"
+version = "0.7.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 },
+]
+
[[package]]
name = "distro"
version = "1.9.0"
@@ -327,6 +428,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/06/ab/a1f7eed031aeb1c406a6e9d45ca04bff401c8a25a30dd0e4fd2caae767c3/fastapi-0.115.0-py3-none-any.whl", hash = "sha256:17ea427674467486e997206a5ab25760f6b09e069f099b96f5b55a32fb6f1631", size = 94625 },
]
+[[package]]
+name = "fastjsonschema"
+version = "2.20.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/03/3f/3ad5e7be13b4b8b55f4477141885ab2364f65d5f6ad5f7a9daffd634d066/fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23", size = 373056 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6d/ca/086311cdfc017ec964b2436fe0c98c1f4efcb7e4c328956a22456e497655/fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a", size = 23543 },
+]
+
[[package]]
name = "feedfinder2"
version = "0.0.4"
@@ -391,6 +501,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/57/5e/de2e6e51cb6894f2f2bc2641f6c845561361b622e96df3cca04df77222c9/fonttools-4.54.1-py3-none-any.whl", hash = "sha256:37cddd62d83dc4f72f7c3f3c2bcf2697e89a30efb152079896544a93907733bd", size = 1096920 },
]
+[[package]]
+name = "fqdn"
+version = "1.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/30/3e/a80a8c077fd798951169626cde3e239adeba7dab75deb3555716415bd9b0/fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f", size = 6015 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cf/58/8acf1b3e91c58313ce5cb67df61001fc9dcd21be4fadb76c1a2d540e09ed/fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014", size = 9121 },
+]
+
[[package]]
name = "fsspec"
version = "2024.9.0"
@@ -400,6 +519,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1d/a0/6aaea0c2fbea2f89bfd5db25fb1e3481896a423002ebe4e55288907a97a3/fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b", size = 179253 },
]
+[[package]]
+name = "gdeltdoc"
+version = "1.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pandas" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7e/95/09d4213fe00455ef2ff0151cda312b024b19d6b2b687ec3dfe8cee5ec2db/gdeltdoc-1.5.0.tar.gz", hash = "sha256:3ad0726a03325f5ea76c6bf9c00bd2680c624866130b3bc6bc90cb7297327dc3", size = 11465 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ef/da/79ab86dafbc96c67f9551b3bf1b93e97c1de687b5bab4196ed50a28ddcdc/gdeltdoc-1.5.0-py3-none-any.whl", hash = "sha256:e14775fdfe07ecd2781a1fbb836b9e97b2de3435a8ec468f1a780a3a6a8c13ec", size = 13088 },
+]
+
[[package]]
name = "gradio"
version = "4.44.1"
@@ -573,6 +705,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f4/3a/5d8680279ada9571de8469220069d27024ee47624af534e537c9ff49a450/ipython-8.28.0-py3-none-any.whl", hash = "sha256:530ef1e7bb693724d3cdc37287c80b07ad9b25986c007a53aa1857272dac3f35", size = 819456 },
]
+[[package]]
+name = "ipywidgets"
+version = "8.1.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "comm" },
+ { name = "ipython" },
+ { name = "jupyterlab-widgets" },
+ { name = "traitlets" },
+ { name = "widgetsnbextension" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c7/4c/dab2a281b07596a5fc220d49827fe6c794c66f1493d7a74f1df0640f2cc5/ipywidgets-8.1.5.tar.gz", hash = "sha256:870e43b1a35656a80c18c9503bbf2d16802db1cb487eec6fab27d683381dde17", size = 116723 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/22/2d/9c0b76f2f9cc0ebede1b9371b6f317243028ed60b90705863d493bae622e/ipywidgets-8.1.5-py3-none-any.whl", hash = "sha256:3290f526f87ae6e77655555baba4f36681c555b8bdbbff430b70e52c34c86245", size = 139767 },
+]
+
+[[package]]
+name = "isoduration"
+version = "20.11.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "arrow" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7c/1a/3c8edc664e06e6bd06cce40c6b22da5f1429aa4224d0c590f3be21c91ead/isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9", size = 11649 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7b/55/e5326141505c5d5e34c5e0935d2908a74e4561eca44108fbfb9c13d2911a/isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042", size = 11321 },
+]
+
[[package]]
name = "jedi"
version = "0.19.1"
@@ -632,6 +792,80 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/91/29/df4b9b42f2be0b623cbd5e2140cafcaa2bef0759a00b7b70104dcfe2fb51/joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6", size = 301817 },
]
+[[package]]
+name = "json5"
+version = "0.9.25"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/91/59/51b032d53212a51f17ebbcc01bd4217faab6d6c09ed0d856a987a5f42bbc/json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae", size = 40332 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/3c/4f8791ee53ab9eeb0b022205aa79387119a74cc9429582ce04098e6fc540/json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f", size = 30109 },
+]
+
+[[package]]
+name = "jsonpointer"
+version = "3.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595 },
+]
+
+[[package]]
+name = "jsonschema"
+version = "4.23.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+ { name = "jsonschema-specifications" },
+ { name = "referencing" },
+ { name = "rpds-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462 },
+]
+
+[package.optional-dependencies]
+format-nongpl = [
+ { name = "fqdn" },
+ { name = "idna" },
+ { name = "isoduration" },
+ { name = "jsonpointer" },
+ { name = "rfc3339-validator" },
+ { name = "rfc3986-validator" },
+ { name = "uri-template" },
+ { name = "webcolors" },
+]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2023.12.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "referencing" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f8/b9/cc0cc592e7c195fb8a650c1d5990b10175cf13b4c97465c72ec841de9e4b/jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc", size = 13983 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ee/07/44bd408781594c4d0a027666ef27fab1e441b109dc3b76b4f836f8fd04fe/jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c", size = 18482 },
+]
+
+[[package]]
+name = "jupyter"
+version = "1.1.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "ipykernel" },
+ { name = "ipywidgets" },
+ { name = "jupyter-console" },
+ { name = "jupyterlab" },
+ { name = "nbconvert" },
+ { name = "notebook" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/58/f3/af28ea964ab8bc1e472dba2e82627d36d470c51f5cd38c37502eeffaa25e/jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a", size = 5714959 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/38/64/285f20a31679bf547b75602702f7800e74dbabae36ef324f716c02804753/jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83", size = 2657 },
+]
+
[[package]]
name = "jupyter-client"
version = "8.6.3"
@@ -648,6 +882,25 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105 },
]
+[[package]]
+name = "jupyter-console"
+version = "6.6.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "ipykernel" },
+ { name = "ipython" },
+ { name = "jupyter-client" },
+ { name = "jupyter-core" },
+ { name = "prompt-toolkit" },
+ { name = "pygments" },
+ { name = "pyzmq" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bd/2d/e2fd31e2fc41c14e2bcb6c976ab732597e907523f6b2420305f9fc7fdbdb/jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539", size = 34363 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ca/77/71d78d58f15c22db16328a476426f7ac4a60d3a5a7ba3b9627ee2f7903d4/jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485", size = 24510 },
+]
+
[[package]]
name = "jupyter-core"
version = "5.7.2"
@@ -662,6 +915,139 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/fb/108ecd1fe961941959ad0ee4e12ee7b8b1477247f30b1fdfd83ceaf017f0/jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409", size = 28965 },
]
+[[package]]
+name = "jupyter-events"
+version = "0.10.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jsonschema", extra = ["format-nongpl"] },
+ { name = "python-json-logger" },
+ { name = "pyyaml" },
+ { name = "referencing" },
+ { name = "rfc3339-validator" },
+ { name = "rfc3986-validator" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8d/53/7537a1aa558229bb0b1b178d814c9d68a9c697d3aecb808a1cb2646acf1f/jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22", size = 61516 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a5/94/059180ea70a9a326e1815176b2370da56376da347a796f8c4f0b830208ef/jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960", size = 18777 },
+]
+
+[[package]]
+name = "jupyter-lsp"
+version = "2.2.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jupyter-server" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/85/b4/3200b0b09c12bc3b72d943d923323c398eff382d1dcc7c0dbc8b74630e40/jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001", size = 48741 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/07/e0/7bd7cff65594fd9936e2f9385701e44574fc7d721331ff676ce440b14100/jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da", size = 69146 },
+]
+
+[[package]]
+name = "jupyter-server"
+version = "2.14.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "anyio" },
+ { name = "argon2-cffi" },
+ { name = "jinja2" },
+ { name = "jupyter-client" },
+ { name = "jupyter-core" },
+ { name = "jupyter-events" },
+ { name = "jupyter-server-terminals" },
+ { name = "nbconvert" },
+ { name = "nbformat" },
+ { name = "overrides" },
+ { name = "packaging" },
+ { name = "prometheus-client" },
+ { name = "pywinpty", marker = "os_name == 'nt'" },
+ { name = "pyzmq" },
+ { name = "send2trash" },
+ { name = "terminado" },
+ { name = "tornado" },
+ { name = "traitlets" },
+ { name = "websocket-client" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0c/34/88b47749c7fa9358e10eac356c4b97d94a91a67d5c935a73f69bc4a31118/jupyter_server-2.14.2.tar.gz", hash = "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b", size = 719933 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/57/e1/085edea6187a127ca8ea053eb01f4e1792d778b4d192c74d32eb6730fed6/jupyter_server-2.14.2-py3-none-any.whl", hash = "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd", size = 383556 },
+]
+
+[[package]]
+name = "jupyter-server-terminals"
+version = "0.5.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pywinpty", marker = "os_name == 'nt'" },
+ { name = "terminado" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/d5/562469734f476159e99a55426d697cbf8e7eb5efe89fb0e0b4f83a3d3459/jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269", size = 31430 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/07/2d/2b32cdbe8d2a602f697a649798554e4f072115438e92249624e532e8aca6/jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa", size = 13656 },
+]
+
+[[package]]
+name = "jupyterlab"
+version = "4.2.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "async-lru" },
+ { name = "httpx" },
+ { name = "ipykernel" },
+ { name = "jinja2" },
+ { name = "jupyter-core" },
+ { name = "jupyter-lsp" },
+ { name = "jupyter-server" },
+ { name = "jupyterlab-server" },
+ { name = "notebook-shim" },
+ { name = "packaging" },
+ { name = "setuptools" },
+ { name = "tornado" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4a/78/ba006df6edaa561fe40be26c35e9da3f9316f071167cd7cc1a1a25bd2664/jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75", size = 21508698 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fd/3f/24a0f0ce60959cfd9756a3291cd3a5581e51cbd6f7b4aa121f5bba5320e3/jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321", size = 11641981 },
+]
+
+[[package]]
+name = "jupyterlab-pygments"
+version = "0.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884 },
+]
+
+[[package]]
+name = "jupyterlab-server"
+version = "2.27.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "babel" },
+ { name = "jinja2" },
+ { name = "json5" },
+ { name = "jsonschema" },
+ { name = "jupyter-server" },
+ { name = "packaging" },
+ { name = "requests" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0a/c9/a883ce65eb27905ce77ace410d83587c82ea64dc85a48d1f7ed52bcfa68d/jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4", size = 76173 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/09/2032e7d15c544a0e3cd831c51d77a8ca57f7555b2e1b2922142eddb02a84/jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4", size = 59700 },
+]
+
+[[package]]
+name = "jupyterlab-widgets"
+version = "3.0.13"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/59/73/fa26bbb747a9ea4fca6b01453aa22990d52ab62dd61384f1ac0dc9d4e7ba/jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed", size = 203556 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a9/93/858e87edc634d628e5d752ba944c2833133a28fa87bb093e6832ced36a3e/jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54", size = 214392 },
+]
+
[[package]]
name = "kiwisolver"
version = "1.4.7"
@@ -848,6 +1234,71 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
]
+[[package]]
+name = "mistune"
+version = "3.0.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ef/c8/f0173fe3bf85fd891aee2e7bcd8207dfe26c2c683d727c5a6cc3aec7b628/mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8", size = 90840 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f0/74/c95adcdf032956d9ef6c89a9b8a5152bf73915f8c633f3e3d88d06bd699c/mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205", size = 47958 },
+]
+
+[[package]]
+name = "nbclient"
+version = "0.10.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jupyter-client" },
+ { name = "jupyter-core" },
+ { name = "nbformat" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e2/d2/39bc36604f24bccd44d374ac34769bc58c53a1da5acd1e83f0165aa4940e/nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09", size = 62246 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/66/e8/00517a23d3eeaed0513e718fbc94aab26eaa1758f5690fc8578839791c79/nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f", size = 25318 },
+]
+
+[[package]]
+name = "nbconvert"
+version = "7.16.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "beautifulsoup4" },
+ { name = "bleach" },
+ { name = "defusedxml" },
+ { name = "jinja2" },
+ { name = "jupyter-core" },
+ { name = "jupyterlab-pygments" },
+ { name = "markupsafe" },
+ { name = "mistune" },
+ { name = "nbclient" },
+ { name = "nbformat" },
+ { name = "packaging" },
+ { name = "pandocfilters" },
+ { name = "pygments" },
+ { name = "tinycss2" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/af/e8/ba521a033b21132008e520c28ceb818f9f092da5f0261e94e509401b29f9/nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4", size = 854422 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b8/bb/bb5b6a515d1584aa2fd89965b11db6632e4bdc69495a52374bcc36e56cfa/nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3", size = 257388 },
+]
+
+[[package]]
+name = "nbformat"
+version = "5.10.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "fastjsonschema" },
+ { name = "jsonschema" },
+ { name = "jupyter-core" },
+ { name = "traitlets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454 },
+]
+
[[package]]
name = "nest-asyncio"
version = "1.6.0"
@@ -896,6 +1347,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442 },
]
+[[package]]
+name = "notebook"
+version = "7.2.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jupyter-server" },
+ { name = "jupyterlab" },
+ { name = "jupyterlab-server" },
+ { name = "notebook-shim" },
+ { name = "tornado" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0f/33/30b83c1c84e368087059bde1269549612584924db156bff53654e165a498/notebook-7.2.2.tar.gz", hash = "sha256:2ef07d4220421623ad3fe88118d687bc0450055570cdd160814a59cf3a1c516e", size = 4948876 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/46/77/53732fbf48196af9e51c2a61833471021c1d77d335d57b96ee3588c0c53d/notebook-7.2.2-py3-none-any.whl", hash = "sha256:c89264081f671bc02eec0ed470a627ed791b9156cad9285226b31611d3e9fe1c", size = 5037123 },
+]
+
+[[package]]
+name = "notebook-shim"
+version = "0.2.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "jupyter-server" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/54/d2/92fa3243712b9a3e8bafaf60aac366da1cada3639ca767ff4b5b3654ec28/notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb", size = 13167 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f9/33/bd5b9137445ea4b680023eb0469b2bb969d61303dedb2aac6560ff3d14a1/notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef", size = 13307 },
+]
+
[[package]]
name = "numpy"
version = "2.1.1"
@@ -975,6 +1454,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1a/72/a424db9116c7cad2950a8f9e4aeb655a7b57de988eb015acd0fcd1b4609b/orjson-3.10.7-cp313-none-win_amd64.whl", hash = "sha256:eef44224729e9525d5261cc8d28d6b11cafc90e6bd0be2157bde69a52ec83024", size = 137081 },
]
+[[package]]
+name = "overrides"
+version = "7.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832 },
+]
+
[[package]]
name = "packaging"
version = "24.1"
@@ -1018,6 +1506,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436 },
]
+[[package]]
+name = "pandocfilters"
+version = "1.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663 },
+]
+
[[package]]
name = "parso"
version = "0.8.4"
@@ -1078,6 +1575,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 },
]
+[[package]]
+name = "prometheus-client"
+version = "0.21.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e1/54/a369868ed7a7f1ea5163030f4fc07d85d22d7a1d270560dab675188fb612/prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e", size = 78634 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/84/2d/46ed6436849c2c88228c3111865f44311cff784b4aabcdef4ea2545dbc3d/prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166", size = 54686 },
+]
+
[[package]]
name = "prompt-toolkit"
version = "3.0.48"
@@ -1231,6 +1737,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 },
]
+[[package]]
+name = "python-json-logger"
+version = "2.0.7"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4f/da/95963cebfc578dabd323d7263958dfb68898617912bb09327dd30e9c8d13/python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c", size = 10508 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/35/a6/145655273568ee78a581e734cf35beb9e33a370b29c5d3c8fee3744de29f/python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd", size = 8067 },
+]
+
[[package]]
name = "python-multipart"
version = "0.0.12"
@@ -1262,6 +1777,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/08/9b/3c797468a96f68ce86f84917c198f60fc4189ab2ddc5841bcd71ead7680f/pywin32-307-cp313-cp313-win_arm64.whl", hash = "sha256:b30c9bdbffda6a260beb2919f918daced23d32c79109412c2085cbc513338a0a", size = 7952027 },
]
+[[package]]
+name = "pywinpty"
+version = "2.0.13"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/33/d9/93956af389ab7d4ef2f558b1cc6c5cb48885d254ac882f212964c30a1e4f/pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde", size = 28240 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/49/37/c0dcb1dca094af3605dd22c0528839a65bc4e1e78bb91eb12841d18fa3f1/pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4", size = 1399803 },
+]
+
[[package]]
name = "pyyaml"
version = "6.0.2"
@@ -1332,6 +1856,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/7b/d6/32fd69744afb53995619bc5effa2a405ae0d343cd3e747d0fbc43fe894ee/pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0", size = 1392485 },
]
+[[package]]
+name = "referencing"
+version = "0.35.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "attrs" },
+ { name = "rpds-py" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/99/5b/73ca1f8e72fff6fa52119dbd185f73a907b1989428917b24cff660129b6d/referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c", size = 62991 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b7/59/2056f61236782a2c86b33906c025d4f4a0b17be0161b63b70fd9e8775d36/referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de", size = 26684 },
+]
+
[[package]]
name = "regex"
version = "2024.9.11"
@@ -1397,6 +1934,27 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/25/dd878a121fcfdf38f52850f11c512e13ec87c2ea72385933818e5b6c15ce/requests_file-2.1.0-py2.py3-none-any.whl", hash = "sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c", size = 4244 },
]
+[[package]]
+name = "rfc3339-validator"
+version = "0.1.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "six" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490 },
+]
+
+[[package]]
+name = "rfc3986-validator"
+version = "0.1.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/da/88/f270de456dd7d11dcc808abfa291ecdd3f45ff44e3b549ffa01b126464d0/rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055", size = 6760 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9e/51/17023c0f8f1869d8806b979a2bffa3f861f26a3f1a66b094288323fba52f/rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9", size = 4242 },
+]
+
[[package]]
name = "rich"
version = "13.9.2"
@@ -1410,6 +1968,40 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/67/91/5474b84e505a6ccc295b2d322d90ff6aa0746745717839ee0c5fb4fdcceb/rich-13.9.2-py3-none-any.whl", hash = "sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1", size = 242117 },
]
+[[package]]
+name = "rpds-py"
+version = "0.20.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/55/64/b693f262791b818880d17268f3f8181ef799b0d187f6f731b1772e05a29a/rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121", size = 25814 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/89/b7/f9682c5cc37fcc035f4a0fc33c1fe92ec9cbfdee0cdfd071cf948f53e0df/rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6", size = 321468 },
+ { url = "https://files.pythonhosted.org/packages/b8/ad/fc82be4eaceb8d444cb6fc1956ce972b3a0795104279de05e0e4131d0a47/rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b", size = 313062 },
+ { url = "https://files.pythonhosted.org/packages/0e/1c/6039e80b13a08569a304dc13476dc986352dca4598e909384db043b4e2bb/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739", size = 370168 },
+ { url = "https://files.pythonhosted.org/packages/dc/c9/5b9aa35acfb58946b4b785bc8e700ac313669e02fb100f3efa6176a83e81/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c", size = 371376 },
+ { url = "https://files.pythonhosted.org/packages/7b/dd/0e0dbeb70d8a5357d2814764d467ded98d81d90d3570de4fb05ec7224f6b/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee", size = 397200 },
+ { url = "https://files.pythonhosted.org/packages/e4/da/a47d931eb688ccfd77a7389e45935c79c41e8098d984d87335004baccb1d/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96", size = 426824 },
+ { url = "https://files.pythonhosted.org/packages/0f/f7/a59a673594e6c2ff2dbc44b00fd4ecdec2fc399bb6a7bd82d612699a0121/rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4", size = 357967 },
+ { url = "https://files.pythonhosted.org/packages/5f/61/3ba1905396b2cb7088f9503a460b87da33452da54d478cb9241f6ad16d00/rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef", size = 378905 },
+ { url = "https://files.pythonhosted.org/packages/08/31/6d0df9356b4edb0a3a077f1ef714e25ad21f9f5382fc490c2383691885ea/rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821", size = 546348 },
+ { url = "https://files.pythonhosted.org/packages/ae/15/d33c021de5cb793101df9961c3c746dfc476953dbbf5db337d8010dffd4e/rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940", size = 553152 },
+ { url = "https://files.pythonhosted.org/packages/70/2d/5536d28c507a4679179ab15aa0049440e4d3dd6752050fa0843ed11e9354/rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174", size = 528807 },
+ { url = "https://files.pythonhosted.org/packages/e3/62/7ebe6ec0d3dd6130921f8cffb7e34afb7f71b3819aa0446a24c5e81245ec/rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139", size = 200993 },
+ { url = "https://files.pythonhosted.org/packages/ec/2f/b938864d66b86a6e4acadefdc56de75ef56f7cafdfd568a6464605457bd5/rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585", size = 214458 },
+ { url = "https://files.pythonhosted.org/packages/99/32/43b919a0a423c270a838ac2726b1c7168b946f2563fd99a51aaa9692d00f/rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29", size = 321465 },
+ { url = "https://files.pythonhosted.org/packages/58/a9/c4d899cb28e9e47b0ff12462e8f827381f243176036f17bef9c1604667f2/rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91", size = 312900 },
+ { url = "https://files.pythonhosted.org/packages/8f/90/9e51670575b5dfaa8c823369ef7d943087bfb73d4f124a99ad6ef19a2b26/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24", size = 370973 },
+ { url = "https://files.pythonhosted.org/packages/fc/c1/523f2a03f853fc0d4c1acbef161747e9ab7df0a8abf6236106e333540921/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7", size = 370890 },
+ { url = "https://files.pythonhosted.org/packages/51/ca/2458a771f16b0931de4d384decbe43016710bc948036c8f4562d6e063437/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9", size = 397174 },
+ { url = "https://files.pythonhosted.org/packages/00/7d/6e06807f6305ea2408b364efb0eef83a6e21b5e7b5267ad6b473b9a7e416/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8", size = 426449 },
+ { url = "https://files.pythonhosted.org/packages/8c/d1/6c9e65260a819a1714510a7d69ac1d68aa23ee9ce8a2d9da12187263c8fc/rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879", size = 357698 },
+ { url = "https://files.pythonhosted.org/packages/5d/fb/ecea8b5286d2f03eec922be7173a03ed17278944f7c124348f535116db15/rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f", size = 378530 },
+ { url = "https://files.pythonhosted.org/packages/e3/e3/ac72f858957f52a109c588589b73bd2fad4a0fc82387fb55fb34aeb0f9cd/rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c", size = 545753 },
+ { url = "https://files.pythonhosted.org/packages/b2/a4/a27683b519d5fc98e4390a3b130117d80fd475c67aeda8aac83c0e8e326a/rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2", size = 552443 },
+ { url = "https://files.pythonhosted.org/packages/a1/ed/c074d248409b4432b1ccb2056974175fa0af2d1bc1f9c21121f80a358fa3/rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57", size = 528380 },
+ { url = "https://files.pythonhosted.org/packages/d5/bd/04caf938895d2d78201e89c0c8a94dfd9990c34a19ff52fb01d0912343e3/rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a", size = 200540 },
+ { url = "https://files.pythonhosted.org/packages/95/cc/109eb8b9863680411ae703664abacaa035820c7755acc9686d5dd02cdd2e/rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2", size = 214111 },
+]
+
[[package]]
name = "ruff"
version = "0.6.9"
@@ -1444,6 +2036,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6a/23/8146aad7d88f4fcb3a6218f41a60f6c2d4e3a72de72da1825dc7c8f7877c/semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177", size = 15552 },
]
+[[package]]
+name = "send2trash"
+version = "1.8.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fd/3a/aec9b02217bb79b87bbc1a21bc6abc51e3d5dcf65c30487ac96c0908c722/Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf", size = 17394 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/40/b0/4562db6223154aa4e22f939003cb92514c79f3d4dccca3444253fd17f902/Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9", size = 18072 },
+]
+
+[[package]]
+name = "setuptools"
+version = "75.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/27/b8/f21073fde99492b33ca357876430822e4800cdf522011f18041351dfa74b/setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538", size = 1348057 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ff/ae/f19306b5a221f6a436d8f2238d5b80925004093fa3edea59835b514d9057/setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2", size = 1248506 },
+]
+
[[package]]
name = "sgmllib3k"
version = "1.0.0"
@@ -1512,6 +2122,32 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/9c/93f7bc03ff03199074e81974cc148908ead60dcf189f68ba1761a0ee35cf/starlette-0.38.6-py3-none-any.whl", hash = "sha256:4517a1409e2e73ee4951214ba012052b9e16f60e90d73cfb06192c19203bbb05", size = 71451 },
]
+[[package]]
+name = "terminado"
+version = "0.18.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "ptyprocess", marker = "os_name != 'nt'" },
+ { name = "pywinpty", marker = "os_name == 'nt'" },
+ { name = "tornado" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8a/11/965c6fd8e5cc254f1fe142d547387da17a8ebfd75a3455f637c663fb38a0/terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e", size = 32701 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/6a/9e/2064975477fdc887e47ad42157e214526dcad8f317a948dee17e1659a62f/terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", size = 14154 },
+]
+
+[[package]]
+name = "tinycss2"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "webencodings" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/44/6f/38d2335a2b70b9982d112bb177e3dbe169746423e33f718bf5e9c7b3ddd3/tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d", size = 67360 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2c/4d/0db5b8a613d2a59bbc29bc5bb44a2f8070eb9ceab11c50d477502a8a0092/tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7", size = 22532 },
+]
+
[[package]]
name = "tinysegmenter"
version = "0.3"
@@ -1619,6 +2255,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a8/2b/886d13e742e514f704c33c4caa7df0f3b89e5a25ef8db02aa9ca3d9535d5/typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b", size = 47288 },
]
+[[package]]
+name = "types-python-dateutil"
+version = "2.9.0.20241003"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/31/f8/f6ee4c803a7beccffee21bb29a71573b39f7037c224843eff53e5308c16e/types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446", size = 9210 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/35/d6/ba5f61958f358028f2e2ba1b8e225b8e263053bd57d3a79e2d2db64c807b/types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d", size = 9693 },
+]
+
[[package]]
name = "typing-extensions"
version = "4.12.2"
@@ -1637,6 +2282,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 },
]
+[[package]]
+name = "uri-template"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/31/c7/0336f2bd0bcbada6ccef7aaa25e443c118a704f828a0620c6fa0207c1b64/uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7", size = 21678 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140 },
+]
+
[[package]]
name = "urllib3"
version = "2.2.3"
@@ -1668,6 +2322,33 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 },
]
+[[package]]
+name = "webcolors"
+version = "24.8.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/f8/53150a5bda7e042840b14f0236e1c0a4819d403658e3d453237983addfac/webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d", size = 42392 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f0/33/12020ba99beaff91682b28dc0bbf0345bbc3244a4afbae7644e4fa348f23/webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a", size = 15027 },
+]
+
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774 },
+]
+
+[[package]]
+name = "websocket-client"
+version = "1.8.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826 },
+]
+
[[package]]
name = "websockets"
version = "12.0"
@@ -1687,3 +2368,12 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/20/52/8915f51f9aaef4e4361c89dd6cf69f72a0159f14e0d25026c81b6ad22525/websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f", size = 124985 },
{ url = "https://files.pythonhosted.org/packages/79/4d/9cc401e7b07e80532ebc8c8e993f42541534da9e9249c59ee0139dcb0352/websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e", size = 118370 },
]
+
+[[package]]
+name = "widgetsnbextension"
+version = "4.0.13"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/56/fc/238c424fd7f4ebb25f8b1da9a934a3ad7c848286732ae04263661eb0fc03/widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6", size = 1164730 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/21/02/88b65cc394961a60c43c70517066b6b679738caf78506a5da7b88ffcb643/widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71", size = 2335872 },
+]