diff --git a/README.md b/README.md
index 457a97e..3d53c17 100644
--- a/README.md
+++ b/README.md
@@ -73,9 +73,8 @@ to view the status of haproxy, navigate to `your-multisocks-host:1337` in a brow
 to fetch state of each circuit you could leverage something similar to the below
 
 ```shell
-watch -n 5 "curl -s 'http://localhost:1337/;csv' \
-| awk -F ',' '{print \$2 \" - \" \$18}' \
-| grep -v 'status\|FRONT\|BACK'"
+curl -s 'http://multisocks:1337/;csv' \
+| sed 's/,/ ,/g' | column -t -s, | less -S
 ```
 
 ## debugging
@@ -117,6 +116,8 @@ curl -sL ransomwhat.telemetry.ltd/groups \
 | head -n 10
 ```
 
+see [loadtest.py](loadtest.py) & [speedtest.sh](speedtest.sh) for more thorough examples
+
 ## notes
 
 the current health-check implementation leaves much room for improvement. it uses netcat to send an _authenticated_ telnet command `getinfo circuit-status`. an alternate could be to use stem, with something like the below
diff --git a/loadtest.py b/loadtest.py
new file mode 100644
index 0000000..0127764
--- /dev/null
+++ b/loadtest.py
@@ -0,0 +1,53 @@
+import asyncio
+import aiohttp
+import aiohttp_socks
+import time
+import requests
+import logging
+
+logging.basicConfig(
+    format="%(asctime)s [%(levelname)s]: %(message)s",
+    level=logging.INFO
+)
+
+# collect slugs for every location ransomwatch currently marks as available
+def fetch_rwonline():
+    try:
+        response = requests.get("https://ransomwhat.telemetry.ltd/groups")
+        response.raise_for_status()
+        data = response.json()
+        urls = []
+        for group in data:
+            for location in group.get("locations", []):
+                if location.get("available"):
+                    urls.append(location.get("slug"))
+        logging.info(f"found {len(urls)} online hosts from ransomwatch to fetch with")
+        return urls
+    except requests.RequestException as e:
+        logging.error(f"An error occurred: {e}")
+        return []
+
+# fetch a single url through the shared session, bounded by the semaphore
+async def fetch_url(session, url, semaphore: asyncio.Semaphore):
+    async with semaphore:
+        start_time = time.time()
+        try:
+            async with session.get(url, ssl=False) as response:
+                duration = time.time() - start_time
+                logging.info(f"Fetched {url} with status {response.status}. Time taken: {duration:.2f} seconds")
+        except Exception as e:
+            logging.error(f"Failed to fetch {url}. Error: {e}")
+
+async def main():
+    urls = fetch_rwonline()
+    # cap in-flight requests; all traffic goes through the multisocks socks5 endpoint
+    semaphore = asyncio.Semaphore(200)
+    proxy = "socks5://multisocks.dark:8080"
+    connector = aiohttp_socks.ProxyConnector.from_url(proxy)
+    timeout = aiohttp.ClientTimeout(total=30)
+    async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session:
+        tasks = [fetch_url(session, url, semaphore) for url in urls]
+        await asyncio.gather(*tasks)
+
+if __name__ == '__main__':
+    asyncio.run(main())
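
the stem-based alternative mentioned in the readme notes is not shown in this hunk; as a rough illustration only (not part of the diff), a minimal sketch of such a check could look like the below, where the control address, port, and password are placeholders rather than values taken from the repo

```python
# minimal sketch, not part of the diff above: use stem instead of netcat/telnet
# to ask a tor instance for its circuit status. the address, port and password
# are placeholders; adjust them to however the compose setup exposes each
# tor control port.
from stem.control import Controller

def circuits_built(address="127.0.0.1", port=9051, password="change-me"):
    with Controller.from_port(address=address, port=port) as controller:
        controller.authenticate(password=password)
        status = controller.get_info("circuit-status")
        # circuit-status lines look like "<id> <status> <path> ...", so a healthy
        # instance should report at least one BUILT circuit
        return any(line.split()[1] == "BUILT" for line in status.splitlines() if line.strip())

if __name__ == "__main__":
    print("healthy" if circuits_built() else "unhealthy")
```

turning that boolean into an exit code could stand in for the netcat check, though the authentication and port wiring would need to match however the containers are configured.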