#!/usr/bin/env python3
"""
Provide the current worldwide COVID-19 statistics.
This data is scraped from 'https://www.worldometers.info/coronavirus/'.
"""
# /// script
# requires-python = ">=3.13"
# dependencies = [
# "beautifulsoup4",
# "httpx",
# ]
# ///
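# The block above is PEP 723 inline script metadata; a PEP 723-aware runner
# (for example, `uv run`) can install beautifulsoup4 and httpx before executing.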

import httpx
from bs4 import BeautifulSoup


def world_covid19_stats(
    url: str = "https://www.worldometers.info/coronavirus/",
) -> dict:
    """
    Return a dict of current worldwide COVID-19 statistics.
    """
    soup = BeautifulSoup(
        httpx.get(url, timeout=10, follow_redirects=True).text, "html.parser"
    )
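    # The page's headline totals are expected as <h1> headings paired with
    # "maincounter-number" divs; additional panel statistics come from
    # "panel-title" spans paired with "number-table-main" divs.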
    keys = soup.find_all("h1")
    values = soup.find_all("div", {"class": "maincounter-number"})
    keys += soup.find_all("span", {"class": "panel-title"})
    values += soup.find_all("div", {"class": "number-table-main"})
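    # zip() stops at the shorter list, so a heading without a matching counter
    # (or vice versa) is silently dropped rather than raising an error.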
    return {key.text.strip(): value.text.strip() for key, value in zip(keys, values)}


if __name__ == "__main__":
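    # "\033[1m" and "\033[0m" are the ANSI escape sequences for bold on / reset.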
    print("\033[1m COVID-19 Status of the World \033[0m\n")
    print("\n".join(f"{key}\n{value}" for key, value in world_covid19_stats().items()))