# /// script
# requires-python = ">=3.13"
# dependencies = [
# "beautifulsoup4",
# "fake-useragent",
# "httpx",
# ]
# ///
import webbrowser
from sys import argv
from urllib.parse import parse_qs, quote

import httpx
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
if __name__ == "__main__":
    # Build the percent-encoded query. CLI arguments win over interactive
    # input. quote() encodes spaces as %20 (matching the original
    # "%20".join(...) for plain words) and additionally escapes reserved
    # characters (&, ?, #, ...) that the bare join left raw, which would
    # otherwise corrupt the search URL. input() already returns str, so the
    # original str() wrapper was redundant.
    query = quote(" ".join(argv[1:])) if len(argv) > 1 else quote(input("Search: "))

    print("Googling.....")

    # num=100 asks Google for up to 100 results on a single page.
    url = f"https://www.google.com/search?q={query}&num=100"

    # A randomized User-Agent makes the request look like a regular browser;
    # Google serves a different, scrape-hostile page to obvious bots.
    res = httpx.get(
        url,
        headers={"User-Agent": str(UserAgent().random)},
        timeout=10,
    )

    # Parse the response ONCE (the original built a second BeautifulSoup tree
    # inside its except branch) and take the first anchor of the first div.
    first_div = BeautifulSoup(res.text, "html.parser").find("div")
    anchor = first_div.find("a") if first_div is not None else None
    link = anchor.get("href") if anchor is not None else None
    if link is None:
        # Original crashed with a raw AttributeError traceback here.
        raise SystemExit("No search result link found in the response.")

    # Some result pages wrap the real destination in a redirect whose query
    # string carries it in a `url=` parameter. The original tried to unwrap
    # this in an `except AttributeError:` branch that re-ran the exact same
    # lookup that had just raised — i.e. the unwrap could never execute.
    # Unwrap it deterministically instead.
    redirect_params = parse_qs(link.partition("?")[2])
    if "url" in redirect_params:
        link = redirect_params["url"][0]

    webbrowser.open(link)