Crawl Google Results — Algorithm

DigitalGov puts it this way: "If the website is optimized correctly, Google and other search engines will spider and index the pages and their respective keywords, allowing the government website to show up high on search engines when a user is searching for related information."

Google Alerts continued to face critical performance issues and temporary regional unavailability, but Google technical support has been successfully addressing the issues reported by users on its official forum.
import sys
import webbrowser

from bs4 import BeautifulSoup
from fake_useragent import UserAgent
import requests


if __name__ == "__main__":
    # Search Google for the words given on the command line and open the
    # first few result links in the default web browser.
    print("Googling.....")
    query = " ".join(sys.argv[1:])
    res = requests.get(
        "https://www.google.com/search",
        # Pass the query via `params` so requests URL-encodes it; splicing
        # raw spaces into the URL string produced an invalid request URL.
        params={"q": query},
        # Bug fix: the real HTTP header name is "User-Agent". The original
        # "UserAgent" key is not a recognized header, so Google saw the
        # default python-requests agent and could serve a blocked/alternate page.
        headers={"User-Agent": UserAgent().random},
    )
    # Fail fast on HTTP errors (e.g. 429/503) instead of parsing an error page.
    res.raise_for_status()
    # Save the raw response for inspecting the result-page markup/classes.
    # `res.content` is already fully buffered here (no stream=True), so a
    # single write replaces the pointless iter_content() chunking loop.
    with open("project1a.html", "wb") as out_file:
        out_file.write(res.content)
    soup = BeautifulSoup(res.text, "html.parser")
    # ".eZt8xd" is a Google results link class — brittle; may break when
    # Google changes its markup. Keep only the first five hits.
    links = soup.select(".eZt8xd")[:5]

    print(len(links))
    for link in links:
        href = link.get("href")
        if link.text == "Maps":
            # "Maps" entries carry absolute URLs; open them directly.
            webbrowser.open(href)
        else:
            # Other result hrefs are relative to google.com.
            webbrowser.open(f"http://google.com{href}")

LANGUAGE:

DARK MODE: