import requests
from bs4 import BeautifulSoup
str1='Python' # search string
link = "https://suggestqueries.google.com/complete/search?output=toolbar&hl=en&q="+str1
content = requests.get(link)
soup = BeautifulSoup(content.text, "xml") # creating bs4 object
for d in soup.find_all('CompleteSuggestion'): # loop through all suggestions
    print(d.suggestion['data']) # data attribute holds the suggestion text
Output:
python
python online compiler
python download
python interview questions
python compiler
python tutorial
python programming
python w3schools
python interview questions for freshers
python basics
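The loop above works because the endpoint returns XML in which each <CompleteSuggestion> element wraps a <suggestion> element whose data attribute holds the text. To see the raw response for yourself, here is a minimal sketch reusing the same search string:
import requests

str1 = 'Python'  # same search string as above
link = "https://suggestqueries.google.com/complete/search?output=toolbar&hl=en&q=" + str1
content = requests.get(link)
# each suggestion appears as <CompleteSuggestion><suggestion data="..."/></CompleteSuggestion>
print(content.text[:300])  # show the start of the raw XML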
To take the search string from the user instead of hard-coding it, replace the str1 line with input():
#str1='Python' # search string
str1=input("Enter keyword: ") # user input for search string
import requests
from bs4 import BeautifulSoup
kw=['Tkinter','Python']
kw_data=[]
for s in kw:
link = "https://suggestqueries.google.com/complete/search?output=toolbar&hl=en&q="+s
content = requests.get(link)
soup = BeautifulSoup(content.text, "xml")
my_list=[]
for d in soup.find_all('CompleteSuggestion'):
#print(d.suggestion['data']) # printing the data
my_list.append(d.suggestion['data']) # adding to list
kw_data.append(my_list)
print(kw_data)
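If the keyword list grows, it is better to pause briefly between requests instead of calling the endpoint in a tight loop. A minimal sketch of the same loop with a one-second delay (the extra keyword and the delay length are assumptions, adjust them as needed):
import time
import requests
from bs4 import BeautifulSoup

kw = ['Tkinter', 'Python', 'MySQL']  # sample keyword list (MySQL added only as an example)
kw_data = []
for s in kw:
    link = "https://suggestqueries.google.com/complete/search?output=toolbar&hl=en&q=" + s
    content = requests.get(link)
    soup = BeautifulSoup(content.text, "xml")
    kw_data.append([d.suggestion['data'] for d in soup.find_all('CompleteSuggestion')])
    time.sleep(1)  # pause for a second before the next request
print(kw_data)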
We can store the above data in a Google Sheet by using pygsheets with a service account. Add this part at the end of the above code.
import pygsheets
path='G:\\My drive\\testing\\google-sheet\\creds1.json' # service account credential file
gc=pygsheets.authorize(service_account_file=path)
sh=gc.open('my_gsheets1') # open the Google Sheet by name
wk1=sh[0] # first worksheet
wk1.append_table(kw_data,start='A1') # each inner list is written as one row
wk1.adjust_column_width(start=1,end=10,pixel_size=None) # adjust the width of columns 1 to 10
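If you prefer one suggestion per row (keyword in the first column, suggestion in the second), a small variation can build flat rows before appending. This layout is an assumption, not part of the original script:
rows = []
for keyword, suggestions in zip(kw, kw_data):
    for s in suggestions:
        rows.append([keyword, s])  # one (keyword, suggestion) pair per row
wk1.append_table(rows, start='A1')  # use this in place of the append_table call above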
Save as google_suggest.py. As an alternative to a Google Sheet, the next script stores the suggestions in a local SQLite database.
# suggest_to_sqlite.py
# Fetch Google autocomplete suggestions for a keyword
# and store them into a local SQLite DB.
import sqlite3
import requests
from bs4 import BeautifulSoup
from datetime import datetime
from pathlib import Path
DB_PATH = Path("E:\\testing3\\suggestions.db") # change if you want a different location
UA = "Mozilla/5.0 (compatible; Plus2net-Suggest-Collector/1.0)"
TIMEOUT = 20
def init_db(db_path: Path):
    con = sqlite3.connect(str(db_path))
    cur = con.cursor()
    cur.execute("""
        CREATE TABLE IF NOT EXISTS suggestions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            keyword TEXT NOT NULL,
            suggestion TEXT NOT NULL,
            source TEXT NOT NULL,
            fetched_at TEXT NOT NULL,
            UNIQUE(keyword, suggestion, source)
        )
    """)
    # helpful index for lookups by keyword
    cur.execute("CREATE INDEX IF NOT EXISTS idx_suggestions_keyword ON suggestions(keyword)")
    con.commit()
    con.close()
def fetch_suggestions(keyword: str) -> list[str]:
url = "https://suggestqueries.google.com/complete/search"
params = {"output": "toolbar", "hl": "en", "q": keyword}
headers = {"User-Agent": UA}
r = requests.get(url, params=params, headers=headers, timeout=TIMEOUT)
r.raise_for_status()
soup = BeautifulSoup(r.text, "xml")
sugs = [node["data"] for node in soup.find_all("suggestion")]
return sugs
def save_suggestions(db_path: Path, keyword: str, suggestions: list[str], source: str = "google"):
    con = sqlite3.connect(str(db_path))
    cur = con.cursor()
    now = datetime.utcnow().isoformat(timespec="seconds") + "Z"
    rows = [(keyword, s, source, now) for s in suggestions]
    # use INSERT OR IGNORE to avoid duplicates on (keyword, suggestion, source)
    cur.executemany("""
        INSERT OR IGNORE INTO suggestions (keyword, suggestion, source, fetched_at)
        VALUES (?, ?, ?, ?)
    """, rows)
    con.commit()
    inserted = cur.rowcount  # number of rows changed by last operation (works for executemany in sqlite3)
    con.close()
    return inserted
def main():
    init_db(DB_PATH)
    keyword = input("Enter keyword: ").strip()
    if not keyword:
        print("No keyword provided.")
        return
    try:
        suggestions = fetch_suggestions(keyword)
    except requests.RequestException as e:
        print("Error fetching suggestions:", e)
        return
    if not suggestions:
        print("No suggestions found.")
        return
    # print to console
    print("\nSuggestions:")
    for s in suggestions:
        print(" -", s)
    # save to DB
    inserted = save_suggestions(DB_PATH, keyword, suggestions)
    print(f"\nSaved {inserted} new suggestion(s) to {DB_PATH}")
    # optional: show last few rows for this keyword
    con = sqlite3.connect(str(DB_PATH))
    cur = con.cursor()
    cur.execute("""
        SELECT suggestion, fetched_at FROM suggestions
        WHERE keyword = ?
        ORDER BY fetched_at DESC, suggestion ASC
        LIMIT 10
    """, (keyword,))
    rows = cur.fetchall()
    con.close()
    print("\nRecent entries in DB for this keyword:")
    for s, ts in rows:
        print(f" - {s} ({ts})")
if __name__ == "__main__":
    main()
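Once a few keywords have been collected, the suggestions table can be queried on its own. Here is a minimal sketch of a separate helper script, assuming the same DB_PATH as above (this summary query is an addition, not part of suggest_to_sqlite.py):
# count_suggestions.py - summarise what is stored in suggestions.db
import sqlite3
from pathlib import Path

DB_PATH = Path("E:\\testing3\\suggestions.db")  # same path used by suggest_to_sqlite.py

con = sqlite3.connect(str(DB_PATH))
cur = con.cursor()
# number of stored suggestions per keyword
cur.execute("""
    SELECT keyword, COUNT(*) AS total FROM suggestions
    GROUP BY keyword
    ORDER BY total DESC
""")
for keyword, total in cur.fetchall():
    print(f"{keyword}: {total} suggestion(s)")
con.close()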