Search Google Without Ads
A Python script to search Google without ads.
Terminal: pip install googlesearch-python requests beautifulsoup4
Script: from googlesearch import search import requests from bs4 import BeautifulSoup
def get_site_description(url):
    """Fetch *url* and return its meta description.

    Prefers <meta name="description">, falling back to <meta property="og:description">.
    Returns a human-readable fallback string on any failure.
    """
    try:
        # timeout added: without it a stalled server hangs the whole search loop
        response = requests.get(url, timeout=10)
        soup = BeautifulSoup(response.text, 'html.parser')
        description = (soup.find('meta', attrs={'name': 'description'})
                       or soup.find('meta', attrs={'property': 'og:description'}))
        # `or` fallback also covers a meta tag that exists but has no content attr
        # (original returned None in that case)
        return (description.get('content') or "No description available") if description \
            else "No description available"
    except Exception as e:
        return f"Error retrieving description: {e}"
def print_rainbow_link(idx, result):
    """Build (not print) a numbered result line with the URL in cycling ANSI colors.

    Returns a string like "1. <colored url><reset>".
    """
    colors = ['\033[91m', '\033[93m', '\033[92m', '\033[94m', '\033[95m', '\033[96m']
    link = f"{idx}. "
    for i, char in enumerate(result):
        link += f"{colors[i % len(colors)]}{char}"
    link += '\033[0m'  # reset terminal color so later output is unaffected
    return link


def print_click_instruction():
    """Tell the user the colored link text can be clicked (terminal-dependent)."""
    print("\033[92mClick the rainbow-colored text to show the page.\033[0m\n")


def google_search(query):
    """Search Google for *query* (top 5 results) and print each link + description.

    Best-effort: any failure is reported, not raised.
    """
    try:
        for idx, result in enumerate(search(query, num_results=5), start=1):
            description = get_site_description(result)
            rainbow_link = print_rainbow_link(idx, result)
            print(rainbow_link)
            print(f" Description: {description}\n")
        print_click_instruction()
    except Exception as e:
        print(f"An error occurred: {e}")
# --- Script 1 entry point: simple interactive search loop ---
if __name__ == "__main__":
    while True:
        user_input = input("Enter your search query (or type 'exit' to quit): ")
        if user_input.lower() == 'exit':
            break
        google_search(user_input)

# --- Script 2: proxy-aware, Caesar-cipher variant (definitions below) ---
import threading  # NOTE(review): imported but never used in the visible code — confirm before removing
import random

import requests
from bs4 import BeautifulSoup
from googlesearch import search
def encrypt(text, key):
    """Caesar-shift every letter of *text* forward by *key*; other chars pass through."""
    shifted = []
    for ch in text:
        if not ch.isalpha():
            shifted.append(ch)
        elif ch.isupper():
            shifted.append(chr((ord(ch) + key - 65) % 26 + 65))
        else:
            shifted.append(chr((ord(ch) + key - 97) % 26 + 97))
    return "".join(shifted)
def decrypt(text, key):
    """Reverse the Caesar cipher: shift every letter of *text* back by *key*.

    Inverse of encrypt(text, key); non-letters are left untouched.
    """
    restored = []
    for ch in text:
        if not ch.isalpha():
            restored.append(ch)
        elif ch.isupper():
            restored.append(chr((ord(ch) - key - 65) % 26 + 65))
        else:
            restored.append(chr((ord(ch) - key - 97) % 26 + 97))
    return "".join(restored)
def get_site_description(url):
    """Fetch *url* and return its meta description.

    Prefers <meta name="description">, falling back to <meta property="og:description">.
    Returns a human-readable fallback string on any failure.
    """
    try:
        # timeout added: without it one dead host blocks the interactive loop forever
        response = requests.get(url, timeout=10)
        soup = BeautifulSoup(response.text, 'html.parser')
        description = (soup.find('meta', attrs={'name': 'description'})
                       or soup.find('meta', attrs={'property': 'og:description'}))
        # `or` fallback also covers a meta tag that exists but lacks a content attr
        # (original returned None in that case)
        return (description.get('content') or "No description available") if description \
            else "No description available"
    except Exception as e:
        return f"Error retrieving description: {e}"
def print_rainbow_link(idx, result):
    """Build (not print) a numbered result line with the URL in cycling ANSI colors."""
    colors = ['\033[91m', '\033[93m', '\033[92m', '\033[94m', '\033[95m', '\033[96m']
    link = f"{idx}. "
    for i, char in enumerate(result):
        link += f"{colors[i % len(colors)]}{char}"
    link += '\033[0m'  # reset terminal color
    return link


def print_click_instruction():
    """Tell the user the colored link text can be clicked (terminal-dependent)."""
    print("\033[92mClick the rainbow-colored text to show the page.\033[0m\n")


def fetch_random_proxy():
    """Scrape <tr>/<td> proxy tables from 'free proxy list' search hits.

    Returns one "http://ip:port" URL chosen at random, or None when nothing usable
    was found. Individual page failures are skipped silently (best-effort scrape).
    """
    try:
        search_query = "free proxy list"
        search_results = list(search(search_query, num_results=10))
        proxy_list = []
        for result in search_results:
            try:
                response = requests.get(result, timeout=5)
                if response.status_code == 200:
                    soup = BeautifulSoup(response.text, 'html.parser')
                    for row in soup.find_all("tr"):
                        cells = row.find_all("td")
                        if len(cells) >= 2:
                            ip = cells[0].text.strip()
                            port = cells[1].text.strip()
                            # BUG FIX: the original Caesar-encrypted the URL here and
                            # never decrypted it before handing it to requests/search,
                            # making every scraped proxy unusable. Store it as-is.
                            proxy_list.append(f"http://{ip}:{port}")
            except Exception:
                # one unreachable/odd page must not abort the whole scrape
                continue
        return random.choice(proxy_list) if proxy_list else None
    except Exception as e:
        print(f"An error occurred while fetching random proxy: {e}")
        return None


def prompt_proxy():
    """Ask the user for a proxy.

    'random' scrapes one via fetch_random_proxy(); 'exit' cancels (returns None);
    anything else is taken verbatim as "ip_address:port".
    """
    user_input = input("Enter the proxy IP address and port (format: ip_address:port), type 'random' to use a random proxy, or type 'exit' to cancel: ")
    if user_input.lower() == 'exit':
        # BUG FIX: the prompt advertises 'exit' but the original stored the literal
        # string 'exit' as the proxy; treat it as a cancel instead.
        return None
    if user_input.lower() == 'random':
        proxy = fetch_random_proxy()
        if proxy:
            print("\033[91mRandom proxy fetched successfully.\033[0m")
        else:
            print("\033[91mFailed to fetch random proxy.\033[0m")
        return proxy
    return user_input


def search_with_timeout(query, searching_torrents, proxy=None):
    """Search Google for *query* with a 33-second timeout, printing the results.

    When *searching_torrents* is true the query is narrowed to .torrent files;
    otherwise it is restricted to a few scientific-publisher sites (as in the
    original script). Returns True when the search ran (even with zero results),
    False when it raised.
    """
    try:
        if searching_torrents:
            search_query = query + " filetype:torrent"
        else:
            search_query = query
        # Add filter for scientific results
        if not searching_torrents:
            search_query += " site:sciencedirect.com OR site:researchgate.net OR site:nature.com"
        search_results = list(search(search_query, num_results=5, proxy=proxy, timeout=33))
        if len(search_results) == 0:
            print("No results found.")
            return True
        for idx, result in enumerate(search_results, start=1):
            description = get_site_description(result)
            print(print_rainbow_link(idx, result))
            print(f" Description: {description}\n")
        print_click_instruction()
        return True
    except Exception as e:
        print(f"An error occurred: {e}")
        return False
def google_search(query, searching_torrents, proxy=None):
    """Like search_with_timeout(), but using the search library's default timeout.

    Returns True when the search ran (even with zero results), False when it raised.
    """
    try:
        if searching_torrents:
            search_query = query + " filetype:torrent"
        else:
            search_query = query
        # Add filter for scientific results
        if not searching_torrents:
            search_query += " site:sciencedirect.com OR site:researchgate.net OR site:nature.com"
        search_results = list(search(search_query, num_results=5, proxy=proxy))
        if len(search_results) == 0:
            print("No results found.")
            return True
        for idx, result in enumerate(search_results, start=1):
            description = get_site_description(result)
            print(print_rainbow_link(idx, result))
            print(f" Description: {description}\n")
        print_click_instruction()
        return True
    except Exception as e:
        print(f"An error occurred: {e}")
        return False


def main():
    """Interactive loop: dispatch #torrents/#regular/#show/#proxy/#science commands,
    otherwise run the input as a search query."""
    proxy = None
    searching_torrents = False
    while True:
        user_input = input("\033[97mEnter your search query (or type 'exit' to quit), type '\033[93m#torrents\033[97m' for torrent search, type '\033[94m#show\033[97m' to show current proxy, type '\033[91m#proxy\033[97m' to set a proxy, or type '\033[92m#science\033[97m' for scientific search: ")
        if user_input.lower() == 'exit':
            break
        # BUG FIX: the original Caesar-shifted the input and then searched the
        # shifted gibberish on Google. Encrypting BOTH sides of every command
        # comparison was a no-op, so compare — and search — the plain text.
        command = user_input.lower()
        if command == '#torrents':
            searching_torrents = True
            print("\033[93mNow searching for torrents related to the query. Type '\033[94m#regular\033[93m' to switch back to regular searches.\033[0m\n")
        elif command == '#regular':
            searching_torrents = False
            print("\033[93mNow searching regular queries.\033[0m\n")
        elif command == '#show':
            if proxy:
                print(f"\033[94mCurrent proxy: {proxy}\033[0m")
            else:
                print("\033[91mNo proxy is currently set.\033[0m")
        elif command == '#proxy':
            proxy = prompt_proxy()
        elif command == '#science':
            search_query = input("Enter your scientific search query: ")
            google_search(search_query, searching_torrents, proxy)
        else:
            # return value ignored: the original's `if not continue_searching:
            # continue` at the end of the loop body was a no-op
            if proxy:
                search_with_timeout(user_input, searching_torrents, proxy)
            else:
                search_with_timeout(user_input, searching_torrents)


if __name__ == "__main__":
    main()