# Import necessary libraries
import requests  # For making HTTP requests
import argparse  # For parsing command-line arguments
import concurrent.futures  # For concurrent execution
from collections import OrderedDict  # For maintaining order of websites
from colorama import init, Fore  # For colored terminal output
import time  # For handling time-related tasks
import random  # For generating random retry delays

# Initialize colorama; autoreset=True restores the default color after each print.
init(autoreset=True)

# Ordered dictionary of websites to check for a given username.
WEBSITES = OrderedDict([
    ("Instagram", "https://www.instagram.com/{}"),
    ("Facebook", "https://www.facebook.com/{}"),
    ("YouTube", "https://www.youtube.com/user/{}"),
    ("Reddit", "https://www.reddit.com/user/{}"),
    ("GitHub", "https://github.com/{}"),
    ("Twitch", "https://www.twitch.tv/{}"),
    ("Pinterest", "https://www.pinterest.com/{}/"),
    ("TikTok", "https://www.tiktok.com/@{}"),
    ("Flickr", "https://www.flickr.com/photos/{}")
])
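# Each URL above carries a "{}" placeholder that str.format fills in with the
# username. For example (illustrative), WEBSITES["GitHub"].format("octocat")
# yields "https://github.com/octocat".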

REQUEST_DELAY = 2  # Delay in seconds between requests to the same website
MAX_RETRIES = 3  # Maximum number of retries for a failed request
last_request_times = {}  # Tracks the last request time for each website

def check_username(website, username):
    """
    Check if the username exists on the given website.
    Returns the full URL if the username exists, False otherwise.
    """
    url = website.format(username)  # Format the URL with the given username
    retries = 0  # Initialize retry counter

    # Retry loop
    while retries < MAX_RETRIES:
        try:
            # Implement rate limiting per website.
            current_time = time.time()
            if website in last_request_times and current_time - last_request_times[website] < REQUEST_DELAY:
                delay = REQUEST_DELAY - (current_time - last_request_times[website])
                time.sleep(delay)  # Sleep to maintain the request delay.

            response = requests.get(url, timeout=10)  # Make the HTTP request; the timeout avoids hanging indefinitely
            last_request_times[website] = time.time()  # Update the last request time.

            if response.status_code == 200:  # Treat a 200 response as the username existing.
                return url
            else:
                return False
        except requests.exceptions.RequestException:
            retries += 1  # Increment retry counter on exception.
            delay = random.uniform(1, 3)  # Random delay between retries.
            time.sleep(delay)  # Sleep for the delay period.

    return False  # Return False if all retries failed.

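# Illustrative call (not executed here): checking a profile that is known to
# exist, such as GitHub's demo account "octocat", should return its URL:
#   check_username("https://github.com/{}", "octocat")
#   -> "https://github.com/octocat" on an HTTP 200 response, False otherwise.
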
def main():
    # Parse command-line arguments.
    parser = argparse.ArgumentParser(description="Check if a username exists on various websites.")
    parser.add_argument("username", help="The username to check.")
    parser.add_argument("-o", "--output", help="Path to save the results to a file.")
    args = parser.parse_args()

    username = args.username  # Username to check.
    output_file = args.output  # Output file path.

    print(f"Checking for username: {username}")

    results = OrderedDict()  # Dictionary to store results.

    # Use ThreadPoolExecutor for concurrent execution.
    with concurrent.futures.ThreadPoolExecutor() as executor:
        # Submit one task per website, mapping each future back to its website name.
        futures = {
            executor.submit(check_username, website, username): website_name
            for website_name, website in WEBSITES.items()
        }
        for future in concurrent.futures.as_completed(futures):
            website_name = futures[future]  # Get the website name.
            try:
                result = future.result()  # Get the result.
            except Exception as exc:
                print(f"{website_name} generated an exception: {exc}")
                result = False
            finally:
                results[website_name] = result  # Store the result.

    # Print the results.
    print("\nResults:")
    for website, result in results.items():
        if result:
            print(f"{Fore.GREEN}{website}: Found ({result})")
        else:
            print(f"{Fore.RED}{website}: Not Found")

    # Save results to a file if specified.
    if output_file:
        with open(output_file, "w") as f:
            for website, result in results.items():
                if result:
                    f.write(f"{website}: Found ({result})\n")
                else:
                    f.write(f"{website}: Not Found\n")
        print(f"\n{Fore.GREEN}Results saved to {output_file}")

# Run main only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
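
# Example invocation (assuming this file is saved as username_checker.py;
# the username "johndoe" is purely illustrative):
#   python username_checker.py johndoe -o results.txt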