-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathterm.py
More file actions
233 lines (190 loc) · 8.04 KB
/
term.py
File metadata and controls
233 lines (190 loc) · 8.04 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
import requests
from colorama import Fore, Back, Style
import time
import sys
import os
import scanlib.js as js
import googlesearch
import threading
import urllib3
import logging
import urllib.parse
import random
import platform
from Modules import agents
import sys
from bs4 import BeautifulSoup
import instaloader
import subprocess
# Suppress traceback bodies so uncaught errors print only the message line.
sys.tracebacklimit = 0
# Immediately raise-and-swallow a dummy exception.
# NOTE(review): looks like dead code / a warm-up for tracebacklimit — confirm
# before removing.
try:
    raise Exception('This is an exception')
except Exception:
    pass
# Silence urllib3 warnings (e.g. InsecureRequestWarning).
urllib3.disable_warnings()
# Random User-Agent supplied by the project's Modules.agents helper; sent
# with every HTTP request below.
user_agent_ = agents.get_useragent()
header = {"User-Agent": user_agent_}
#Banner
# Store the banner TEXT. The original did `banner = print(f\"\"\"...\"\"\")`,
# which printed the art once at import time and bound `banner` to None, so
# print_banner() later printed the literal string "None".
banner = f"""⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⡀⠀⠀⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣰⣾⣿⣿⣿⣶⡄⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣿⣿⣿⣿⠋⠉⢻⣤⣀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢀⣴⣾⣿⣿⣿⣿⣿⣷⣶⣿⣿⣿⣷⡄⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⢠⣿⣿⣿⣿⣿⣿⣿⣿⣿⣷⠀⠀⠀⠉⠃⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣰⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⡟⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⣠⣴⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⡿⠁⠀⠀⠀
⠀⠀⠀⠀⠀⠀⠀⢀⣴⣾⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⠟⠀⠀ Platform Detection⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⢀⣠⣴⣾⣿⣿⣿⣿⣿⣿⣿⣿⣿⣿⠟⠋⠀⠀⠀{platform.system()} {platform.release()}⠀⠀⠀
⠀⠀⠀⠀⠀⠉⠩⠽⢿⣿⣿⣿⣿⣿⣿⣿⣿⡟⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
⠀⠀⠀⠀⠀⢀⣠⣶⣿⣿⣿⣿⣿⣿⣿⣿⡟⠀⠀⠀⠀⠀⠀⠀Project: Vulture⠀⠀⠀⠀⠀⠀
⠀⠀⣀⣴⣾⣿⣿⣿⣿⠟⠁⠙⢿⣿⠿⠋⠀⠀⠀⠀⠀⠀⠀⠀Category: Username Search⠀⠀⠀⠀⠀⠀
⠀⠈⣩⣿⣿⣿⡿⠋⠁⠀⠀⠀⠀⠻⣆⠀⠀⠀⠀⠀⠀⠀⠀⠀Developer: AnonCatalyst
⠀⠈⠉⠈⠟⠉ ⣀⣽⣦⣤⡀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀
⠉⠉⠉⠉⠉⠉⠁"""
#Platform Detection
def check_platform():
    """Return True when the banner should be shown on this platform.

    platform.system() yields values such as "Linux", "Windows" or
    "Darwin" — never distribution names. The original compared against
    "Arch" and "Debian" (which never match) and returned None for every
    real system. The apparent intent was "show everywhere except Arch",
    so that intent is kept while always returning a bool.
    """
    return platform.system() != "Arch"
def print_banner():
    """Print the ASCII-art banner with embedded platform info."""
    print(banner)
if check_platform():
    print_banner()
#Input
# Target handle to search for across all services below (read interactively
# from stdin; the script blocks here until the user answers).
username = input("\nEnter a username: ")
#Animation
def load_animation(cycles=100, delay=0.075):
    """Play a spinner while rippling the case of the launch tagline.

    Each frame toggles the case of one ASCII letter of the tagline
    (spaces and dots are skipped) and advances a |/-\\ spinner, then the
    terminal is cleared for the main output.

    Args:
        cycles: number of frames to draw; the default of 100 keeps the
            original ~7.5 s duration.
        delay: seconds slept between frames.
    """
    load_str = "vulture is launching username search..."
    ls_len = len(load_str)
    animation = "|/-\\"
    for count in range(cycles):
        time.sleep(delay)
        chars = list(load_str)
        i = count % ls_len
        code = ord(chars[i])
        # 32 == space, 46 == '.'; everything else in the tagline is an
        # ASCII letter, so +/-32 flips its case.
        if code != 32 and code != 46:
            chars[i] = chr(code - 32) if code > 90 else chr(code + 32)
        load_str = "".join(chars)
        sys.stdout.write("\r" + load_str + animation[count % 4])
        sys.stdout.flush()
    # Clear the screen so the results start on a fresh terminal.
    os.system("cls" if os.name == "nt" else "clear")
if __name__ == '__main__':
    load_animation()
#Username Search
print(f" {Fore.RED}〘{Fore.WHITE} Username Search{Fore.YELLOW}: {Fore.CYAN}{username}{Fore.RED} 〙\n")
# Materialize the target URLs as a list. The original built a one-shot
# generator expression, which would be silently empty on any second
# iteration; a list can be traversed any number of times.
with open("urls.txt", "r") as f:
    url_list = [line.strip() for line in f]
def username_search(username: str, url: str, timeout: float = 10):
    """Probe a site for a profile page belonging to *username* and print the result.

    Prints a hit line on HTTP 200 and a miss line on HTTP 404; any other
    status code is silently ignored (original behavior).

    Args:
        username: handle to look for.
        url: base or pre-joined profile URL. It is urljoin'ed with the
            username here; callers that already joined it are unaffected
            because the join is idempotent for these inputs.
        timeout: per-request timeout in seconds, so an unresponsive host
            cannot hang its worker thread forever (the original sent the
            request with no timeout at all).
    """
    try:
        s = requests.Session()
        s.headers.update(header)
        response = s.get(urllib.parse.urljoin(url, username), timeout=timeout)
        status_code = response.status_code
        if status_code == 200:
            print(f"{Fore.CYAN}• {Fore.BLUE}{username} {Fore.RED}| {Fore.YELLOW}[{Fore.GREEN}✓{Fore.YELLOW}]{Fore.WHITE} URL{Fore.YELLOW}: {Fore.GREEN}{url}{Fore.WHITE} {status_code}")
        elif status_code == 404:
            print(f" {Fore.YELLOW}[{Fore.RED}×{Fore.YELLOW}] {Fore.WHITE}Profile page {Fore.RED}not found{Fore.YELLOW}:{Fore.RED} {status_code}{Fore.YELLOW}: {Fore.MAGENTA}{url}{Fore.WHITE}")
    except requests.exceptions.ConnectionError:
        print(f"{Fore.RED}╘{Fore.WHITE} Connection error{Fore.RED} !")
    except requests.exceptions.TooManyRedirects:
        print(f"\n{Fore.RED}╘{Fore.WHITE} Too many redirects{Fore.RED} !")
    except requests.exceptions.Timeout:
        # New handler required by the timeout added above.
        print(f"{Fore.RED}╘{Fore.WHITE} Request timed out{Fore.RED} !")
#threading
def main(username):
    # Fan out one worker thread per target site in the module-level
    # url_list, then wait for them all.
    # NOTE(review): url_list is a one-shot generator at module level, so
    # main() only works the first time it is called — confirm intended.
    threads = []
    for url in url_list:
        # Joined here AND again inside username_search; urljoin is
        # idempotent for these inputs, so the final URL is the same.
        url = urllib.parse.urljoin(url, username)
        t = threading.Thread(target=username_search, args=(username, url))
        t.start()
        threads.append(t)
    # The sleep throttles how fast completed results scroll past.
    for thread in threads:
        thread.join()
        time.sleep(0.3)
if __name__ == "__main__":
    try:
        main(username)
    except (urllib3.exceptions.MaxRetryError, requests.exceptions.RequestException):
        # Best-effort scan: one failing site must not abort the whole run.
        pass
print(f"\n {Fore.RED}〘 {Fore.WHITE}Domains Associated With{Fore.YELLOW}: {Fore.BLUE}{username} {Fore.RED}〙{Fore.WHITE}\n")
# Username association: list Google results whose URL contains the handle.
found = False
try:
    for link in googlesearch.search(username, 15):
        if username in link:
            print(f"{Fore.CYAN}⊶ {Fore.WHITE}", link)
            found = True
except Exception:
    # The original caught google.cloud.exceptions.TooManyRequests, but the
    # `google` package is never imported, so evaluating that except clause
    # itself raised a NameError whenever the search failed. googlesearch
    # surfaces rate limiting as an HTTP error, so catch broadly here.
    print(f"{Fore.RED}[!] Too many requests, please try again later.{Fore.WHITE}")
if not found:
    # The original used try/else, which printed this message even after
    # successfully listing associated domains; only print it when none
    # were found.
    print(f"No Other Domains Associated With: {username}")
#Google Search
# (googlesearch is already imported at the top of the file; the original
# re-imported it here redundantly.)
print(f"\n {Fore.RED}〘 {Fore.WHITE}Google Search For{Fore.YELLOW}: {Fore.BLUE}{username} {Fore.RED}〙{Fore.WHITE}\n")
choice = input(f"{Fore.YELLOW}[{Fore.CYAN}?{Fore.YELLOW}]{Fore.WHITE} Do you want to print the results? {Fore.CYAN}({Fore.WHITE}y{Fore.MAGENTA}/{Fore.WHITE}n{Fore.CYAN}){Fore.YELLOW}:{Fore.WHITE} ").lower()
if choice == "y":
    # Save 50 results for offline review.
    with open("usrassosiation.txt", "w") as f:
        for urlx in googlesearch.search(username, 50):
            f.write(f"{urlx}\n")
    # Report the file that was actually written — the original message
    # claimed "googleresults.txt" while writing "usrassosiation.txt".
    print("50 Results saved to usrassosiation.txt")
else:
    print(f"""{Fore.CYAN}> {Fore.RED}Results not saved and only {Fore.GREEN}15 results{Fore.RED} will be shown
unless you chose to {Fore.BLUE}save results{Fore.YELLOW}. {Fore.MAGENTA}This will prevent {Fore.RED}error {Fore.YELLOW}429{Fore.RED} too
many requests from happening sooner witch allows you to search
more until googles {Fore.MAGENTA}search cap of 999 {Fore.RED}is reached{Fore.YELLOW}...""")
    for urlx in googlesearch.search(username, 15):
        print(f"{Fore.CYAN}⊶ :{Fore.WHITE}",urlx)
# Instagram profile information
print(f"\nGetting Profile Information For: {username}...\n")
bot = instaloader.Instaloader()
print("~ Instagram Profile Information")
# NOTE(review): raises (e.g. profile-not-found) are unhandled here and would
# abort the script with tracebacklimit=0 — confirm this is acceptable.
profile = instaloader.Profile.from_username(bot.context, username)
print("Username: ", profile.username)
print("User ID: ", profile.userid)
print("Number of Posts: ", profile.mediacount)
print("Followers Count: ", profile.followers)
print("Following Count: ", profile.followees)
print("Bio: ", profile.biography)
print("External URL: ", profile.external_url)
# Github profile information
print("\n~ Github Profile Information")
url = f"https://api.github.com/users/{username}"
response = requests.get(url, headers=header)
data = response.json()
if "login" in data:
    # A nonexistent user returns {"message": "Not Found"}; the original
    # crashed with a KeyError on data["login"]. Use .get() for the optional
    # profile fields (GitHub returns null for unset ones).
    username = data["login"]
    name = data.get("name")
    bio = data.get("bio")
    follow = data.get("followers")
    following = data.get("following")
    loc = data.get("location")
    comp = data.get("company")
    em = data.get("email")
    print(f"Username: {username}")
    print(f"Location: {loc}")
    print(f"Name: {name}")
    print(f"Company: {comp}")
    print(f"Followers: {follow}")
    print(f"Following: {following}")
    print(f"Bio: {bio}")
    print(f"Email: {em}")
else:
    print(f"No Github profile found for: {username}")
print(f"\nDownloading {username} posts ... (close[xterm]window) on completion!")
# SECURITY: the username comes from user input and is interpolated into a
# shell command line; quote it so shell metacharacters cannot inject
# arbitrary commands.
import shlex
safe_user = shlex.quote(username)
os.system(f"xterm -hold -e python3 instadl.py {safe_user}")
os.system(f"xterm -hold -e exiftool {safe_user}/*.jpg > metadata.txt")
print(f"""\n{Fore.MAGENTA}More profile pages to gather information from
is coming in future updates{Fore.RED}!""")