Commit 3606040

Merge pull request #94 from OSINT-TECHNOLOGIES/rolling
Stabilized v1.1.5
2 parents 2de7259 + ee6ab6f · commit 3606040

14 files changed: +345 −72 lines changed

apis/api_securitytrails.py (+14)

@@ -13,6 +13,12 @@ def api_securitytrails_check(domain):
     api_key = str(row[1])
     print(Fore.GREEN + 'Got SecurityTrails API key. Starting SecurityTrails scan...\n')
 
+    alive_subdomains = []
+    txt_records = []
+    a_records_list = []
+    mx_records_list = []
+    ns_records_list = []
+    soa_records_list = []
     subdomains_url = f"https://api.securitytrails.com/v1/domain/{domain}/subdomains?apikey={api_key}"
     response = requests.get(subdomains_url)
 
@@ -31,14 +37,19 @@ def api_securitytrails_check(domain):
         for value in record_data.get('values', []):
             if record_type == 'a':
                 print(Fore.GREEN + "IP: " + Fore.LIGHTCYAN_EX + f"{value['ip']} " + Fore.GREEN + "| Organization: " + Fore.LIGHTCYAN_EX + f"{value['ip_organization']}")
+                a_records_list.append({'ip': value.get('ip', ''), 'organization': value.get('ip_organization', '')})
             elif record_type == 'mx':
                 print(Fore.GREEN + "Hostname: " + Fore.LIGHTCYAN_EX + f"{value['hostname']} " + Fore.GREEN + "| Priority: " + Fore.LIGHTCYAN_EX + f"{value['priority']} " + Fore.GREEN + "| Organization: " + Fore.LIGHTCYAN_EX + f"{value['hostname_organization']}")
+                mx_records_list.append({'mx_hostname': value.get('hostname', ''), 'mx_priority': value.get('priority', ''), 'mx_organization': value.get('hostname_organization', '')})
             elif record_type == 'ns':
                 print(Fore.GREEN + "Nameserver: " + Fore.LIGHTCYAN_EX + f"{value['nameserver']} " + Fore.GREEN + "| Organization: " + Fore.LIGHTCYAN_EX + f"{value['nameserver_organization']}")
+                ns_records_list.append({'ns_nameserver': value.get('nameserver', ''), 'ns_organization': value.get('nameserver_organization', '')})
             elif record_type == 'soa':
                 print(Fore.GREEN + "Email: " + Fore.LIGHTCYAN_EX + f"{value['email']} " + Fore.GREEN + "| TTL: " + Fore.LIGHTCYAN_EX + f"{value['ttl']}")
+                soa_records_list.append({'soa_email': value.get('email', ''), 'soa_ttl': value.get('ttl', '')})
             elif record_type == 'txt':
                 print(Fore.GREEN + "Value: " + Fore.LIGHTCYAN_EX + f"{value['value']}")
+                txt_records.append(value['value'])
 
     if response.status_code == 200:
         data = response.json()
@@ -51,9 +62,12 @@ def api_securitytrails_check(domain):
             response = requests.get(subdomain_url, timeout=5)
             if response.status_code == 200:
                 print(Fore.GREEN + f"{i}. " + Fore.LIGHTCYAN_EX + f"{subdomain_url} " + Fore.GREEN + "is alive")
+                alive_subdomains.append(subdomain_url)
             else:
                 pass
         except Exception:
             pass
     else:
         pass
+
+    return general_data['alexa_rank'], general_data['apex_domain'], general_data['hostname'], alive_subdomains, txt_records, a_records_list, mx_records_list, ns_records_list, soa_records_list
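With this change, api_securitytrails_check() returns the gathered data instead of only printing it. A minimal sketch of how a caller might unpack the nine-value tuple (illustrative only, not code from this commit; the import path follows the repo layout):

```python
# Illustrative sketch, not part of this commit: unpack the nine values
# api_securitytrails_check() now returns (see the diff above).
from apis.api_securitytrails import api_securitytrails_check

(alexa_rank, apex_domain, hostname,
 alive_subdomains, txt_records,
 a_records, mx_records, ns_records, soa_records) = api_securitytrails_check('example.com')

# Each A-record entry is a dict with 'ip' and 'organization' keys.
for record in a_records:
    print(record['ip'], record['organization'])
```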

apis/api_virustotal.py (+2)

@@ -40,10 +40,12 @@ def api_virustotal_check(domain):
         print(Fore.GREEN + f"Undetected Samples: {len(result.get('undetected_samples', []))}\n")
         print(Fore.LIGHTGREEN_EX + "-------------------------------------------------\n" + Style.RESET_ALL)
         conn.close()
+        return result.get('categories'), len(result.get('detected_urls', [])), len(result.get('detected_samples', [])), len(result.get('undetected_samples', []))
     else:
         print(Fore.RED + "Failed to get domain report\n")
         print(Fore.LIGHTGREEN_EX + "-------------------------------------------------\n" + Style.RESET_ALL)
         conn.close()
+        return 'Got no information from VirusTotal API', 'Got no information from VirusTotal API', 'Got no information from VirusTotal API', 'Got no information from VirusTotal API'
     pass
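api_virustotal_check() likewise now returns four values on both paths, so a caller sees a uniform shape whether the lookup succeeded or not. A hypothetical consumer (illustrative only):

```python
# Illustrative sketch, not part of this commit: on success the four values
# are the category mapping plus three counts; on failure they are four
# identical placeholder strings.
from apis.api_virustotal import api_virustotal_check

categories, detected_urls, detected_samples, undetected_samples = api_virustotal_check('example.com')
print(categories, detected_urls, detected_samples, undetected_samples)
```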
datagather_modules/crawl_processor.py (+10 −4)

@@ -26,7 +26,7 @@ def whois_gather(short_domain):
         logging.info('WHOIS INFO GATHERING: OK')
         w = whois.whois(short_domain)
         if w.org is None:
-            w['org'] = 'n/a'
+            w['org'] = 'Organization name was not extracted'
         logging.info('WHOIS INFO GATHERING: OK')
         return w
     except Exception as e:
@@ -110,7 +110,7 @@ def sm_gather(url):
     links = [a['href'] for a in soup.find_all('a', href=True)]
     categorized_links = {'Facebook': [], 'Twitter': [], 'Instagram': [],
                          'Telegram': [], 'TikTok': [], 'LinkedIn': [],
-                         'VKontakte': [], 'YouTube': [], 'Odnoklassniki': [], 'WeChat': []}
+                         'VKontakte': [], 'YouTube': [], 'Odnoklassniki': [], 'WeChat': [], 'X.com': []}
 
     for link in links:
         parsed_url = urlparse(link)
@@ -135,6 +135,8 @@ def sm_gather(url):
             categorized_links['WeChat'].append(urllib.parse.unquote(link))
         elif hostname and (hostname == 'ok.ru' or hostname.endswith('.ok.ru')):
             categorized_links['Odnoklassniki'].append(urllib.parse.unquote(link))
+        elif hostname and (hostname == 'x.com' or hostname.endswith('.x.com')):
+            categorized_links['X.com'].append(urllib.parse.unquote(link))
 
     if not categorized_links['Odnoklassniki']:
         categorized_links['Odnoklassniki'].append('Odnoklassniki links were not found')
@@ -156,6 +158,8 @@ def sm_gather(url):
         categorized_links['Twitter'].append('Twitter links were not found')
     if not categorized_links['Facebook']:
         categorized_links['Facebook'].append('Facebook links were not found')
+    if not categorized_links['X.com']:
+        categorized_links['X.com'].append('X.com links were not found')
 
     return categorized_links
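The new X.com branch reuses the hostname test applied to the other platforms: take urlparse().hostname and accept either an exact match or a subdomain of the target host. A standalone sketch of that pattern (illustrative only, with made-up input links):

```python
# Illustrative sketch of the exact-or-subdomain hostname test used above.
from urllib.parse import urlparse, unquote

def is_on_host(link, host):
    hostname = urlparse(link).hostname
    # 'x.com' and 'www.x.com' match; 'notx.com' does not.
    return bool(hostname) and (hostname == host or hostname.endswith('.' + host))

links = ['https://x.com/osint', 'https://www.x.com/page', 'https://notx.com/']
print([unquote(link) for link in links if is_on_host(link, 'x.com')])
# ['https://x.com/osint', 'https://www.x.com/page']
```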
@@ -209,7 +213,7 @@ def domains_reverse_research(subdomains, report_file_type):
     subdomain_socials_grouped = list(dict(subdomain_socials_grouped).values())
 
     sd_socials = {'Facebook': [], 'Twitter': [], 'Instagram': [], 'Telegram': [], 'TikTok': [], 'LinkedIn': [],
-                  'VKontakte': [], 'YouTube': [], 'Odnoklassniki': [], 'WeChat': []}
+                  'VKontakte': [], 'YouTube': [], 'Odnoklassniki': [], 'WeChat': [], 'X.com': []}
 
     for inner_list in subdomain_socials_grouped:
         for link in inner_list:
@@ -234,6 +238,8 @@ def domains_reverse_research(subdomains, report_file_type):
                 sd_socials['WeChat'].append(urllib.parse.unquote(link))
             elif hostname and (hostname == 'ok.ru' or hostname.endswith('.ok.ru')):
                 sd_socials['Odnoklassniki'].append(urllib.parse.unquote(link))
+            elif hostname and (hostname == 'x.com' or hostname.endswith('.x.com')):
+                sd_socials['X.com'].append(urllib.parse.unquote(link))
 
     sd_socials = {k: list(set(v)) for k, v in sd_socials.items()}
 
@@ -242,7 +248,7 @@
     if not subdomain_ip:
         subdomain_ip = ["No subdomains IP's were found"]
 
-    if report_file_type == 'pdf' or report_file_type == 'html':
+    if report_file_type == 'html':
         return subdomain_mails, sd_socials, subdomain_ip
     elif report_file_type == 'xlsx':
         return subdomain_urls, subdomain_mails, subdomain_ip, sd_socials
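Since domains_reverse_research() now handles only 'html' and 'xlsx' (the 'pdf' branch was removed here) and returns a differently shaped tuple for each, callers have to branch on the same value. A hypothetical call site (illustrative only; the subdomain list is made up):

```python
# Illustrative sketch, not part of this commit: the return shape of
# domains_reverse_research() depends on report_file_type.
from datagather_modules.crawl_processor import domains_reverse_research

subdomains = ['mail.example.com', 'dev.example.com']  # hypothetical input

report_file_type = 'html'  # this commit drops 'pdf' from this dispatch
if report_file_type == 'html':
    subdomain_mails, sd_socials, subdomain_ip = domains_reverse_research(subdomains, report_file_type)
elif report_file_type == 'xlsx':
    subdomain_urls, subdomain_mails, subdomain_ip, sd_socials = domains_reverse_research(subdomains, report_file_type)
```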
