# P_bot.py — forked from rndinfosecguy/Scavenger
# (129 lines / 114 loc, 4.34 KB in the upstream repository)
######
# If you do not want to post results on Twitter remove the lines marked with TWITTER
######
import json
import os
import shutil
import subprocess
import time

import httplib2
import tweepy

import classes.utility
# Utility helper from the project: archiving, grepping and statistics
# routines used by the main loop below.
tools = classes.utility.ScavUtility()
# Counts completed passes of the scraping loop; printed at the top of each pass.
iterator = 1
# Twitter API credentials — fill these in before running, or strip every
# line marked TWITTER (see the header note) to run without posting.
consumer_key = "" # TWITTER
consumer_secret = "" # TWITTER
access_key = "" # TWITTER
access_secret = "" # TWITTER
# Authorize with Twitter and build the tweepy API client used for posting.
auth = tweepy.OAuthHandler(consumer_key, consumer_secret) # TWITTER
auth.set_access_token(access_key, access_secret) # TWITTER
api = tweepy.API(auth) # TWITTER
print("[#] Using API to gather pastes.")
###############################################################################
# Main scraping loop.
#
# Every pass:
#   1. archives data/raw_pastes when tools.testifreadytoarchive says so,
#   2. pulls the newest pastes from the pastebin.com scraping API,
#   3. stores each paste under data/raw_pastes/<key> and inspects it for
#      credential dumps, RSA private keys, WordPress configs, MySQL connect
#      strings and .onion links, tweeting and bumping statistics on each hit.
#
# Fixes over the previous revision:
#   * api.update_status() was called without a status text (TypeError on
#     every hit, so statisticsaddpoint()/copies after it never ran); a
#     message is now passed, and tweet failures no longer abort the paste.
#   * shell commands built by string concatenation from the untrusted paste
#     key (os.popen/os.system -> command injection) replaced by shell-free
#     subprocess.run argv lists and shutil.copy; the key is basename()d to
#     prevent path traversal out of data/raw_pastes.
#   * paste files are written/read via context managers (no leaked fds).
###############################################################################


def _grep(patternArgs, path):
    """Run grep on *path* without a shell and return its stdout as text.

    patternArgs: list of grep arguments (options + pattern).  Passing an
    argv list means no shell metacharacters in the data are interpreted.
    Returns "" when nothing matched (grep exits 1 with empty stdout).
    """
    completed = subprocess.run(["grep"] + patternArgs + [path],
                               capture_output=True, text=True)
    return completed.stdout


def _tweet(message):  # TWITTER
    """Post *message* on Twitter; log and continue on failure.

    A broken or unconfigured Twitter setup must not stop the scraper,
    so any tweepy/auth error is printed and swallowed.
    """
    try:
        api.update_status(status=message)  # TWITTER
    except Exception as twitterError:  # best-effort side channel only
        print(twitterError)


while True:
    # --- housekeeping: archive old pastes when the raw directory is full ---
    archivepath = "data/raw_pastes"
    if tools.testifreadytoarchive(archivepath) == 1:
        print("[*] Get all the pastes with credentials...")
        tools.getthejuicythings(archivepath, "pastebincom")
        print("[*] Archiving old Paste.org pastes...")
        tools.archivepastes(archivepath, "pastebincom")
    print(str(iterator) + ". iterator:")
    iterator += 1
    http = httplib2.Http()
    try:
        # Newest 100 pastes; the scraping API requires a whitelisted IP.
        status, response = http.request(
            "https://scrape.pastebin.com/api_scraping.php?limit=100")
        result = json.loads(response.decode('utf-8'))
        print("[#] Waiting...")
        time.sleep(60)  # respect the scraping API rate limit
        for apiPaste in result:
            print("[*] Crawling " + apiPaste["key"])
            binStatus, binResponse = http.request(apiPaste["scrape_url"])
            try:
                foundPasswords = 0
                # The key is attacker-controlled data from an external API:
                # basename() keeps the file inside data/raw_pastes.
                pasteKey = os.path.basename(apiPaste["key"])
                pastePath = "data/raw_pastes/" + pasteKey
                with open(pastePath, "wb") as rawFile:
                    rawFile.write(binResponse)
                # grep -l prints the file name iff it contains an e-mail
                # address, so non-empty output == "paste has an e-mail".
                emailHit = _grep(
                    ["-l", "-E", "-o",
                     r"\b[a-zA-Z0-9.-]+@[a-zA-Z0-9.-]+\.[a-zA-Z0-9.-]+\b"],
                    pastePath)
                if emailHit.strip() != "":
                    with open(pastePath) as pasteFile:
                        pasteContent = pasteFile.readlines()
                    # Heuristic: a credential dump consists ONLY of short
                    # "user<sep>password" lines (sep one of : ; ,), none of
                    # which are URLs, Android manifest lines or M3U entries.
                    skip = 0
                    for line in pasteContent:
                        curLine = line.strip()
                        looksLikeCredential = (
                            (":" in curLine or ";" in curLine or "," in curLine)
                            and "://" not in curLine
                            and len(curLine) <= 100
                            and "android:" not in curLine
                            and "#EXTINF" not in curLine)
                        if not looksLikeCredential:
                            skip = 1
                    if skip == 0:
                        foundPasswords = 1
                # Literal-string greps for other interesting content.
                curPasteMySQLi = _grep(["mysqli_connect("], pastePath)
                curPasteRSA = _grep(["BEGIN RSA PRIVATE KEY"], pastePath)
                curPasteWP = _grep(
                    ["The name of the database for WordPress"], pastePath)
                # Search for onion links and linked documents.
                containsOnion = 0
                containsDocument = 0
                with open(pastePath) as pasteFile:
                    for line in pasteFile:
                        if ".onion" in line and len(line) <= 150:
                            containsOnion = 1
                        if (".pdf" in line or ".doc" in line
                                or ".docx" in line or ".xls" in line
                                or ".xlsx" in line):
                            containsDocument = 1
                if foundPasswords == 1:
                    foundPasswords = 0
                    print("Found credentials. Posting on Twitter...")
                    _tweet("Found credentials in paste " + pasteKey)  # TWITTER
                    tools.statisticsaddpoint()
                elif curPasteRSA != "":
                    print("Found RSA key. Posting on Twitter...")
                    _tweet("Found RSA private key in paste " + pasteKey)  # TWITTER
                    tools.statisticsaddpoint()
                    shutil.copy(pastePath, "data/rsa_leaks/")
                elif curPasteWP != "":
                    print("Found Wordpress configuration file. Posting on Twitter...")
                    _tweet("Found WordPress configuration in paste " + pasteKey)  # TWITTER
                    tools.statisticsaddpoint()
                    shutil.copy(pastePath, "data/wordpress_leaks/")
                elif curPasteMySQLi != "":
                    print("Found MySQL connect string. Posting on Twitter...")
                    _tweet("Found MySQL connect string in paste " + pasteKey)  # TWITTER
                    tools.statisticsaddpoint()
                    shutil.copy(pastePath, "data/mysql_leaks/")
                elif containsOnion == 1:
                    if containsDocument == 1:
                        print("Found .onion link to a document. Posting on Twitter...")
                        _tweet("Found .onion document link in paste " + pasteKey)  # TWITTER
                        tools.statisticsaddpoint()
                        shutil.copy(pastePath, "data/onion_docs/")
                    else:
                        print("Found .onion link. Posting on Twitter...")
                        _tweet("Found .onion link in paste " + pasteKey)  # TWITTER
                        tools.statisticsaddpoint()
                        shutil.copy(pastePath, "data/onion/")
                time.sleep(1)  # be gentle with the per-paste endpoint
            except Exception as e:
                # One bad paste must not kill the crawl; log and move on.
                print(e)
                continue
        print("++++++++++")
        print("")
    except Exception as e:
        # Network/API/JSON failure for the whole batch: log and retry.
        print(e)
        continue