forked from evilsocket/pwnagotchi-plugins-contrib
-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathhashie.py
239 lines (214 loc) · 12 KB
/
hashie.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
import binascii
import io
import json
import logging
import os
import subprocess
from threading import Lock
from scapy.all import *
import pwnagotchi.plugins as plugins
import pwnagotchi.ui.fonts as fonts
from pwnagotchi.ui.components import LabeledValue
from pwnagotchi.ui.view import BLACK
class PMKIDPackage(object):
    """Plain record for one candidate PMKID hash extracted from a pcap.

    Instances are created empty and populated field-by-field; the four
    fields are later joined with '*' to form a single 16800-style line.
    All defaults are the empty string.
    """
    PMKID = ""    # PMKID value as a hex string
    ESSID = ""    # network name, hex encoded
    MAC_AP = ""   # access-point MAC with colons stripped
    MAC_STN = ""  # station MAC with colons stripped
'''
hcxpcapngtool is required, to install:
> git clone https://github.com/ZerBea/hcxtools.git
> cd hcxtools
> apt-get install libcurl4-openssl-dev libssl-dev zlib1g-dev
> make
> sudo make install
'''
class hashie(plugins.Plugin):
    __version__ = '1.0.3'
    __license__ = 'GPL3'
    __description__ = '''
        Attempt to automatically convert PCAPs to a crackable format.
        If successful, the files containing the hashes will be saved
        in the same folder as the handshakes.
        The files output by the plugin are intended for use with Hashcat:
        - *.16800 files are manually exported with scapy/tcpdump
        - *.22000 files are exported by hcxpcapngtool and contain 2 hash types
          - "WPA*01*xxxx" hashes (Newer format for 16800s)
          - "WPA*02*xxxx" hashes (Newer format for the old 2500s)
          - Note: Hashcat accepts both of these in 22000 mode.
        If a PCAP file doesn't have enough information to create a hash
        it gets stored in a file that can be read by the webgpsmap plugin.
        Why use it?:
        - Automatically convert handshakes to crackable formats for use offline
        - Manually extract PMKIDs that hcxpcapngtool misses
        - Be a completionist! If you don't have enough packets captured this
          generates an output file for the webgpsmap plugin, use the
          location data to revisit networks you need more packets for!
        Additional information:
        - Currently requires hcxpcapngtool compiled and installed
        - Attempts to repair PMKID hashes when hcxpcapngtool cant find the SSID
        - hcxpcapngtool sometimes has trouble extracting the SSID, so we
          use the raw 16800 output and attempt to retrieve the SSID via tcpdump
        - When access_point data is available (on_handshake), we leverage
          the reported AP name and MAC to complete the hash
        - The repair is very basic and could certainly be improved!
        Todo:
        Make it so users dont need hcxpcapngtool (unless it gets added to the base image)
        Phase 1: Extract/construct 22000/16800 hashes through tcpdump commands
        Phase 2: Extract/construct 22000/16800 hashes entirely in python
        Improve the code, a lot
    '''

    def __init__(self):
        logging.info("[hashie] plugin loaded")
        # Serialise all pcap processing: on_handshake and the stale-pcap
        # batch job could otherwise work on the same files concurrently.
        self.lock = Lock()

    # called when everything is ready and the main loop is about to start
    def on_config_changed(self, config):
        """Kick off a batch conversion of any pcaps without hash files."""
        handshake_dir = config['bettercap']['handshakes']
        # NOTE(review): self.status is never assigned anywhere in this plugin,
        # so this raises AttributeError whenever 'interval' IS present in the
        # options — confirm against the pwnagotchi plugin API before relying
        # on the interval branch.
        if 'interval' not in self.options or not (self.status.newer_then_hours(self.options['interval'])):
            with self.lock:
                self._process_stale_pcaps(handshake_dir)

    def on_handshake(self, agent, filename, access_point, client_station):
        """Try to derive .22000 and .16800 hash files from a fresh capture.

        filename is the full path of the pcap just written by bettercap;
        existing hash files are never overwritten.
        """
        with self.lock:
            handshake_status = []
            # os.path.splitext/basename instead of split('.') so paths that
            # contain extra dots (e.g. SSIDs with '.') are handled correctly.
            fullpathNoExt = os.path.splitext(filename)[0]
            name = os.path.splitext(os.path.basename(filename))[0]

            if os.path.isfile(fullpathNoExt + '.22000'):
                handshake_status.append('Already have {}.22000'.format(name))
            elif self._write22000(filename):
                handshake_status.append('Created {}.22000 from pcap'.format(name))

            if os.path.isfile(fullpathNoExt + '.16800'):
                handshake_status.append('Already have {}.16800'.format(name))
            elif self._write16800(filename, access_point):
                handshake_status.append('Created {}.16800 from pcap'.format(name))

            if handshake_status:
                logging.info('[hashie] Good news:\n\t' + '\n\t'.join(handshake_status))

    def _write22000(self, fullpath):
        """Run hcxpcapngtool on *fullpath*; return True if a .22000 file exists after.

        The tool's text output is discarded — success is judged purely by
        whether the output file was created.
        """
        fullpathNoExt = os.path.splitext(fullpath)[0]
        filename = os.path.splitext(os.path.basename(fullpath))[0]
        subprocess.getoutput('hcxpcapngtool -o {}.22000 {} >/dev/null 2>&1'.format(fullpathNoExt, fullpath))
        if os.path.isfile(fullpathNoExt + '.22000'):
            logging.debug('[hashie] [+] EAPOL Success: {}.22000 created'.format(filename))
            return True
        return False

    def _getESSID(self, packet, fullpath):
        """Return the hex-encoded ESSID for *packet*, or raise ValueError.

        First tries the packet's own management-frame info field, then falls
        back to grepping beacon/probe/assoc frames out of the whole pcap with
        tcpdump. Raises ValueError when neither source yields an ESSID, which
        the per-packet except in _write16800 treats as "skip this packet".
        """
        tmpESSID = ''
        # Fix: the original used bitwise '&', which (by operator precedence)
        # evaluated `haslayer(Dot11) & packet.type == 0` as
        # `haslayer(...) & (packet.type == 0)` only by accident of '&' vs '=='
        # ordering — logical 'and' is what was intended.
        if packet.haslayer(Dot11) and packet.type == 0:
            try:
                tmpESSID = packet.info.hex()
            except Exception:
                pass
        if tmpESSID != '':
            return tmpESSID
        # Fall back to tcpdump: pull "BSSID\tESSID-hex" pairs from management
        # frames (command string kept verbatim, including the literal tab).
        tcpCatOut = subprocess.check_output("tcpdump -ennr " + fullpath + " \"(type mgt subtype beacon) || (type mgt subtype probe-resp) || (type mgt subtype reassoc-resp) || (type mgt subtype assoc-req)\" 2>/dev/null | sed -E 's/.*BSSID:([0-9a-fA-F:]{17}).*\\((.*)\\).*/\\1\t\\2/g'", shell=True).decode('utf-8')
        if ":" in tcpCatOut:
            for i in tcpCatOut.split('\n'):
                if ":" in i:
                    tmpESSID = i.split('\t')[0].replace(':', '') + ':' + i.split('\t')[1].strip('\n').encode().hex()
        if tmpESSID:
            # tmpESSID is "<apmac>:<essid-hex>"; return just the ESSID part
            return tmpESSID.split(':')[1]
        # Fix: the original fell off the end and hit an unbound local here;
        # raise explicitly instead so the intent (skip packet) is visible.
        raise ValueError('no ESSID recoverable for ' + fullpath)

    def _populatePMKObj(self, packet, fullpath):
        """Build a PMKIDPackage from one EAPOL packet (may raise on bad packets)."""
        pmkObj = PMKIDPackage()
        pmkObj.ESSID = self._getESSID(packet, fullpath)
        # PMKID lives at a fixed offset in the raw EAPOL payload:
        # hex characters 202..234 of the hexlified Raw load.
        pmkObj.PMKID = str(binascii.hexlify(packet.getlayer(Raw).load)[202:234], 'utf-8')
        pmkObj.MAC_AP = packet.addr2.replace(':', '')
        pmkObj.MAC_STN = packet.addr1.replace(':', '')
        return pmkObj

    def _getUniquePMKIDs(self, allPMKIDs):
        """Collapse PMKIDPackage objects into sorted, de-duplicated hash lines."""
        tmpPMKIDs = {"*".join([str(entry.PMKID), str(entry.MAC_AP), str(entry.MAC_STN), str(entry.ESSID)])
                     for entry in allPMKIDs}
        return sorted(tmpPMKIDs)

    def _write16800(self, fullpath, apJSON):
        """Extract PMKIDs from *fullpath* into a .16800 file.

        Returns True when at least one hash was written, False otherwise.
        (Fix: the original returned None on every path, so callers that
        tested the result always saw failure.)
        """
        fullpathNoExt = os.path.splitext(fullpath)[0]
        filename = os.path.splitext(os.path.basename(fullpath))[0]
        allPMKIDs = []
        try:
            targetPCAP = rdpcap(fullpath)
        except Exception:
            logging.debug('[hashie] [-] PCAP Read Fail: Scapy didn\'t like {}.pcap'.format(filename))
            return False
        for packet in targetPCAP:
            try:
                pmkObj = self._populatePMKObj(packet, fullpath)
                if pmkObj.PMKID != '':
                    allPMKIDs.append(pmkObj)
            except Exception:
                # Most packets are not PMKID-bearing EAPOL frames; skip them.
                pass
        if allPMKIDs:
            uniqPMKIDs = self._getUniquePMKIDs(allPMKIDs)
            with open(fullpathNoExt + '.16800', 'w') as tempFileOut:
                for entry in uniqPMKIDs:
                    tempFileOut.write(entry + '\n')
            logging.debug('[hashie] [+] PMKID Success: {}'.format(filename))
            return True
        logging.debug('[hashie] [-] PMKID Fail: No hashes extracted from {}'.format(filename))
        return False

    def _process_stale_pcaps(self, handshake_dir):
        """Batch-convert every .pcap in *handshake_dir* that lacks hash files."""
        logging.info('[hashie] Starting batch conversion of pcap files')
        handshakes_list = [os.path.join(handshake_dir, filename)
                           for filename in os.listdir(handshake_dir)
                           if filename.endswith('.pcap')]
        failed_jobs = []
        successful_jobs = []
        lonely_pcaps = []
        for num, handshake in enumerate(handshakes_list):
            fullpathNoExt = os.path.splitext(handshake)[0]
            pcapFileName = os.path.basename(handshake)
            if not os.path.isfile(fullpathNoExt + '.22000'):  # if no 22000, try
                if self._write22000(handshake):
                    successful_jobs.append('22000: ' + pcapFileName)
                else:
                    failed_jobs.append('22000: ' + pcapFileName)
            if not os.path.isfile(fullpathNoExt + '.16800'):  # if no 16800, try
                if self._write16800(handshake, ""):
                    successful_jobs.append('16800: ' + pcapFileName)
                else:
                    failed_jobs.append('16800: ' + pcapFileName)
                    # NOTE(review): only the .22000 file is checked here even
                    # though the comment in the original said "no 16800 AND no
                    # 22000" — kept as-is; confirm whether a .16800-only
                    # network should count as lonely.
                    if not os.path.isfile(fullpathNoExt + '.22000'):
                        lonely_pcaps.append(handshake)
            # report progress every 50 files, and once at the end
            if ((num + 1) % 50 == 0) or (num + 1 == len(handshakes_list)):
                logging.info('[hashie] Batch job: {}/{} done ({} fails)'.format(num + 1, len(handshakes_list), len(lonely_pcaps)))
        if successful_jobs:
            logging.info('[hashie] Batch job: {} new handshake files created'.format(len(successful_jobs)))
        if lonely_pcaps:
            logging.info('[hashie] Batch job: {} networks without enough packets to create a hash'.format(len(lonely_pcaps)))
            self._getLocations(lonely_pcaps)

    def _getLocations(self, lonely_pcaps):
        """Export the list of hash-less pcaps for the webgpsmap plugin to load."""
        with open('/root/.incompletePcaps', 'w') as isIncomplete:
            count = 0
            for pcapFile in lonely_pcaps:
                filename = os.path.basename(pcapFile)  # keep extension
                fullpathNoExt = os.path.splitext(pcapFile)[0]
                isIncomplete.write(filename + '\n')
                # count how many of the lonely networks have any location file
                if (os.path.isfile(fullpathNoExt + '.gps.json')
                        or os.path.isfile(fullpathNoExt + '.geo.json')
                        or os.path.isfile(fullpathNoExt + '.paw-gps.json')):
                    count += 1
        if count != 0:
            logging.info('[hashie] Used {} GPS/GEO/PAW-GPS files to find lonely networks, go check webgpsmap! ;)'.format(str(count)))
        else:
            logging.info('[hashie] Could not find any GPS/GEO/PAW-GPS files for the lonely networks')

    def _getLocationsCSV(self, lonely_pcaps):
        """Manually export lonely-network locations to /root/locations.csv.

        Kept for later use; needs try/except around the json loads and a
        review of the paw-gps format. Each row is: name,lat,lng,accuracy
        (accuracy is hard-coded to 50 where the source file has none).
        """
        locations = []
        for pcapFile in lonely_pcaps:
            filename = os.path.splitext(os.path.basename(pcapFile))[0]
            fullpathNoExt = os.path.splitext(pcapFile)[0]
            if os.path.isfile(fullpathNoExt + '.gps.json'):
                with open(fullpathNoExt + '.gps.json', 'r') as tempFileA:
                    data = json.load(tempFileA)
                locations.append(filename + ',' + str(data['Latitude']) + ',' + str(data['Longitude']) + ',50')
            elif os.path.isfile(fullpathNoExt + '.geo.json'):
                with open(fullpathNoExt + '.geo.json', 'r') as tempFileB:
                    data = json.load(tempFileB)
                locations.append(filename + ',' + str(data['location']['lat']) + ',' + str(data['location']['lng']) + ',' + str(data['accuracy']))
            elif os.path.isfile(fullpathNoExt + '.paw-gps.json'):
                with open(fullpathNoExt + '.paw-gps.json', 'r') as tempFileC:
                    data = json.load(tempFileC)
                locations.append(filename + ',' + str(data['lat']) + ',' + str(data['long']) + ',50')
        if locations:
            with open('/root/locations.csv', 'w') as tempFileD:
                for loc in locations:
                    tempFileD.write(loc + '\n')
            logging.info('[hashie] Used {} GPS/GEO files to find lonely networks, load /root/locations.csv into a mapping app and go say hi!'.format(len(locations)))