Big update

parent 5d8d86204a
commit 1f2dd73976
@ -1,7 +1,31 @@
# top-most EditorConfig file
# EditorConfig helps developers define and maintain consistent
# coding styles between different editors and IDEs
# editorconfig.org

root = true

# Matches the exact files either package.json or .travis.yml
[{*.yml,*.yaml,config.yml,defaults.yml}]
[*]
indent_style = space
indent_size = 2
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[Makefile]
indent_style = tab

[*.py]
indent_style = space
indent_size = 4

[*.json]
insert_final_newline = ignore

[*.js]
indent_style = ignore
insert_final_newline = ignore

[*.{md,txt}]
indent_size = 4
trim_trailing_whitespace = false
@ -2,20 +2,14 @@
import logging
import argparse
import time
import yaml
import signal
import sys
import toml

import pwnagotchi
from pwnagotchi import grid
from pwnagotchi import utils
from pwnagotchi import plugins
from pwnagotchi.plugins import cmd as plugins_cmd
from pwnagotchi import log
from pwnagotchi.identity import KeyPair
from pwnagotchi.agent import Agent
from pwnagotchi.ui import fonts
from pwnagotchi.ui.display import Display
from pwnagotchi import restart
from pwnagotchi import fs
from pwnagotchi.utils import DottedTomlEncoder

@ -89,11 +83,12 @@ def do_auto_mode(agent):
            plugins.on('internet_available', agent)

        except Exception as e:
            logging.exception("main loop exception")
            logging.exception("main loop exception (%s)", e)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser = plugins_cmd.add_parsers(parser)

    parser.add_argument('-C', '--config', action='store', dest='config', default='/etc/pwnagotchi/default.toml',
                        help='Main configuration file.')

@ -118,15 +113,30 @@ if __name__ == '__main__':

    args = parser.parse_args()


    if plugins_cmd.used_plugin_cmd(args):
        config = utils.load_config(args)
        log.setup_logging(args, config)
        rc = plugins_cmd.handle_cmd(args, config)
        sys.exit(rc)

    if args.version:
        print(pwnagotchi.__version__)
        sys.exit(0)

    config = utils.load_config(args)

    if args.print_config:
        print(toml.dumps(config, encoder=DottedTomlEncoder()))
        sys.exit(0)

    from pwnagotchi.identity import KeyPair
    from pwnagotchi.agent import Agent
    from pwnagotchi.ui import fonts
    from pwnagotchi.ui.display import Display
    from pwnagotchi import grid
    from pwnagotchi import plugins

    pwnagotchi.config = config
    fs.setup_mounts(config)
    log.setup_logging(args, config)
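For reference, a minimal sketch of how the reworked entry point above is meant to be invoked. The plugins subcommands come from plugins_cmd.add_parsers() (added in pwnagotchi/plugins/cmd.py further down in this commit); the exact --version and --print-config flag spellings are assumptions inferred from the argparse dest names shown here:

    pwnagotchi --version          # print pwnagotchi.__version__ and exit
    pwnagotchi --print-config     # dump the merged config as dotted TOML and exit
    pwnagotchi plugins list       # plugin commands are handled before the agent (and its heavy imports) start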
@ -1,6 +1,14 @@
#!/usr/bin/env bash
source /usr/bin/pwnlib

# we need to decrypt something
if is_crypted_mode; then
  while ! is_decrypted; do
    echo "Waiting for decryption..."
    sleep 1
  done
fi

# start mon0
start_monitor_interface
builder/data/usr/bin/decryption-webserver (new executable file, 71 lines)
@ -0,0 +1,71 @@
#!/usr/bin/env python3

from http.server import HTTPServer, BaseHTTPRequestHandler


HTML_FORM = """
<!DOCTYPE html>
<html>
<head>
  <title>Decryption</title>
  <style>
    body { text-align: center; padding: 150px; }
    h1 { font-size: 50px; }
    body { font: 20px Helvetica, sans-serif; color: #333; }
    article { display: block; text-align: center; width: 650px; margin: 0 auto;}
    input {
      padding: 12px 20px;
      margin: 8px 0;
      box-sizing: border-box;
      border: 1px solid #ccc;
    }
    input[type=password] {
      width: 75%;
      font-size: 24px;
    }
    input[type=submit] {
      cursor: pointer;
      width: 75%;
    }
    input[type=submit]:hover {
      background-color: #d9d9d9;
    }
  </style>
</head>
<body>
<article>
    <h1>Decryption</h1>
    <p>Some of your files are encrypted.</p>
    <p>Please provide the decryption password.</p>
    <div>
      <form action="/set-password" method="POST">
        <input type="password" id="password" name="password" value=""><br>
        <input type="submit" value="Submit">
      </form>
    </div>
</article>
</body>
</html>
"""


class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):

    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(HTML_FORM.encode())

    def do_POST(self):
        content_length = int(self.headers['Content-Length'])
        body = self.rfile.read(content_length)
        self.send_response(200)
        self.end_headers()
        password = body.decode('UTF-8').split('=')[1]

        with open('/tmp/.pwnagotchi-secret', 'wt') as pwfile:
            pwfile.write(password)


httpd = HTTPServer(('0.0.0.0', 80), SimpleHTTPRequestHandler)
httpd.serve_forever()
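The handler above keeps everything after the first '=' of the POST body, so the client just submits a form-encoded password field. A minimal sketch of the same submission from the command line (192.168.0.10 is the address pwnlib assigns to wlan0 later in this commit; the passphrase is an illustrative placeholder, and note the parser does no URL decoding):

    curl -d 'password=my-luks-passphrase' http://192.168.0.10/set-password
    # the value ends up in /tmp/.pwnagotchi-secret, where pwnlib reads it back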
@ -1,6 +1,14 @@
#!/usr/bin/env bash
source /usr/bin/pwnlib

# we need to decrypt something
if is_crypted_mode; then
  while ! is_decrypted; do
    echo "Waiting for decryption..."
    sleep 1
  done
fi

# blink 10 times to signal ready state
blink_led 10 &

@ -8,4 +16,4 @@ if is_auto_mode; then
  /usr/local/bin/pwnagotchi
else
  /usr/local/bin/pwnagotchi --manual
fi
fi
@ -84,4 +84,80 @@ is_auto_mode_no_delete() {

  # no override, but none of the interfaces is up -> AUTO
  return 0
}
}

# check if we need to decrypt something
is_crypted_mode() {
  if [ -f /root/.pwnagotchi-crypted ]; then
    return 0
  fi
  return 1
}

# decryption loop
is_decrypted() {
  while read -r mapping container mount; do
    # mapping = name the device or file will be mapped to
    # container = the luks encrypted device or file
    # mount = the mountpoint

    # fail if not mounted
    if ! mountpoint -q "$mount" >/dev/null 2>&1; then
      if [ -f /tmp/.pwnagotchi-secret ]; then
        </tmp/.pwnagotchi-secret read -r SECRET
        if ! test -b /dev/disk/by-id/dm-uuid-*"$(cryptsetup luksUUID "$container" | tr -d -)"*; then
          if echo -n "$SECRET" | cryptsetup luksOpen -d- "$container" "$mapping" >/dev/null 2>&1; then
            echo "Container decrypted!"

          fi
        fi

        if mount /dev/mapper/"$mapping" "$mount" >/dev/null 2>&1; then
          echo "Mounted /dev/mapper/$mapping to $mount"
          continue
        fi
      fi

      if ! ip -4 addr show wlan0 | grep inet >/dev/null 2>&1; then
        >/dev/null 2>&1 ip addr add 192.168.0.10/24 dev wlan0
      fi

      if ! pgrep -f decryption-webserver >/dev/null 2>&1; then
        >/dev/null 2>&1 decryption-webserver &
      fi

      if ! pgrep wpa_supplicant >/dev/null 2>&1; then
        >/tmp/wpa_supplicant.conf cat <<EOF
ctrl_interface=DIR=/var/run/wpa_supplicant GROUP=netdev
update_config=1
ap_scan=2

network={
    ssid="DECRYPT-ME"
    mode=2
    key_mgmt=WPA-PSK
    psk="pwnagotchi"
    frequency=2437
}
EOF
        >/dev/null 2>&1 wpa_supplicant -D nl80211 -i wlan0 -c /tmp/wpa_supplicant.conf &
      fi

      if ! pgrep dnsmasq >/dev/null 2>&1; then
        >/dev/null 2>&1 dnsmasq -k -p 53 -h -O "6,192.168.0.10" -A "/#/192.168.0.10" -i wlan0 -K -F 192.168.0.50,192.168.0.60,255.255.255.0,24h &
      fi

      return 1
    fi
  done </root/.pwnagotchi-crypted

  # overwrite password
  >/tmp/.pwnagotchi-secret python3 -c 'print("A"*4096)'
  sync # flush

  pkill wpa_supplicant
  pkill dnsmasq
  kill "$(pgrep -f "decryption-webserver")"

  return 0
}
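is_decrypted() reads whitespace-separated "mapping container mount" triples, one per line, from /root/.pwnagotchi-crypted, so putting a unit into crypted mode only takes a file like the hypothetical example below (the mapping name and paths are illustrative, not taken from the commit):

    # /root/.pwnagotchi-crypted
    # <mapping>  <container>            <mount>
    handshakes   /root/handshakes.luks  /home/pi/handshakes

Once the web form above delivers the passphrase, cryptsetup luksOpen maps the container to /dev/mapper/handshakes and the loop mounts it at the given mountpoint.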
@ -101,10 +101,8 @@
  - bc
  - fonts-freefont-ttf
  - fbi
  - python3-flask
  - python3-flask-cors
  - python3-flaskext.wtf
  - fonts-ipaexfont-gothic
  - cryptsetup

  tasks:
  - name: change hostname

@ -218,6 +216,16 @@
      dest: /usr/local/src/pwnagotchi
    register: pwnagotchigit

  - name: create /usr/local/share/pwnagotchi/ folder
    file:
      path: /usr/local/share/pwnagotchi/
      state: directory

  - name: clone pwnagotchi plugins repository
    git:
      repo: https://github.com/evilsocket/pwnagotchi-plugins-contrib.git
      dest: /usr/local/share/pwnagotchi/availaible-plugins

  - name: fetch pwnagotchi version
    set_fact:
      pwnagotchi_version: "{{ lookup('file', '/usr/local/src/pwnagotchi/pwnagotchi/_version.py') | regex_replace('.*__version__.*=.*''([0-9]+\\.[0-9]+\\.[0-9]+[A-Za-z0-9]*)''.*', '\\1') }}"
@ -3,10 +3,8 @@ import logging
|
||||
import time
|
||||
import re
|
||||
|
||||
import pwnagotchi.ui.view as view
|
||||
import pwnagotchi
|
||||
|
||||
from pwnagotchi import fs
|
||||
|
||||
from pwnagotchi._version import __version__
|
||||
|
||||
_name = None
|
||||
@ -110,10 +108,13 @@ def temperature(celsius=True):
|
||||
def shutdown():
|
||||
logging.warning("syncing...")
|
||||
|
||||
from pwnagotchi import fs
|
||||
for m in fs.mounts:
|
||||
m.sync()
|
||||
|
||||
logging.warning("shutting down ...")
|
||||
|
||||
from pwnagotchi.ui import view
|
||||
if view.ROOT:
|
||||
view.ROOT.on_shutdown()
|
||||
# give it some time to refresh the ui
|
||||
@ -141,6 +142,7 @@ def reboot(mode=None):
|
||||
else:
|
||||
logging.warning("rebooting ...")
|
||||
|
||||
from pwnagotchi.ui import view
|
||||
if view.ROOT:
|
||||
view.ROOT.on_rebooting()
|
||||
# give it some time to refresh the ui
|
||||
|
@ -3,6 +3,7 @@ import json
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
import asyncio
|
||||
import _thread
|
||||
|
||||
import pwnagotchi
|
||||
@ -68,7 +69,7 @@ class Agent(Client, Automata, AsyncAdvertiser, AsyncTrainer):
|
||||
for tag in self._config['bettercap']['silence']:
|
||||
try:
|
||||
self.run('events.ignore %s' % tag, verbose_errors=False)
|
||||
except Exception as e:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _reset_wifi_settings(self):
|
||||
@ -121,9 +122,9 @@ class Agent(Client, Automata, AsyncAdvertiser, AsyncTrainer):
|
||||
def _wait_bettercap(self):
|
||||
while True:
|
||||
try:
|
||||
s = self.session()
|
||||
_s = self.session()
|
||||
return
|
||||
except:
|
||||
except Exception:
|
||||
logging.info("waiting for bettercap API to be available ...")
|
||||
time.sleep(1)
|
||||
|
||||
@ -134,6 +135,7 @@ class Agent(Client, Automata, AsyncAdvertiser, AsyncTrainer):
|
||||
self.set_starting()
|
||||
self.start_monitor_mode()
|
||||
self.start_event_polling()
|
||||
self.start_session_fetcher()
|
||||
# print initial stats
|
||||
self.next_epoch()
|
||||
self.set_ready()
|
||||
@ -158,7 +160,7 @@ class Agent(Client, Automata, AsyncAdvertiser, AsyncTrainer):
|
||||
try:
|
||||
self.run('wifi.recon.channel %s' % ','.join(map(str, channels)))
|
||||
except Exception as e:
|
||||
logging.exception("error")
|
||||
logging.exception("Error while setting wifi.recon.channels (%s)", e)
|
||||
|
||||
self.wait_for(recon_time, sleeping=False)
|
||||
|
||||
@ -188,7 +190,7 @@ class Agent(Client, Automata, AsyncAdvertiser, AsyncTrainer):
|
||||
if self._filter_included(ap):
|
||||
aps.append(ap)
|
||||
except Exception as e:
|
||||
logging.exception("error")
|
||||
logging.exception("Error while getting acces points (%s)", e)
|
||||
|
||||
aps.sort(key=lambda ap: ap['channel'])
|
||||
return self.set_access_points(aps)
|
||||
@ -303,60 +305,67 @@ class Agent(Client, Automata, AsyncAdvertiser, AsyncTrainer):
|
||||
if not no_exceptions:
|
||||
raise
|
||||
|
||||
def _event_poller(self):
|
||||
self._load_recovery_data()
|
||||
|
||||
def start_session_fetcher(self):
|
||||
_thread.start_new_thread(self._fetch_stats, ())
|
||||
|
||||
|
||||
def _fetch_stats(self):
|
||||
while True:
|
||||
s = self.session()
|
||||
self._update_uptime(s)
|
||||
self._update_advertisement(s)
|
||||
self._update_peers()
|
||||
self._update_counters()
|
||||
self._update_handshakes(0)
|
||||
time.sleep(1)
|
||||
|
||||
async def _on_event(self, msg):
|
||||
found_handshake = False
|
||||
jmsg = json.loads(msg)
|
||||
|
||||
if jmsg['tag'] == 'wifi.client.handshake':
|
||||
filename = jmsg['data']['file']
|
||||
sta_mac = jmsg['data']['station']
|
||||
ap_mac = jmsg['data']['ap']
|
||||
key = "%s -> %s" % (sta_mac, ap_mac)
|
||||
if key not in self._handshakes:
|
||||
self._handshakes[key] = jmsg
|
||||
ap_and_station = self._find_ap_sta_in(sta_mac, ap_mac, s)
|
||||
if ap_and_station is None:
|
||||
logging.warning("!!! captured new handshake: %s !!!", key)
|
||||
self._last_pwnd = ap_mac
|
||||
plugins.on('handshake', self, filename, ap_mac, sta_mac)
|
||||
else:
|
||||
(ap, sta) = ap_and_station
|
||||
self._last_pwnd = ap['hostname'] if ap['hostname'] != '' and ap[
|
||||
'hostname'] != '<hidden>' else ap_mac
|
||||
logging.warning(
|
||||
"!!! captured new handshake on channel %d, %d dBm: %s (%s) -> %s [%s (%s)] !!!",
|
||||
ap['channel'],
|
||||
ap['rssi'],
|
||||
sta['mac'], sta['vendor'],
|
||||
ap['hostname'], ap['mac'], ap['vendor'])
|
||||
plugins.on('handshake', self, filename, ap, sta)
|
||||
found_handshake = True
|
||||
self._update_handshakes(1 if found_handshake else 0)
|
||||
|
||||
def _event_poller(self, loop):
|
||||
self._load_recovery_data()
|
||||
self.run('events.clear')
|
||||
|
||||
while True:
|
||||
time.sleep(1)
|
||||
|
||||
new_shakes = 0
|
||||
|
||||
logging.debug("polling events ...")
|
||||
|
||||
try:
|
||||
s = self.session()
|
||||
self._update_uptime(s)
|
||||
|
||||
self._update_advertisement(s)
|
||||
self._update_peers()
|
||||
self._update_counters()
|
||||
|
||||
for h in [e for e in self.events() if e['tag'] == 'wifi.client.handshake']:
|
||||
filename = h['data']['file']
|
||||
sta_mac = h['data']['station']
|
||||
ap_mac = h['data']['ap']
|
||||
key = "%s -> %s" % (sta_mac, ap_mac)
|
||||
|
||||
if key not in self._handshakes:
|
||||
self._handshakes[key] = h
|
||||
new_shakes += 1
|
||||
ap_and_station = self._find_ap_sta_in(sta_mac, ap_mac, s)
|
||||
if ap_and_station is None:
|
||||
logging.warning("!!! captured new handshake: %s !!!", key)
|
||||
self._last_pwnd = ap_mac
|
||||
plugins.on('handshake', self, filename, ap_mac, sta_mac)
|
||||
else:
|
||||
(ap, sta) = ap_and_station
|
||||
self._last_pwnd = ap['hostname'] if ap['hostname'] != '' and ap[
|
||||
'hostname'] != '<hidden>' else ap_mac
|
||||
logging.warning(
|
||||
"!!! captured new handshake on channel %d, %d dBm: %s (%s) -> %s [%s (%s)] !!!",
|
||||
ap['channel'],
|
||||
ap['rssi'],
|
||||
sta['mac'], sta['vendor'],
|
||||
ap['hostname'], ap['mac'], ap['vendor'])
|
||||
plugins.on('handshake', self, filename, ap, sta)
|
||||
|
||||
except Exception as e:
|
||||
logging.error("error: %s", e)
|
||||
|
||||
finally:
|
||||
self._update_handshakes(new_shakes)
|
||||
loop.create_task(self.start_websocket(self._on_event))
|
||||
loop.run_forever()
|
||||
except Exception as ex:
|
||||
logging.debug("Error while polling via websocket (%s)", ex)
|
||||
|
||||
def start_event_polling(self):
|
||||
_thread.start_new_thread(self._event_poller, ())
|
||||
# start a thread and pass in the mainloop
|
||||
_thread.start_new_thread(self._event_poller, (asyncio.new_event_loop(),))
|
||||
|
||||
|
||||
def is_module_running(self, module):
|
||||
s = self.session()
|
||||
@ -465,4 +474,4 @@ class Agent(Client, Automata, AsyncAdvertiser, AsyncTrainer):
|
||||
plugins.on('channel_hop', self, channel)
|
||||
|
||||
except Exception as e:
|
||||
logging.error("error: %s", e)
|
||||
logging.error("Error while setting channel (%s)", e)
|
||||
|
@ -1,12 +1,9 @@
|
||||
import os
|
||||
import time
|
||||
import warnings
|
||||
import logging
|
||||
|
||||
# https://stackoverflow.com/questions/40426502/is-there-a-way-to-suppress-the-messages-tensorflow-prints/40426709
|
||||
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # or any {'0', '1', '2'}
|
||||
# https://stackoverflow.com/questions/15777951/how-to-suppress-pandas-future-warning
|
||||
warnings.simplefilter(action='ignore', category=FutureWarning)
|
||||
|
||||
|
||||
def load(config, agent, epoch, from_disk=True):
|
||||
@ -59,7 +56,7 @@ def load(config, agent, epoch, from_disk=True):
|
||||
|
||||
return a2c
|
||||
except Exception as e:
|
||||
logging.exception("error while starting AI")
|
||||
logging.exception("error while starting AI (%s)", e)
|
||||
|
||||
logging.warning("[ai] AI not loaded!")
|
||||
return False
|
||||
|
@ -19,6 +19,10 @@ class Epoch(object):
|
||||
self.active_for = 0
|
||||
# number of epochs with no visible access points
|
||||
self.blind_for = 0
|
||||
# number of epochs in sad state
|
||||
self.sad_for = 0
|
||||
# number of epochs in bored state
|
||||
self.bored_for = 0
|
||||
# did deauth in this epoch in the current channel?
|
||||
self.did_deauth = False
|
||||
# number of deauths in this epoch
|
||||
@ -99,13 +103,13 @@ class Epoch(object):
|
||||
try:
|
||||
aps_per_chan[ch_idx] += 1.0
|
||||
sta_per_chan[ch_idx] += len(ap['clients'])
|
||||
except IndexError as e:
|
||||
except IndexError:
|
||||
logging.error("got data on channel %d, we can store %d channels" % (ap['channel'], wifi.NumChannels))
|
||||
|
||||
for peer in peers:
|
||||
try:
|
||||
peers_per_chan[peer.last_channel - 1] += 1.0
|
||||
except IndexError as e:
|
||||
except IndexError:
|
||||
logging.error(
|
||||
"got peer data on channel %d, we can store %d channels" % (peer.last_channel, wifi.NumChannels))
|
||||
|
||||
@ -157,6 +161,20 @@ class Epoch(object):
|
||||
else:
|
||||
self.active_for += 1
|
||||
self.inactive_for = 0
|
||||
self.sad_for = 0
|
||||
self.bored_for = 0
|
||||
|
||||
if self.inactive_for >= self.config['personality']['sad_num_epochs']:
|
||||
# sad > bored; cant be sad and bored
|
||||
self.bored_for = 0
|
||||
self.sad_for += 1
|
||||
elif self.inactive_for >= self.config['personality']['bored_num_epochs']:
|
||||
# sad_treshhold > inactive > bored_treshhold; cant be sad and bored
|
||||
self.sad_for = 0
|
||||
self.bored_for += 1
|
||||
else:
|
||||
self.sad_for = 0
|
||||
self.bored_for = 0
|
||||
|
||||
now = time.time()
|
||||
cpu = pwnagotchi.cpu_load()
|
||||
@ -172,6 +190,8 @@ class Epoch(object):
|
||||
'blind_for_epochs': self.blind_for,
|
||||
'inactive_for_epochs': self.inactive_for,
|
||||
'active_for_epochs': self.active_for,
|
||||
'sad_for_epochs': self.sad_for,
|
||||
'bored_for_epochs': self.bored_for,
|
||||
'missed_interactions': self.num_missed,
|
||||
'num_hops': self.num_hops,
|
||||
'num_peers': self.num_peers,
|
||||
@ -188,13 +208,15 @@ class Epoch(object):
|
||||
self._epoch_data['reward'] = self._reward(self.epoch + 1, self._epoch_data)
|
||||
self._epoch_data_ready.set()
|
||||
|
||||
logging.info("[epoch %d] duration=%s slept_for=%s blind=%d inactive=%d active=%d peers=%d tot_bond=%.2f "
|
||||
logging.info("[epoch %d] duration=%s slept_for=%s blind=%d sad=%d bored=%d inactive=%d active=%d peers=%d tot_bond=%.2f "
|
||||
"avg_bond=%.2f hops=%d missed=%d deauths=%d assocs=%d handshakes=%d cpu=%d%% mem=%d%% "
|
||||
"temperature=%dC reward=%s" % (
|
||||
self.epoch,
|
||||
utils.secs_to_hhmmss(self.epoch_duration),
|
||||
utils.secs_to_hhmmss(self.num_slept),
|
||||
self.blind_for,
|
||||
self.sad_for,
|
||||
self.bored_for,
|
||||
self.inactive_for,
|
||||
self.active_for,
|
||||
self.num_peers,
|
||||
|
@ -18,4 +18,10 @@ class RewardFunction(object):
        m = -.3 * (state['missed_interactions'] / tot_interactions)
        i = -.2 * (state['inactive_for_epochs'] / tot_epochs)

        return h + a + c + b + i + m
        # include emotions if state >= 5 epochs
        _sad = state['sad_for_epochs'] if state['sad_for_epochs'] >= 5 else 0
        _bored = state['bored_for_epochs'] if state['bored_for_epochs'] >= 5 else 0
        s = -.2 * (_sad / tot_epochs)
        l = -.1 * (_bored / tot_epochs)

        return h + a + c + b + i + m + s + l
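A quick worked example of the two new penalty terms with the weights above: for tot_epochs = 100, sad_for_epochs = 10 and bored_for_epochs = 3, the bored counter stays below the 5-epoch gate and is zeroed out, so s = -.2 * (10 / 100) = -0.02 and l = -.1 * (0 / 100) = 0; only sustained sadness or boredom now drags the reward down.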
@ -176,7 +176,7 @@ class AsyncTrainer(object):
|
||||
self.set_training(True, epochs_per_episode)
|
||||
self._model.learn(total_timesteps=epochs_per_episode, callback=self.on_ai_training_step)
|
||||
except Exception as e:
|
||||
logging.exception("[ai] error while training")
|
||||
logging.exception("[ai] error while training (%s)", e)
|
||||
finally:
|
||||
self.set_training(False)
|
||||
obs = self._model.env.reset()
|
||||
|
@ -120,14 +120,14 @@ class Automata(object):
|
||||
logging.warning("agent missed %d interactions -> lonely", did_miss)
|
||||
self.set_lonely()
|
||||
# after X times being bored, the status is set to sad or angry
|
||||
elif self._epoch.inactive_for >= self._config['personality']['sad_num_epochs']:
|
||||
elif self._epoch.sad_for:
|
||||
factor = self._epoch.inactive_for / self._config['personality']['sad_num_epochs']
|
||||
if factor >= 2.0:
|
||||
self.set_angry(factor)
|
||||
else:
|
||||
self.set_sad()
|
||||
# after X times being inactive, the status is set to bored
|
||||
elif self._epoch.inactive_for >= self._config['personality']['bored_num_epochs']:
|
||||
elif self._epoch.bored_for:
|
||||
self.set_bored()
|
||||
# after X times being active, the status is set to happy / excited
|
||||
elif self._epoch.active_for >= self._config['personality']['excited_num_epochs']:
|
||||
|
@ -1,5 +1,8 @@
import json
import logging
import requests
import websockets

from requests.auth import HTTPBasicAuth

@ -25,15 +28,25 @@ class Client(object):
        self.username = username
        self.password = password
        self.url = "%s://%s:%d/api" % (scheme, hostname, port)
        self.websocket = "ws://%s:%s@%s:%d/api" % (username, password, hostname, port)
        self.auth = HTTPBasicAuth(username, password)

    def session(self):
        r = requests.get("%s/session" % self.url, auth=self.auth)
        return decode(r)

    def events(self):
        r = requests.get("%s/events" % self.url, auth=self.auth)
        return decode(r)
    async def start_websocket(self, consumer):
        s = "%s/events" % self.websocket
        while True:
            try:
                async with websockets.connect(s, ping_interval=60, ping_timeout=90) as ws:
                    async for msg in ws:
                        try:
                            await consumer(msg)
                        except Exception as ex:
                            logging.debug("Error while parsing event (%s)", ex)
            except websockets.exceptions.ConnectionClosedError:
                logging.debug("Lost websocket connection. Reconnecting...")

    def run(self, command, verbose_errors=True):
        r = requests.post("%s/session" % self.url, auth=self.auth, json={'cmd': command})
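A minimal standalone sketch of consuming the new websocket event stream through this Client. The parameter names are taken from the __init__ body shown above, but the host, port and credentials are assumptions here; pass whatever your bettercap API actually uses. The consumer is any async callable, as with Agent._on_event:

    import asyncio
    from pwnagotchi.bettercap import Client

    async def printer(msg):
        # msg is the raw JSON string pushed on bettercap's /api/events websocket
        print(msg)

    client = Client(hostname='localhost', scheme='http', port=8081,
                    username='pwnagotchi', password='pwnagotchi')
    loop = asyncio.new_event_loop()
    loop.create_task(client.start_websocket(printer))
    loop.run_forever()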
@ -1,5 +1,6 @@
|
||||
main.name = ""
|
||||
main.lang = "en"
|
||||
main.confd = "/etc/pwnagotchi/conf.d/"
|
||||
main.custom_plugins = ""
|
||||
main.iface = "mon0"
|
||||
main.mon_start_cmd = "/usr/bin/monstart"
|
||||
@ -14,7 +15,6 @@ main.whitelist = [
|
||||
]
|
||||
main.filter = ""
|
||||
|
||||
|
||||
main.plugins.grid.enabled = true
|
||||
main.plugins.grid.report = false
|
||||
main.plugins.grid.exclude = [
|
||||
@ -38,14 +38,17 @@ main.plugins.onlinehashcrack.enabled = false
|
||||
main.plugins.onlinehashcrack.email = ""
|
||||
main.plugins.onlinehashcrack.dashboard = ""
|
||||
main.plugins.onlinehashcrack.single_files = false
|
||||
main.plugins.onlinehashcrack.whitelist = []
|
||||
|
||||
main.plugins.wpa-sec.enabled = false
|
||||
main.plugins.wpa-sec.api_key = ""
|
||||
main.plugins.wpa-sec.api_url = "https://wpa-sec.stanev.org"
|
||||
main.plugins.wpa-sec.download_results = false
|
||||
main.plugins.wpa-sec.whitelist = []
|
||||
|
||||
main.plugins.wigle.enabled = false
|
||||
main.plugins.wigle.api_key = ""
|
||||
main.plugins.wigle.whitelist = []
|
||||
|
||||
main.plugins.bt-tether.enabled = false
|
||||
|
||||
@ -106,6 +109,8 @@ main.plugins.led.patterns.epoch = "oo oo oo oo oo oo oo"
|
||||
main.plugins.led.patterns.peer_detected = "oo oo oo oo oo oo oo"
|
||||
main.plugins.led.patterns.peer_lost = "oo oo oo oo oo oo oo"
|
||||
|
||||
main.plugins.logtail.enabled = false
|
||||
|
||||
main.plugins.session-stats.enabled = true
|
||||
main.plugins.session-stats.save_directory = "/var/tmp/pwnagotchi/sessions/"
|
||||
|
||||
|
@ -11,6 +11,7 @@ from distutils.dir_util import copy_tree
|
||||
|
||||
mounts = list()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ensure_write(filename, mode='w'):
|
||||
path = os.path.dirname(filename)
|
||||
@ -25,18 +26,27 @@ def ensure_write(filename, mode='w'):
|
||||
|
||||
|
||||
def size_of(path):
|
||||
"""
|
||||
Calculate the sum of all the files in path
|
||||
"""
|
||||
total = 0
|
||||
for root, dirs, files in os.walk(path):
|
||||
for root, _, files in os.walk(path):
|
||||
for f in files:
|
||||
total += os.path.getsize(os.path.join(root, f))
|
||||
return total
|
||||
|
||||
|
||||
def is_mountpoint(path):
|
||||
"""
|
||||
Checks if path is mountpoint
|
||||
"""
|
||||
return os.system(f"mountpoint -q {path}") == 0
|
||||
|
||||
|
||||
def setup_mounts(config):
|
||||
"""
|
||||
Sets up all the configured mountpoints
|
||||
"""
|
||||
global mounts
|
||||
fs_cfg = config['fs']['memory']
|
||||
if not fs_cfg['enabled']:
|
||||
@ -82,6 +92,7 @@ def setup_mounts(config):
|
||||
|
||||
mounts.append(m)
|
||||
|
||||
|
||||
class MemoryFS:
|
||||
@staticmethod
|
||||
def zram_install():
|
||||
@ -90,11 +101,13 @@ class MemoryFS:
|
||||
return os.system("modprobe zram") == 0
|
||||
return True
|
||||
|
||||
|
||||
@staticmethod
|
||||
def zram_dev():
|
||||
logging.debug("[FS] Adding zram device")
|
||||
return open("/sys/class/zram-control/hot_add", "rt").read().strip("\n")
|
||||
|
||||
|
||||
def __init__(self, mount, disk, size="40M",
|
||||
zram=True, zram_alg="lz4", zram_disk_size="100M",
|
||||
zram_fs_type="ext4", rsync=True):
|
||||
@ -109,6 +122,7 @@ class MemoryFS:
|
||||
self.rsync = True
|
||||
self._setup()
|
||||
|
||||
|
||||
def _setup(self):
|
||||
if self.zram and MemoryFS.zram_install():
|
||||
# setup zram
|
||||
|
Binary file not shown.
@ -228,8 +228,8 @@ msgid ""
"{associated} new friends and ate {handshakes} handshakes! #pwnagotchi "
"#pwnlog #pwnlife #hacktheplanet #skynet"
msgstr ""
"{duration}中{deauted}のAPに拒否されたけど、{associated}回チャンスがあって"
"{handshakes}回ハンドシェイクがあったよ。。 #pownagotchi #pwnlog #pwnlife "
"{duration}中{deauthed}のAPに拒否されたけど、{associated}回チャンスがあって"
"{handshakes}回ハンドシェイクがあったよ。。 #pwnagotchi #pwnlog #pwnlife "
"#hacktheplanet #skynet"

msgid "hours"
@ -5,6 +5,7 @@ import os
import logging
import shutil
import gzip
import warnings
from datetime import datetime

from pwnagotchi.voice import Voice

@ -235,11 +236,18 @@ def setup_logging(args, config):
    console_handler.setFormatter(formatter)
    root.addHandler(console_handler)

    # https://stackoverflow.com/questions/24344045/how-can-i-completely-remove-any-logging-from-requests-module-in-python?noredirect=1&lq=1
    logging.getLogger("urllib3").propagate = False
    requests_log = logging.getLogger("requests")
    requests_log.addHandler(logging.NullHandler())
    requests_log.propagate = False
    if not args.debug:
        # disable scapy and tensorflow logging
        logging.getLogger("scapy").disabled = True
        logging.getLogger('tensorflow').disabled = True
        # https://stackoverflow.com/questions/15777951/how-to-suppress-pandas-future-warning
        warnings.simplefilter(action='ignore', category=FutureWarning)
        warnings.simplefilter(action='ignore', category=DeprecationWarning)
        # https://stackoverflow.com/questions/24344045/how-can-i-completely-remove-any-logging-from-requests-module-in-python?noredirect=1&lq=1
        logging.getLogger("urllib3").propagate = False
        requests_log = logging.getLogger("requests")
        requests_log.addHandler(logging.NullHandler())
        requests_log.propagate = False


def log_rotation(filename, cfg):
@ -4,9 +4,7 @@ import _thread
|
||||
import threading
|
||||
import importlib, importlib.util
|
||||
import logging
|
||||
import pwnagotchi
|
||||
from pwnagotchi.ui import view
|
||||
from pwnagotchi.utils import save_config
|
||||
|
||||
|
||||
|
||||
default_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "default")
|
||||
@ -39,6 +37,10 @@ def toggle_plugin(name, enable=True):
|
||||
|
||||
returns True if changed, otherwise False
|
||||
"""
|
||||
import pwnagotchi
|
||||
from pwnagotchi.ui import view
|
||||
from pwnagotchi.utils import save_config
|
||||
|
||||
global loaded, database
|
||||
|
||||
if pwnagotchi.config:
|
||||
@ -55,6 +57,8 @@ def toggle_plugin(name, enable=True):
|
||||
if enable and name in database and name not in loaded:
|
||||
load_from_file(database[name])
|
||||
one(name, 'loaded')
|
||||
if pwnagotchi.config:
|
||||
one(name, 'config_changed', pwnagotchi.config)
|
||||
one(name, 'ui_setup', view.ROOT)
|
||||
one(name, 'ready', view.ROOT._agent)
|
||||
return True
|
||||
@ -63,7 +67,7 @@ def toggle_plugin(name, enable=True):
|
||||
|
||||
|
||||
def on(event_name, *args, **kwargs):
|
||||
for plugin_name, plugin in loaded.items():
|
||||
for plugin_name in loaded.keys():
|
||||
one(plugin_name, event_name, *args, **kwargs)
|
||||
|
||||
|
||||
@ -136,3 +140,4 @@ def load(config):
|
||||
plugin.options = config['main']['plugins'][name]
|
||||
|
||||
on('loaded')
|
||||
on('config_changed', config)
|
||||
|
pwnagotchi/plugins/cmd.py (new file, 389 lines)
@ -0,0 +1,389 @@
|
||||
# Handles the commandline stuff
|
||||
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
import glob
|
||||
import re
|
||||
import shutil
|
||||
from fnmatch import fnmatch
|
||||
from pwnagotchi.utils import download_file, unzip, save_config, parse_version, md5
|
||||
from pwnagotchi.plugins import default_path
|
||||
|
||||
|
||||
REPO_URL = 'https://github.com/evilsocket/pwnagotchi-plugins-contrib/archive/master.zip'
|
||||
SAVE_DIR = '/usr/local/share/pwnagotchi/availaible-plugins/'
|
||||
DEFAULT_INSTALL_PATH = '/usr/local/share/pwnagotchi/installed-plugins/'
|
||||
|
||||
|
||||
def add_parsers(parser):
|
||||
"""
|
||||
Adds the plugins subcommand to a given argparse.ArgumentParser
|
||||
"""
|
||||
subparsers = parser.add_subparsers()
|
||||
## pwnagotchi plugins
|
||||
parser_plugins = subparsers.add_parser('plugins')
|
||||
plugin_subparsers = parser_plugins.add_subparsers(dest='plugincmd')
|
||||
|
||||
## pwnagotchi plugins search
|
||||
parser_plugins_search = plugin_subparsers.add_parser('search', help='Search for pwnagotchi plugins')
|
||||
parser_plugins_search.add_argument('pattern', type=str, help="Search expression (wildcards allowed)")
|
||||
|
||||
## pwnagotchi plugins list
|
||||
parser_plugins_list = plugin_subparsers.add_parser('list', help='List available pwnagotchi plugins')
|
||||
parser_plugins_list.add_argument('-i', '--installed', action='store_true', required=False, help='List also installed plugins')
|
||||
|
||||
## pwnagotchi plugins update
|
||||
parser_plugins_update = plugin_subparsers.add_parser('update', help='Updates the database')
|
||||
|
||||
## pwnagotchi plugins upgrade
|
||||
parser_plugins_upgrade = plugin_subparsers.add_parser('upgrade', help='Upgrades plugins')
|
||||
parser_plugins_upgrade.add_argument('pattern', type=str, nargs='?', default='*', help="Filter expression (wildcards allowed)")
|
||||
|
||||
## pwnagotchi plugins enable
|
||||
parser_plugins_enable = plugin_subparsers.add_parser('enable', help='Enables a plugin')
|
||||
parser_plugins_enable.add_argument('name', type=str, help='Name of the plugin')
|
||||
|
||||
## pwnagotchi plugins disable
|
||||
parser_plugins_disable = plugin_subparsers.add_parser('disable', help='Disables a plugin')
|
||||
parser_plugins_disable.add_argument('name', type=str, help='Name of the plugin')
|
||||
|
||||
## pwnagotchi plugins install
|
||||
parser_plugins_install = plugin_subparsers.add_parser('install', help='Installs a plugin')
|
||||
parser_plugins_install.add_argument('name', type=str, help='Name of the plugin')
|
||||
|
||||
## pwnagotchi plugins uninstall
|
||||
parser_plugins_uninstall = plugin_subparsers.add_parser('uninstall', help='Uninstalls a plugin')
|
||||
parser_plugins_uninstall.add_argument('name', type=str, help='Name of the plugin')
|
||||
|
||||
## pwnagotchi plugins edit
|
||||
parser_plugins_edit = plugin_subparsers.add_parser('edit', help='Edit the options')
|
||||
parser_plugins_edit.add_argument('name', type=str, help='Name of the plugin')
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
def used_plugin_cmd(args):
|
||||
"""
|
||||
Checks if the plugins subcommand was used
|
||||
"""
|
||||
return hasattr(args, 'plugincmd')
|
||||
|
||||
|
||||
def handle_cmd(args, config):
|
||||
"""
|
||||
Parses the arguments and does the thing the user wants
|
||||
"""
|
||||
if args.plugincmd == 'update':
|
||||
return update()
|
||||
elif args.plugincmd == 'search':
|
||||
args.installed = True # also search in installed plugins
|
||||
return list_plugins(args, config, args.pattern)
|
||||
elif args.plugincmd == 'install':
|
||||
return install(args, config)
|
||||
elif args.plugincmd == 'uninstall':
|
||||
return uninstall(args, config)
|
||||
elif args.plugincmd == 'list':
|
||||
return list_plugins(args, config)
|
||||
elif args.plugincmd == 'enable':
|
||||
return enable(args, config)
|
||||
elif args.plugincmd == 'disable':
|
||||
return disable(args, config)
|
||||
elif args.plugincmd == 'upgrade':
|
||||
return upgrade(args, config, args.pattern)
|
||||
elif args.plugincmd == 'edit':
|
||||
return edit(args, config)
|
||||
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def edit(args, config):
|
||||
"""
|
||||
Edit the config of the plugin
|
||||
"""
|
||||
plugin = args.name
|
||||
editor = os.environ.get('EDITOR', 'vim') # because vim is the best
|
||||
|
||||
if plugin not in config['main']['plugins']:
|
||||
return 1
|
||||
|
||||
plugin_config = {'main': {'plugins': {plugin: config['main']['plugins'][plugin]}}}
|
||||
|
||||
import toml
|
||||
from subprocess import call
|
||||
from tempfile import NamedTemporaryFile
|
||||
from pwnagotchi.utils import DottedTomlEncoder
|
||||
|
||||
new_plugin_config = None
|
||||
with NamedTemporaryFile(suffix=".tmp", mode='r+t') as tmp:
|
||||
tmp.write(toml.dumps(plugin_config, encoder=DottedTomlEncoder()))
|
||||
tmp.flush()
|
||||
rc = call([editor, tmp.name])
|
||||
if rc != 0:
|
||||
return rc
|
||||
tmp.seek(0)
|
||||
new_plugin_config = toml.load(tmp)
|
||||
|
||||
config['main']['plugins'][plugin] = new_plugin_config['main']['plugins'][plugin]
|
||||
save_config(config, args.user_config)
|
||||
return 0
|
||||
|
||||
|
||||
def enable(args, config):
|
||||
"""
|
||||
Enables the given plugin and saves the config to disk
|
||||
"""
|
||||
if args.name not in config['main']['plugins']:
|
||||
config['main']['plugins'][args.name] = dict()
|
||||
config['main']['plugins'][args.name]['enabled'] = True
|
||||
save_config(config, args.user_config)
|
||||
return 0
|
||||
|
||||
|
||||
def disable(args, config):
|
||||
"""
|
||||
Disables the given plugin and saves the config to disk
|
||||
"""
|
||||
if args.name not in config['main']['plugins']:
|
||||
config['main']['plugins'][args.name] = dict()
|
||||
config['main']['plugins'][args.name]['enabled'] = False
|
||||
save_config(config, args.user_config)
|
||||
return 0
|
||||
|
||||
|
||||
def upgrade(args, config, pattern='*'):
|
||||
"""
|
||||
Upgrades the given plugin
|
||||
"""
|
||||
available = _get_available()
|
||||
installed = _get_installed(config)
|
||||
|
||||
for plugin, filename in installed.items():
|
||||
if not fnmatch(plugin, pattern) or plugin not in available:
|
||||
continue
|
||||
|
||||
available_version = _extract_version(available[plugin])
|
||||
installed_version = _extract_version(filename)
|
||||
|
||||
if installed_version and available_version:
|
||||
if available_version <= installed_version:
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
|
||||
logging.info('Upgrade %s from %s to %s', plugin, '.'.join(installed_version), '.'.join(available_version))
|
||||
shutil.copyfile(available[plugin], installed[plugin])
|
||||
|
||||
# maybe has config
|
||||
for conf in glob.glob(available[plugin].replace('.py', '.y?ml')):
|
||||
dst = os.path.join(os.path.dirname(installed[plugin]), os.path.basename(conf))
|
||||
if os.path.exists(dst) and md5(dst) != md5(conf):
|
||||
# backup
|
||||
logging.info('Backing up config: %s', os.path.basename(conf))
|
||||
shutil.move(dst, dst + '.bak')
|
||||
shutil.copyfile(conf, dst)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def list_plugins(args, config, pattern='*'):
|
||||
"""
|
||||
Lists the available and installed plugins
|
||||
"""
|
||||
found = False
|
||||
|
||||
line = "|{name:^{width}}|{version:^9}|{enabled:^10}|{status:^15}|"
|
||||
|
||||
available = _get_available()
|
||||
installed = _get_installed(config)
|
||||
|
||||
available_and_installed = set(list(available.keys()) + list(installed.keys()))
|
||||
available_not_installed = set(available.keys()) - set(installed.keys())
|
||||
|
||||
max_len_list = available_and_installed if args.installed else available_not_installed
|
||||
max_len = max(map(len, max_len_list))
|
||||
header = line.format(name='Plugin', width=max_len, version='Version', enabled='Active', status='Status')
|
||||
line_length = max(max_len, len('Plugin')) + len(header) - len('Plugin') - 12 # lol
|
||||
|
||||
print('-' * line_length)
|
||||
print(header)
|
||||
print('-' * line_length)
|
||||
|
||||
if args.installed:
|
||||
# only installed (maybe update available?)
|
||||
for plugin, filename in sorted(installed.items()):
|
||||
if not fnmatch(plugin, pattern):
|
||||
continue
|
||||
found = True
|
||||
installed_version = _extract_version(filename)
|
||||
available_version = None
|
||||
if plugin in available:
|
||||
available_version = _extract_version(available[plugin])
|
||||
|
||||
status = "installed"
|
||||
if installed_version and available_version:
|
||||
if available_version > installed_version:
|
||||
status = "installed (^)"
|
||||
|
||||
enabled = 'enabled' if plugin in config['main']['plugins'] and \
|
||||
'enabled' in config['main']['plugins'][plugin] and \
|
||||
config['main']['plugins'][plugin]['enabled'] \
|
||||
else 'disabled'
|
||||
|
||||
print(line.format(name=plugin, width=max_len, version='.'.join(installed_version), enabled=enabled, status=status))
|
||||
|
||||
|
||||
for plugin in sorted(available_not_installed):
|
||||
if not fnmatch(plugin, pattern):
|
||||
continue
|
||||
found = True
|
||||
available_version = _extract_version(available[plugin])
|
||||
print(line.format(name=plugin, width=max_len, version='.'.join(available_version), enabled='-', status='available'))
|
||||
|
||||
print('-' * line_length)
|
||||
|
||||
if not found:
|
||||
logging.info('Maybe try: pwnagotchi plugins update')
|
||||
return 1
|
||||
return 0
|
||||
|
||||
|
||||
def _extract_version(filename):
|
||||
"""
|
||||
Extracts the version from a python file
|
||||
"""
|
||||
plugin_content = open(filename, 'rt').read()
|
||||
m = re.search(r'__version__[\t ]*=[\t ]*[\'\"]([^\"\']+)', plugin_content)
|
||||
if m:
|
||||
return parse_version(m.groups()[0])
|
||||
return None
|
||||
|
||||
|
||||
def _get_available():
|
||||
"""
|
||||
Get all availaible plugins
|
||||
"""
|
||||
available = dict()
|
||||
for filename in glob.glob(os.path.join(SAVE_DIR, "*.py")):
|
||||
plugin_name = os.path.basename(filename.replace(".py", ""))
|
||||
available[plugin_name] = filename
|
||||
return available
|
||||
|
||||
|
||||
def _get_installed(config):
|
||||
"""
|
||||
Get all installed plugins
|
||||
"""
|
||||
installed = dict()
|
||||
search_dirs = [ default_path, config['main']['custom_plugins'] ]
|
||||
for search_dir in search_dirs:
|
||||
if search_dir:
|
||||
for filename in glob.glob(os.path.join(search_dir, "*.py")):
|
||||
plugin_name = os.path.basename(filename.replace(".py", ""))
|
||||
installed[plugin_name] = filename
|
||||
return installed
|
||||
|
||||
|
||||
def uninstall(args, config):
|
||||
"""
|
||||
Uninstalls a plugin
|
||||
"""
|
||||
plugin_name = args.name
|
||||
installed = _get_installed(config)
|
||||
if plugin_name not in installed:
|
||||
logging.error('Plugin %s is not installed.', plugin_name)
|
||||
return 1
|
||||
os.remove(installed[plugin_name])
|
||||
return 0
|
||||
|
||||
|
||||
def install(args, config):
|
||||
"""
|
||||
Installs the given plugin
|
||||
"""
|
||||
global DEFAULT_INSTALL_PATH
|
||||
plugin_name = args.name
|
||||
available = _get_available()
|
||||
installed = _get_installed(config)
|
||||
|
||||
if plugin_name not in available:
|
||||
logging.error('%s not found.', plugin_name)
|
||||
return 1
|
||||
|
||||
if plugin_name in installed:
|
||||
logging.error('%s already installed.', plugin_name)
|
||||
|
||||
# install into custom_plugins path
|
||||
install_path = config['main']['custom_plugins']
|
||||
if not install_path:
|
||||
install_path = DEFAULT_INSTALL_PATH
|
||||
config['main']['custom_plugins'] = install_path
|
||||
save_config(config, args.user_config)
|
||||
|
||||
os.makedirs(install_path, exist_ok=True)
|
||||
|
||||
shutil.copyfile(available[plugin_name], os.path.join(install_path, os.path.basename(available[plugin_name])))
|
||||
|
||||
# maybe has config
|
||||
for conf in glob.glob(available[plugin_name].replace('.py', '.y?ml')):
|
||||
dst = os.path.join(install_path, os.path.basename(conf))
|
||||
if os.path.exists(dst) and md5(dst) != md5(conf):
|
||||
# backup
|
||||
logging.info('Backing up config: %s', os.path.basename(conf))
|
||||
shutil.move(dst, dst + '.bak')
|
||||
shutil.copyfile(conf, dst)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def _analyse_dir(path):
|
||||
results = dict()
|
||||
path += '*' if path.endswith('/') else '/*'
|
||||
for filename in glob.glob(path, recursive=True):
|
||||
if not os.path.isfile(filename):
|
||||
continue
|
||||
try:
|
||||
results[filename] = md5(filename)
|
||||
except OSError:
|
||||
continue
|
||||
return results
|
||||
|
||||
|
||||
def update():
|
||||
"""
|
||||
Updates the database
|
||||
"""
|
||||
global REPO_URL, SAVE_DIR
|
||||
|
||||
DEST = os.path.join(SAVE_DIR, 'plugins.zip')
|
||||
logging.info('Downloading plugins to %s', DEST)
|
||||
|
||||
try:
|
||||
os.makedirs(SAVE_DIR, exist_ok=True)
|
||||
before_update = _analyse_dir(SAVE_DIR)
|
||||
|
||||
download_file(REPO_URL, os.path.join(SAVE_DIR, DEST))
|
||||
|
||||
logging.info('Unzipping...')
|
||||
unzip(DEST, SAVE_DIR, strip_dirs=1)
|
||||
|
||||
after_update = _analyse_dir(SAVE_DIR)
|
||||
|
||||
b_len = len(before_update)
|
||||
a_len = len(after_update)
|
||||
|
||||
if a_len > b_len:
|
||||
logging.info('Found %d new file(s).', a_len - b_len)
|
||||
|
||||
changed = 0
|
||||
for filename, filehash in after_update.items():
|
||||
if filename in before_update and filehash != before_update[filename]:
|
||||
changed += 1
|
||||
|
||||
if changed:
|
||||
logging.info('%d file(s) were changed.', changed)
|
||||
|
||||
return 0
|
||||
except Exception as ex:
|
||||
logging.error('Error while updating plugins %s', ex)
|
||||
return 1
|
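Taken together, the subcommands registered in add_parsers() and dispatched in handle_cmd() above give plugin management a package-manager feel. A short usage sketch (the plugin name is hypothetical; downloads land in the availaible-plugins directory and installs go to the custom_plugins path from the config):

    pwnagotchi plugins update                # fetch the contrib repo zip and unpack it into SAVE_DIR
    pwnagotchi plugins search wigle          # wildcard search across available and installed plugins
    pwnagotchi plugins install some-plugin   # hypothetical name; copies the .py (and any .yml) into custom_plugins
    pwnagotchi plugins enable some-plugin    # sets enabled = true for the plugin and saves the config
    pwnagotchi plugins edit some-plugin      # opens the plugin's options in $EDITOR (vim by default)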
@ -6,12 +6,11 @@ import requests
|
||||
import platform
|
||||
import shutil
|
||||
import glob
|
||||
import pkg_resources
|
||||
from threading import Lock
|
||||
|
||||
import pwnagotchi
|
||||
import pwnagotchi.plugins as plugins
|
||||
from pwnagotchi.utils import StatusFile
|
||||
from pwnagotchi.utils import StatusFile, parse_version as version_to_tuple
|
||||
|
||||
|
||||
def check(version, repo, native=True):
|
||||
@ -30,8 +29,8 @@ def check(version, repo, native=True):
|
||||
info['available'] = latest_ver = latest['tag_name'].replace('v', '')
|
||||
is_arm = info['arch'].startswith('arm')
|
||||
|
||||
local = pkg_resources.parse_version(info['current'])
|
||||
remote = pkg_resources.parse_version(latest_ver)
|
||||
local = version_to_tuple(info['current'])
|
||||
remote = version_to_tuple(latest_ver)
|
||||
if remote > local:
|
||||
if not native:
|
||||
info['url'] = "https://github.com/%s/archive/%s.zip" % (repo, latest['tag_name'])
|
||||
@ -161,6 +160,9 @@ class AutoUpdate(plugins.Plugin):
|
||||
logging.info("[update] plugin loaded.")
|
||||
|
||||
def on_internet_available(self, agent):
|
||||
if self.lock.locked():
|
||||
return
|
||||
|
||||
with self.lock:
|
||||
logging.debug("[update] internet connectivity is available (ready %s)" % self.ready)
|
||||
|
||||
|
@ -7,6 +7,7 @@ import re
|
||||
import pwnagotchi.grid as grid
|
||||
import pwnagotchi.plugins as plugins
|
||||
from pwnagotchi.utils import StatusFile, WifiInfo, extract_from_pcap
|
||||
from threading import Lock
|
||||
|
||||
|
||||
def parse_pcap(filename):
|
||||
@ -54,6 +55,7 @@ class Grid(plugins.Plugin):
|
||||
|
||||
self.unread_messages = 0
|
||||
self.total_messages = 0
|
||||
self.lock = Lock()
|
||||
|
||||
def is_excluded(self, what):
|
||||
for skip in self.options['exclude']:
|
||||
@ -122,21 +124,25 @@ class Grid(plugins.Plugin):
|
||||
def on_internet_available(self, agent):
|
||||
logging.debug("internet available")
|
||||
|
||||
try:
|
||||
grid.update_data(agent.last_session)
|
||||
except Exception as e:
|
||||
logging.error("error connecting to the pwngrid-peer service: %s" % e)
|
||||
logging.debug(e, exc_info=True)
|
||||
if self.lock.locked():
|
||||
return
|
||||
|
||||
try:
|
||||
self.check_inbox(agent)
|
||||
except Exception as e:
|
||||
logging.error("[grid] error while checking inbox: %s" % e)
|
||||
logging.debug(e, exc_info=True)
|
||||
with self.lock:
|
||||
try:
|
||||
grid.update_data(agent.last_session)
|
||||
except Exception as e:
|
||||
logging.error("error connecting to the pwngrid-peer service: %s" % e)
|
||||
logging.debug(e, exc_info=True)
|
||||
return
|
||||
|
||||
try:
|
||||
self.check_handshakes(agent)
|
||||
except Exception as e:
|
||||
logging.error("[grid] error while checking pcaps: %s" % e)
|
||||
logging.debug(e, exc_info=True)
|
||||
try:
|
||||
self.check_inbox(agent)
|
||||
except Exception as e:
|
||||
logging.error("[grid] error while checking inbox: %s" % e)
|
||||
logging.debug(e, exc_info=True)
|
||||
|
||||
try:
|
||||
self.check_handshakes(agent)
|
||||
except Exception as e:
|
||||
logging.error("[grid] error while checking pcaps: %s" % e)
|
||||
logging.debug(e, exc_info=True)
|
||||
|
pwnagotchi/plugins/default/logtail.py (new file, 282 lines)
@ -0,0 +1,282 @@
|
||||
import os
|
||||
import logging
|
||||
import threading
|
||||
from time import sleep
|
||||
from datetime import datetime,timedelta
|
||||
from pwnagotchi import plugins
|
||||
from pwnagotchi.utils import StatusFile
|
||||
from flask import render_template_string
|
||||
from flask import jsonify
|
||||
from flask import abort
|
||||
from flask import Response
|
||||
|
||||
|
||||
TEMPLATE = """
|
||||
{% extends "base.html" %}
|
||||
{% set active_page = "plugins" %}
|
||||
{% block title %}
|
||||
Logtail
|
||||
{% endblock %}
|
||||
|
||||
{% block styles %}
|
||||
{{ super() }}
|
||||
<style>
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
#filter {
|
||||
width: 100%;
|
||||
font-size: 16px;
|
||||
padding: 12px 20px 12px 40px;
|
||||
border: 1px solid #ddd;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
width: 100%;
|
||||
border: 1px solid #ddd;
|
||||
}
|
||||
th, td {
|
||||
text-align: left;
|
||||
padding: 12px;
|
||||
width: 1px;
|
||||
white-space: nowrap;
|
||||
}
|
||||
td:nth-child(2) {
|
||||
text-align: center;
|
||||
}
|
||||
thead, tr:hover {
|
||||
background-color: #f1f1f1;
|
||||
}
|
||||
tr {
|
||||
border-bottom: 1px solid #ddd;
|
||||
}
|
||||
div.sticky {
|
||||
position: -webkit-sticky;
|
||||
position: sticky;
|
||||
top: 0;
|
||||
display: table;
|
||||
width: 100%;
|
||||
}
|
||||
div.sticky > * {
|
||||
display: table-cell;
|
||||
}
|
||||
div.sticky > span {
|
||||
width: 1%;
|
||||
}
|
||||
div.sticky > input {
|
||||
width: 100%;
|
||||
}
|
||||
tr.default {
|
||||
color: black;
|
||||
}
|
||||
tr.info {
|
||||
color: black;
|
||||
}
|
||||
tr.warning {
|
||||
color: darkorange;
|
||||
}
|
||||
tr.error {
|
||||
color: crimson;
|
||||
}
|
||||
tr.debug {
|
||||
color: blueviolet;
|
||||
}
|
||||
.ui-mobile .ui-page-active {
|
||||
overflow: visible;
|
||||
overflow-x: visible;
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block script %}
|
||||
var content = document.getElementById('content');
|
||||
var filter = document.getElementById('filter');
|
||||
var filterVal = filter.value.toUpperCase();
|
||||
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', '{{ url_for('plugins') }}/logtail/stream');
|
||||
xhr.send();
|
||||
var position = 0;
|
||||
var data;
|
||||
var time;
|
||||
var level;
|
||||
var msg;
|
||||
var colorClass;
|
||||
|
||||
function handleNewData() {
|
||||
var messages = xhr.responseText.split('\\n');
|
||||
filterVal = filter.value.toUpperCase();
|
||||
messages.slice(position, -1).forEach(function(value) {
|
||||
|
||||
if (value.charAt(0) != '[') {
|
||||
msg = value;
|
||||
time = '';
|
||||
level = '';
|
||||
} else {
|
||||
data = value.split(']');
|
||||
time = data.shift() + ']';
|
||||
level = data.shift() + ']';
|
||||
msg = data.join(']');
|
||||
|
||||
switch(level) {
|
||||
case ' [INFO]':
|
||||
colorClass = 'info';
|
||||
break;
|
||||
case ' [WARNING]':
|
||||
colorClass = 'warning';
|
||||
break;
|
||||
case ' [ERROR]':
|
||||
colorClass = 'error';
|
||||
break;
|
||||
case ' [DEBUG]':
|
||||
colorClass = 'debug';
|
||||
break;
|
||||
default:
|
||||
colorClass = 'default';
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
var tr = document.createElement('tr');
|
||||
var td1 = document.createElement('td');
|
||||
var td2 = document.createElement('td');
|
||||
var td3 = document.createElement('td');
|
||||
|
||||
td1.textContent = time;
|
||||
td2.textContent = level;
|
||||
td3.textContent = msg;
|
||||
|
||||
tr.appendChild(td1);
|
||||
tr.appendChild(td2);
|
||||
tr.appendChild(td3);
|
||||
|
||||
tr.className = colorClass;
|
||||
|
||||
if (filterVal.length > 0 && value.toUpperCase().indexOf(filterVal) == -1) {
|
||||
tr.style.visibility = "collapse";
|
||||
}
|
||||
|
||||
content.appendChild(tr);
|
||||
});
|
||||
position = messages.length - 1;
|
||||
}
|
||||
|
||||
var scrollingElement = (document.scrollingElement || document.body)
|
||||
function scrollToBottom () {
|
||||
scrollingElement.scrollTop = scrollingElement.scrollHeight;
|
||||
}
|
||||
|
||||
var timer;
|
||||
var scrollElm = document.getElementById('autoscroll');
|
||||
timer = setInterval(function() {
|
||||
handleNewData();
|
||||
if (scrollElm.checked) {
|
||||
scrollToBottom();
|
||||
}
|
||||
if (xhr.readyState == XMLHttpRequest.DONE) {
|
||||
clearInterval(timer);
|
||||
}
|
||||
}, 1000);
|
||||
|
||||
var typingTimer;
|
||||
var doneTypingInterval = 1000;
|
||||
|
||||
filter.onkeyup = function() {
|
||||
clearTimeout(typingTimer);
|
||||
typingTimer = setTimeout(doneTyping, doneTypingInterval);
|
||||
}
|
||||
|
||||
filter.onkeydown = function() {
|
||||
clearTimeout(typingTimer);
|
||||
}
|
||||
|
||||
function doneTyping() {
|
||||
document.body.style.cursor = 'progress';
|
||||
var table, tr, tds, td, i, txtValue;
|
||||
filterVal = filter.value.toUpperCase();
|
||||
table = document.getElementById("content");
|
||||
tr = table.getElementsByTagName("tr");
|
||||
for (i = 0; i < tr.length; i++) {
|
||||
tds = tr[i].getElementsByTagName("td");
|
||||
if (tds) {
|
||||
for (l = 0; l < tds.length; l++) {
|
||||
td = tds[l];
|
||||
if (td) {
|
||||
txtValue = td.textContent || td.innerText;
|
||||
if (txtValue.toUpperCase().indexOf(filterVal) > -1) {
|
||||
tr[i].style.visibility = "visible";
|
||||
break;
|
||||
} else {
|
||||
tr[i].style.visibility = "collapse";
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
document.body.style.cursor = 'default';
|
||||
}
|
||||
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="sticky">
|
||||
<input type="text" id="filter" placeholder="Search for ..." title="Type in a filter">
|
||||
<span><input checked type="checkbox" id="autoscroll"></span>
|
||||
<span><label for="autoscroll"> Autoscroll to bottom</label><br></span>
|
||||
</div>
|
||||
<table id="content">
|
||||
<thead>
|
||||
<th>
|
||||
Time
|
||||
</th>
|
||||
<th>
|
||||
Level
|
||||
</th>
|
||||
<th>
|
||||
Message
|
||||
</th>
|
||||
</thead>
|
||||
</table>
|
||||
{% endblock %}
|
||||
"""
|
||||
|
||||
|
||||
class Logtail(plugins.Plugin):
|
||||
__author__ = '33197631+dadav@users.noreply.github.com'
|
||||
__version__ = '0.1.0'
|
||||
__license__ = 'GPL3'
|
||||
__description__ = 'This plugin tails the logfile.'
|
||||
|
||||
def __init__(self):
|
||||
self.lock = threading.Lock()
|
||||
self.options = dict()
|
||||
self.ready = False
|
||||
|
||||
def on_config_changed(self, config):
|
||||
self.config = config
|
||||
self.ready = True
|
||||
|
||||
def on_loaded(self):
|
||||
"""
|
||||
Gets called when the plugin gets loaded
|
||||
"""
|
||||
logging.info("Logtail plugin loaded.")
|
||||
|
||||
def on_webhook(self, path, request):
|
||||
if not self.ready:
|
||||
return "Plugin not ready"
|
||||
|
||||
if not path or path == "/":
|
||||
return render_template_string(TEMPLATE)
|
||||
|
||||
if path == 'stream':
|
||||
def generate():
|
||||
with open(self.config['main']['log']['path']) as f:
|
||||
yield f.read()
|
||||
while True:
|
||||
yield f.readline()
|
||||
|
||||
return Response(generate(), mimetype='text/plain')
|
||||
|
||||
abort(404)
|
@ -49,6 +49,8 @@ class NetPos(plugins.Plugin):
|
||||
saved_file.write(x + "\n")
|
||||
|
||||
def on_internet_available(self, agent):
|
||||
if self.lock.locked():
|
||||
return
|
||||
with self.lock:
|
||||
if self.ready:
|
||||
config = agent.config()
|
||||
|
@ -5,7 +5,7 @@ import re
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from threading import Lock
|
||||
from pwnagotchi.utils import StatusFile
|
||||
from pwnagotchi.utils import StatusFile, remove_whitelisted
|
||||
import pwnagotchi.plugins as plugins
|
||||
from json.decoder import JSONDecodeError
|
||||
|
||||
@ -20,7 +20,7 @@ class OnlineHashCrack(plugins.Plugin):
|
||||
self.ready = False
|
||||
try:
|
||||
self.report = StatusFile('/root/.ohc_uploads', data_format='json')
|
||||
except JSONDecodeError as json_err:
|
||||
except JSONDecodeError:
|
||||
os.remove('/root/.ohc_uploads')
|
||||
self.report = StatusFile('/root/.ohc_uploads', data_format='json')
|
||||
self.skip = list()
|
||||
@ -35,25 +35,11 @@ class OnlineHashCrack(plugins.Plugin):
|
||||
return
|
||||
|
||||
if 'whitelist' not in self.options:
|
||||
self.options['whitelist'] = []
|
||||
|
||||
# remove special characters from whitelist APs to match on-disk format
|
||||
self.options['whitelist'] = set(map(lambda x: re.sub(r'[^a-zA-Z0-9]', '', x), self.options['whitelist']))
|
||||
self.options['whitelist'] = list()
|
||||
|
||||
self.ready = True
|
||||
logging.info("OHC: OnlineHashCrack plugin loaded.")
|
||||
|
||||
def _filter_handshake_file(self, handshake_filename):
|
||||
try:
|
||||
basename = os.path.basename(handshake_filename)
|
||||
ssid, bssid = basename.split('_')
|
||||
# remove the ".pcap" from the bssid (which is really just the end of the filename)
|
||||
bssid = bssid[:-5]
|
||||
except:
|
||||
# something failed in our parsing of the filename. let the file through
|
||||
return True
|
||||
|
||||
return ssid not in self.options['whitelist'] and bssid not in self.options['whitelist']
|
||||
|
||||
def _upload_to_ohc(self, path, timeout=30):
|
||||
"""
|
||||
@ -96,64 +82,59 @@ class OnlineHashCrack(plugins.Plugin):
|
||||
"""
|
||||
Called in manual mode when there's internet connectivity
|
||||
"""
|
||||
|
||||
if not self.ready or self.lock.locked():
|
||||
return
|
||||
|
||||
with self.lock:
|
||||
if self.ready:
|
||||
display = agent.view()
|
||||
config = agent.config()
|
||||
reported = self.report.data_field_or('reported', default=list())
|
||||
|
||||
handshake_dir = config['bettercap']['handshakes']
|
||||
handshake_filenames = os.listdir(handshake_dir)
|
||||
handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if
|
||||
filename.endswith('.pcap')]
|
||||
|
||||
# pull out whitelisted APs
|
||||
handshake_paths = filter(lambda path: self._filter_handshake_file(path), handshake_paths)
|
||||
|
||||
handshake_new = set(handshake_paths) - set(reported) - set(self.skip)
|
||||
|
||||
if handshake_new:
|
||||
logging.info("OHC: Internet connectivity detected. Uploading new handshakes to onlinehashcrack.com")
|
||||
|
||||
for idx, handshake in enumerate(handshake_new):
|
||||
display.set('status',
|
||||
f"Uploading handshake to onlinehashcrack.com ({idx + 1}/{len(handshake_new)})")
|
||||
display.update(force=True)
|
||||
try:
|
||||
self._upload_to_ohc(handshake)
|
||||
if handshake not in reported:
|
||||
reported.append(handshake)
|
||||
self.report.update(data={'reported': reported})
|
||||
logging.info(f"OHC: Successfully uploaded {handshake}")
|
||||
except requests.exceptions.RequestException as req_e:
|
||||
self.skip.append(handshake)
|
||||
logging.error("OHC: %s", req_e)
|
||||
continue
|
||||
except OSError as os_e:
|
||||
self.skip.append(handshake)
|
||||
logging.error("OHC: %s", os_e)
|
||||
continue
|
||||
|
||||
if 'dashboard' in self.options and self.options['dashboard']:
|
||||
cracked_file = os.path.join(handshake_dir, 'onlinehashcrack.cracked')
|
||||
if os.path.exists(cracked_file):
|
||||
last_check = datetime.fromtimestamp(os.path.getmtime(cracked_file))
|
||||
if last_check is not None and ((datetime.now() - last_check).seconds / (60 * 60)) < 1:
|
||||
return
|
||||
|
||||
display = agent.view()
|
||||
config = agent.config()
|
||||
reported = self.report.data_field_or('reported', default=list())
|
||||
handshake_dir = config['bettercap']['handshakes']
|
||||
handshake_filenames = os.listdir(handshake_dir)
|
||||
handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if
|
||||
filename.endswith('.pcap')]
|
||||
# pull out whitelisted APs
|
||||
handshake_paths = remove_whitelisted(handshake_paths, self.options['whitelist'])
|
||||
handshake_new = set(handshake_paths) - set(reported) - set(self.skip)
|
||||
if handshake_new:
|
||||
logging.info("OHC: Internet connectivity detected. Uploading new handshakes to onlinehashcrack.com")
|
||||
for idx, handshake in enumerate(handshake_new):
|
||||
display.set('status',
|
||||
f"Uploading handshake to onlinehashcrack.com ({idx + 1}/{len(handshake_new)})")
|
||||
display.update(force=True)
|
||||
try:
|
||||
self._download_cracked(cracked_file)
|
||||
logging.info("OHC: Downloaded cracked passwords.")
|
||||
self._upload_to_ohc(handshake)
|
||||
if handshake not in reported:
|
||||
reported.append(handshake)
|
||||
self.report.update(data={'reported': reported})
|
||||
logging.info(f"OHC: Successfully uploaded {handshake}")
|
||||
except requests.exceptions.RequestException as req_e:
|
||||
logging.debug("OHC: %s", req_e)
|
||||
self.skip.append(handshake)
|
||||
logging.error("OHC: %s", req_e)
|
||||
continue
|
||||
except OSError as os_e:
|
||||
logging.debug("OHC: %s", os_e)
|
||||
|
||||
if 'single_files' in self.options and self.options['single_files']:
|
||||
with open(cracked_file, 'r') as cracked_list:
|
||||
for row in csv.DictReader(cracked_list):
|
||||
if row['password']:
|
||||
filename = re.sub(r'[^a-zA-Z0-9]', '', row['ESSID']) + '_' + row['BSSID'].replace(':','')
|
||||
if os.path.exists( os.path.join(handshake_dir, filename+'.pcap') ):
|
||||
with open(os.path.join(handshake_dir, filename+'.pcap.cracked'), 'w') as f:
|
||||
f.write(row['password'])
|
||||
self.skip.append(handshake)
|
||||
logging.error("OHC: %s", os_e)
|
||||
continue
|
||||
if 'dashboard' in self.options and self.options['dashboard']:
|
||||
cracked_file = os.path.join(handshake_dir, 'onlinehashcrack.cracked')
|
||||
if os.path.exists(cracked_file):
|
||||
last_check = datetime.fromtimestamp(os.path.getmtime(cracked_file))
|
||||
if last_check is not None and ((datetime.now() - last_check).seconds / (60 * 60)) < 1:
|
||||
return
|
||||
try:
|
||||
self._download_cracked(cracked_file)
|
||||
logging.info("OHC: Downloaded cracked passwords.")
|
||||
except requests.exceptions.RequestException as req_e:
|
||||
logging.debug("OHC: %s", req_e)
|
||||
except OSError as os_e:
|
||||
logging.debug("OHC: %s", os_e)
|
||||
if 'single_files' in self.options and self.options['single_files']:
|
||||
with open(cracked_file, 'r') as cracked_list:
|
||||
for row in csv.DictReader(cracked_list):
|
||||
if row['password']:
|
||||
filename = re.sub(r'[^a-zA-Z0-9]', '', row['ESSID']) + '_' + row['BSSID'].replace(':','')
|
||||
if os.path.exists( os.path.join(handshake_dir, filename+'.pcap') ):
|
||||
with open(os.path.join(handshake_dir, filename+'.pcap.cracked'), 'w') as f:
|
||||
f.write(row['password'])
|
||||
|
@ -16,11 +16,22 @@ TEMPLATE = """
|
||||
{% endblock %}
|
||||
|
||||
{% block styles %}
|
||||
{{ super() }}
|
||||
<link rel="stylesheet" href="/css/jquery.jqplot.min.css"/>
|
||||
<link rel="stylesheet" href="/css/jquery.jqplot.css"/>
|
||||
<style>
|
||||
div.chart {
|
||||
height: 400px;
|
||||
width: 100%;
|
||||
}
|
||||
div#session {
|
||||
width: 100%;
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block scripts %}
|
||||
{{ super() }}
|
||||
<script type="text/javascript" src="/js/jquery.jqplot.min.js"></script>
|
||||
<script type="text/javascript" src="/js/jquery.jqplot.js"></script>
|
||||
<script type="text/javascript" src="/js/plugins/jqplot.mobile.js"></script>
|
||||
@ -83,7 +94,6 @@ TEMPLATE = """
|
||||
tickOptions:{formatString:'%H:%M:%S'}
|
||||
},
|
||||
yaxis:{
|
||||
min: 0,
|
||||
tickOptions:{formatString:'%.2f'}
|
||||
}
|
||||
},
|
||||
@ -102,7 +112,6 @@ TEMPLATE = """
|
||||
tickOptions:{formatString:'%H:%M:%S'}
|
||||
},
|
||||
yaxis:{
|
||||
min: 0,
|
||||
tickOptions:{formatString:'%.2f'}
|
||||
}
|
||||
}
|
||||
@ -121,8 +130,9 @@ TEMPLATE = """
|
||||
var session = x.options[x.selectedIndex].text;
|
||||
loadData('/plugins/session-stats/os' + '?session=' + session, 'chart_os', 'OS', false)
|
||||
loadData('/plugins/session-stats/temp' + '?session=' + session, 'chart_temp', 'Temp', false)
|
||||
loadData('/plugins/session-stats/nums' + '?session=' + session, 'chart_nums', 'Wifi', true)
|
||||
loadData('/plugins/session-stats/wifi' + '?session=' + session, 'chart_wifi', 'Wifi', true)
|
||||
loadData('/plugins/session-stats/duration' + '?session=' + session, 'chart_duration', 'Sleeping', true)
|
||||
loadData('/plugins/session-stats/reward' + '?session=' + session, 'chart_reward', 'Reward', false)
|
||||
loadData('/plugins/session-stats/epoch' + '?session=' + session, 'chart_epoch', 'Epochs', false)
|
||||
}
|
||||
|
||||
@ -134,14 +144,15 @@ TEMPLATE = """
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<select id="session" style="width:100%;">
|
||||
<select id="session">
|
||||
<option selected>Current</option>
|
||||
</select>
|
||||
<div id="chart_os" style="height:400px;width:100%; "></div>
|
||||
<div id="chart_temp" style="height:400px;width:100%; "></div>
|
||||
<div id="chart_nums" style="height:400px;width:100%; "></div>
|
||||
<div id="chart_duration" style="height:400px;width:100%; "></div>
|
||||
<div id="chart_epoch" style="height:400px;width:100%; "></div>
|
||||
<div id="chart_os" class="chart"></div>
|
||||
<div id="chart_temp" class="chart"></div>
|
||||
<div id="chart_wifi" class="chart"></div>
|
||||
<div id="chart_duration" class="chart"></div>
|
||||
<div id="chart_reward" class="chart"></div>
|
||||
<div id="chart_epoch" class="chart"></div>
|
||||
{% endblock %}
|
||||
"""
|
||||
|
||||
@ -189,9 +200,6 @@ class SessionStats(plugins.Plugin):
|
||||
data_format='json')
|
||||
logging.info("Session-stats plugin loaded.")
|
||||
|
||||
def on_unloaded(self, ui):
|
||||
pass
|
||||
|
||||
def on_epoch(self, agent, epoch, epoch_data):
|
||||
"""
|
||||
Save the epoch_data to self.stats
|
||||
@ -220,7 +228,7 @@ class SessionStats(plugins.Plugin):
|
||||
extract_keys = ['cpu_load','mem_usage',]
|
||||
elif path == "temp":
|
||||
extract_keys = ['temperature']
|
||||
elif path == "nums":
|
||||
elif path == "wifi":
|
||||
extract_keys = [
|
||||
'missed_interactions',
|
||||
'num_hops',
|
||||
@ -236,10 +244,13 @@ class SessionStats(plugins.Plugin):
|
||||
'duration_secs',
|
||||
'slept_for_secs',
|
||||
]
|
||||
elif path == "reward":
|
||||
extract_keys = [
|
||||
'reward',
|
||||
]
|
||||
elif path == "epoch":
|
||||
extract_keys = [
|
||||
'active_for_epochs',
|
||||
'blind_for_epochs',
|
||||
]
|
||||
elif path == "session":
|
||||
return jsonify({'files': os.listdir(self.options['save_directory'])})
|
||||
|
@ -139,7 +139,7 @@ class Switcher(plugins.Plugin):
|
||||
'bored', 'sad', 'excited', 'lonely', 'rebooting', 'wait',
|
||||
'sleep', 'wifi_update', 'unfiltered_ap_list', 'association',
|
||||
'deauthentication', 'channel_hop', 'handshake', 'epoch',
|
||||
'peer_detected', 'peer_lost']
|
||||
'peer_detected', 'peer_lost', 'config_changed']
|
||||
|
||||
for m in methods:
|
||||
setattr(Switcher, 'on_%s' % m, partial(self.trigger, m))
|
||||
|
@ -8,174 +8,183 @@ from flask import abort
|
||||
from flask import render_template_string
|
||||
|
||||
INDEX = """
|
||||
<html>
|
||||
<head>
|
||||
<meta name="viewport" content="width=device-width, user-scalable=0" />
|
||||
<title>
|
||||
webcfg
|
||||
</title>
|
||||
<style>
|
||||
#divTop {
|
||||
position: -webkit-sticky;
|
||||
position: sticky;
|
||||
top: 0px;
|
||||
width: 100%;
|
||||
font-size: 16px;
|
||||
padding: 5px;
|
||||
border: 1px solid #ddd;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
{% extends "base.html" %}
|
||||
{% set active_page = "plugins" %}
|
||||
{% block title %}
|
||||
Webcfg
|
||||
{% endblock %}
|
||||
|
||||
#searchText {
|
||||
width: 100%;
|
||||
}
|
||||
{% block meta %}
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, user-scalable=0" />
|
||||
{% endblock %}
|
||||
|
||||
table {
|
||||
table-layout: auto;
|
||||
width: 100%;
|
||||
}
|
||||
{% block styles %}
|
||||
{{ super() }}
|
||||
<style>
|
||||
#divTop {
|
||||
position: -webkit-sticky;
|
||||
position: sticky;
|
||||
top: 0px;
|
||||
width: 100%;
|
||||
font-size: 16px;
|
||||
padding: 5px;
|
||||
border: 1px solid #ddd;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
table, th, td {
|
||||
border: 1px solid black;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
#searchText {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
th, td {
|
||||
padding: 15px;
|
||||
text-align: left;
|
||||
}
|
||||
table {
|
||||
table-layout: auto;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
table tr:nth-child(even) {
|
||||
background-color: #eee;
|
||||
}
|
||||
table, th, td {
|
||||
border: 1px solid black;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
table tr:nth-child(odd) {
|
||||
background-color: #fff;
|
||||
}
|
||||
th, td {
|
||||
padding: 15px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
table th {
|
||||
background-color: black;
|
||||
color: white;
|
||||
}
|
||||
table tr:nth-child(even) {
|
||||
background-color: #eee;
|
||||
}
|
||||
|
||||
.remove {
|
||||
background-color: #f44336;
|
||||
color: white;
|
||||
border: 2px solid #f44336;
|
||||
padding: 4px 8px;
|
||||
text-align: center;
|
||||
text-decoration: none;
|
||||
display: inline-block;
|
||||
font-size: 12px;
|
||||
margin: 4px 2px;
|
||||
-webkit-transition-duration: 0.4s; /* Safari */
|
||||
transition-duration: 0.4s;
|
||||
cursor: pointer;
|
||||
}
|
||||
table tr:nth-child(odd) {
|
||||
background-color: #fff;
|
||||
}
|
||||
|
||||
.remove:hover {
|
||||
background-color: white;
|
||||
color: black;
|
||||
}
|
||||
table th {
|
||||
background-color: black;
|
||||
color: white;
|
||||
}
|
||||
|
||||
#btnSave {
|
||||
position: -webkit-sticky;
|
||||
position: sticky;
|
||||
bottom: 0px;
|
||||
width: 100%;
|
||||
background-color: #0061b0;
|
||||
border: none;
|
||||
color: white;
|
||||
padding: 15px 32px;
|
||||
text-align: center;
|
||||
text-decoration: none;
|
||||
display: inline-block;
|
||||
font-size: 16px;
|
||||
cursor: pointer;
|
||||
float: right;
|
||||
}
|
||||
.remove {
|
||||
background-color: #f44336;
|
||||
color: white;
|
||||
border: 2px solid #f44336;
|
||||
padding: 4px 8px;
|
||||
text-align: center;
|
||||
text-decoration: none;
|
||||
display: inline-block;
|
||||
font-size: 12px;
|
||||
margin: 4px 2px;
|
||||
-webkit-transition-duration: 0.4s; /* Safari */
|
||||
transition-duration: 0.4s;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
#divTop {
|
||||
display: table;
|
||||
width: 100%;
|
||||
}
|
||||
#divTop > * {
|
||||
display: table-cell;
|
||||
}
|
||||
#divTop > span {
|
||||
width: 1%;
|
||||
}
|
||||
#divTop > input {
|
||||
width: 100%;
|
||||
}
|
||||
.remove:hover {
|
||||
background-color: white;
|
||||
color: black;
|
||||
}
|
||||
|
||||
@media screen and (max-width:700px) {
|
||||
table, tr, td {
|
||||
padding:0;
|
||||
border:1px solid black;
|
||||
}
|
||||
#btnSave {
|
||||
position: -webkit-sticky;
|
||||
position: sticky;
|
||||
bottom: 0px;
|
||||
width: 100%;
|
||||
background-color: #0061b0;
|
||||
border: none;
|
||||
color: white;
|
||||
padding: 15px 32px;
|
||||
text-align: center;
|
||||
text-decoration: none;
|
||||
display: inline-block;
|
||||
font-size: 16px;
|
||||
cursor: pointer;
|
||||
float: right;
|
||||
}
|
||||
|
||||
table {
|
||||
border:none;
|
||||
}
|
||||
#divTop {
|
||||
display: table;
|
||||
width: 100%;
|
||||
}
|
||||
#divTop > * {
|
||||
display: table-cell;
|
||||
}
|
||||
#divTop > span {
|
||||
width: 1%;
|
||||
}
|
||||
#divTop > input {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
tr:first-child, thead, th {
|
||||
display:none;
|
||||
border:none;
|
||||
}
|
||||
@media screen and (max-width:700px) {
|
||||
table, tr, td {
|
||||
padding:0;
|
||||
border:1px solid black;
|
||||
}
|
||||
|
||||
tr {
|
||||
float: left;
|
||||
width: 100%;
|
||||
margin-bottom: 2em;
|
||||
}
|
||||
table {
|
||||
border:none;
|
||||
}
|
||||
|
||||
table tr:nth-child(odd) {
|
||||
background-color: #eee;
|
||||
}
|
||||
tr:first-child, thead, th {
|
||||
display:none;
|
||||
border:none;
|
||||
}
|
||||
|
||||
td {
|
||||
float: left;
|
||||
width: 100%;
|
||||
padding:1em;
|
||||
}
|
||||
tr {
|
||||
float: left;
|
||||
width: 100%;
|
||||
margin-bottom: 2em;
|
||||
}
|
||||
|
||||
td::before {
|
||||
content:attr(data-label);
|
||||
word-wrap: break-word;
|
||||
background: #eee;
|
||||
border-right:2px solid black;
|
||||
width: 20%;
|
||||
float:left;
|
||||
padding:1em;
|
||||
font-weight: bold;
|
||||
margin:-1em 1em -1em -1em;
|
||||
}
|
||||
table tr:nth-child(odd) {
|
||||
background-color: #eee;
|
||||
}
|
||||
|
||||
.del_btn_wrapper {
|
||||
content:attr(data-label);
|
||||
word-wrap: break-word;
|
||||
background: #eee;
|
||||
border-right:2px solid black;
|
||||
width: 20%;
|
||||
float:left;
|
||||
padding:1em;
|
||||
font-weight: bold;
|
||||
margin:-1em 1em -1em -1em;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="divTop">
|
||||
<input type="text" id="searchText" onkeyup="filterTable()" placeholder="Search for options ..." title="Type an option name">
|
||||
<span><select id="selAddType"><option value="text">Text</option><option value="number">Number</option></select></span>
|
||||
<span><button id="btnAdd" type="button" onclick="addOption()">+</button></span>
|
||||
</div>
|
||||
<div id="content"></div>
|
||||
<button id="btnSave" type="button" onclick="saveConfig()">Save</button>
|
||||
</body>
|
||||
<script type="text/javascript">
|
||||
td {
|
||||
float: left;
|
||||
width: 100%;
|
||||
padding:1em;
|
||||
}
|
||||
|
||||
td::before {
|
||||
content:attr(data-label);
|
||||
word-wrap: break-word;
|
||||
background: #eee;
|
||||
border-right:2px solid black;
|
||||
width: 20%;
|
||||
float:left;
|
||||
padding:1em;
|
||||
font-weight: bold;
|
||||
margin:-1em 1em -1em -1em;
|
||||
}
|
||||
|
||||
.del_btn_wrapper {
|
||||
content:attr(data-label);
|
||||
word-wrap: break-word;
|
||||
background: #eee;
|
||||
border-right:2px solid black;
|
||||
width: 20%;
|
||||
float:left;
|
||||
padding:1em;
|
||||
font-weight: bold;
|
||||
margin:-1em 1em -1em -1em;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="divTop">
|
||||
<input type="text" id="searchText" placeholder="Search for options ..." title="Type an option name">
|
||||
<span><select id="selAddType"><option value="text">Text</option><option value="number">Number</option></select></span>
|
||||
<span><button id="btnAdd" type="button" onclick="addOption()">+</button></span>
|
||||
</div>
|
||||
<button id="btnSave" type="button" onclick="saveConfig()">Save and restart</button>
|
||||
<div id="content"></div>
|
||||
{% endblock %}
|
||||
|
||||
{% block script %}
|
||||
function addOption() {
|
||||
var input, table, tr, td, divDelBtn, btnDel, selType, selTypeVal;
|
||||
input = document.getElementById("searchText");
|
||||
@ -231,11 +240,10 @@ INDEX = """
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function filterTable(){
|
||||
var input, filter, table, tr, td, i, txtValue;
|
||||
input = document.getElementById("searchText");
|
||||
filter = input.value.toUpperCase();
|
||||
var searchInput = document.getElementById("searchText");
|
||||
searchInput.onkeyup = function() {
|
||||
var filter, table, tr, td, i, txtValue;
|
||||
filter = searchInput.value.toUpperCase();
|
||||
table = document.getElementById("tableOptions");
|
||||
if (table) {
|
||||
tr = table.getElementsByTagName("tr");
|
||||
@ -446,8 +454,7 @@ INDEX = """
|
||||
divContent.innerHTML = "";
|
||||
divContent.appendChild(table);
|
||||
});
|
||||
</script>
|
||||
</html>
|
||||
{% endblock %}
|
||||
"""
|
||||
|
||||
def serializer(obj):
|
||||
@ -463,16 +470,17 @@ class WebConfig(plugins.Plugin):
|
||||
|
||||
def __init__(self):
|
||||
self.ready = False
|
||||
self.mode = 'MANU'
|
||||
|
||||
def on_config_changed(self, config):
|
||||
self.config = config
|
||||
self.ready = True
|
||||
|
||||
def on_ready(self, agent):
|
||||
self.config = agent.config()
|
||||
self.mode = "MANU" if agent.mode == "manual" else "AUTO"
|
||||
self.ready = True
|
||||
self.mode = 'MANU' if agent.mode == 'manual' else 'AUTO'
|
||||
|
||||
def on_internet_available(self, agent):
|
||||
self.config = agent.config()
|
||||
self.mode = "MANU" if agent.mode == "manual" else "AUTO"
|
||||
self.ready = True
|
||||
self.mode = 'MANU' if agent.mode == 'manual' else 'AUTO'
|
||||
|
||||
def on_loaded(self):
|
||||
"""
|
||||
|
@ -10,12 +10,12 @@ from dateutil.parser import parse
|
||||
|
||||
'''
|
||||
webgpsmap shows existing position data stored in your /handshakes/ directory
|
||||
|
||||
|
||||
the plugin does the following:
|
||||
- search for *.pcap files in your /handshakes/ dir
|
||||
- for every found .pcap file it looks for a .geo.json or .gps.json or .paw-gps.json file with
|
||||
latitude+longitude data inside and shows this position on the map
|
||||
- if also an .cracked file with a plaintext password inside exist, it reads the content and shows the
|
||||
- if also an .cracked file with a plaintext password inside exist, it reads the content and shows the
|
||||
position as green instead of red and the password inside the infobox of the position
|
||||
special:
|
||||
you can save the html-map as one file for offline use or host on your own webspace with "/plugins/webgpsmap/offlinemap"
|
||||
@ -35,8 +35,8 @@ class Webgpsmap(plugins.Plugin):
|
||||
def __init__(self):
|
||||
self.ready = False
|
||||
|
||||
def on_ready(self, agent):
|
||||
self.config = agent.config()
|
||||
def on_config_changed(self, config):
|
||||
self.config = config
|
||||
self.ready = True
|
||||
|
||||
def on_loaded(self):
|
||||
|
@ -1,12 +1,14 @@
|
||||
import os
|
||||
import logging
|
||||
import json
|
||||
from io import StringIO
|
||||
import csv
|
||||
from datetime import datetime
|
||||
import requests
|
||||
from pwnagotchi.utils import WifiInfo, FieldNotFoundError, extract_from_pcap, StatusFile
|
||||
import pwnagotchi.plugins as plugins
|
||||
|
||||
from io import StringIO
|
||||
from datetime import datetime
|
||||
from pwnagotchi.utils import WifiInfo, FieldNotFoundError, extract_from_pcap, StatusFile, remove_whitelisted
|
||||
from threading import Lock
|
||||
from pwnagotchi import plugins
|
||||
|
||||
|
||||
def _extract_gps_data(path):
|
||||
@ -100,90 +102,90 @@ class Wigle(plugins.Plugin):
|
||||
self.ready = False
|
||||
self.report = StatusFile('/root/.wigle_uploads', data_format='json')
|
||||
self.skip = list()
|
||||
self.lock = Lock()
|
||||
|
||||
def on_loaded(self):
|
||||
if 'api_key' not in self.options or ('api_key' in self.options and self.options['api_key'] is None):
|
||||
logging.error("WIGLE: api_key isn't set. Can't upload to wigle.net")
|
||||
return
|
||||
|
||||
if not 'whitelist' in self.options:
|
||||
self.options['whitelist'] = list()
|
||||
|
||||
self.ready = True
|
||||
|
||||
def on_internet_available(self, agent):
|
||||
from scapy.all import Scapy_Exception
|
||||
"""
|
||||
Called in manual mode when there's internet connectivity
|
||||
"""
|
||||
if self.ready:
|
||||
config = agent.config()
|
||||
display = agent.view()
|
||||
reported = self.report.data_field_or('reported', default=list())
|
||||
if not self.ready or self.lock.locked():
|
||||
return
|
||||
|
||||
handshake_dir = config['bettercap']['handshakes']
|
||||
all_files = os.listdir(handshake_dir)
|
||||
all_gps_files = [os.path.join(handshake_dir, filename)
|
||||
for filename in all_files
|
||||
if filename.endswith('.gps.json')]
|
||||
new_gps_files = set(all_gps_files) - set(reported) - set(self.skip)
|
||||
from scapy.all import Scapy_Exception
|
||||
|
||||
if new_gps_files:
|
||||
logging.info("WIGLE: Internet connectivity detected. Uploading new handshakes to wigle.net")
|
||||
config = agent.config()
|
||||
display = agent.view()
|
||||
reported = self.report.data_field_or('reported', default=list())
|
||||
handshake_dir = config['bettercap']['handshakes']
|
||||
all_files = os.listdir(handshake_dir)
|
||||
all_gps_files = [os.path.join(handshake_dir, filename)
|
||||
for filename in all_files
|
||||
if filename.endswith('.gps.json')]
|
||||
|
||||
csv_entries = list()
|
||||
no_err_entries = list()
|
||||
|
||||
for gps_file in new_gps_files:
|
||||
pcap_filename = gps_file.replace('.gps.json', '.pcap')
|
||||
|
||||
if not os.path.exists(pcap_filename):
|
||||
logging.error("WIGLE: Can't find pcap for %s", gps_file)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
|
||||
try:
|
||||
gps_data = _extract_gps_data(gps_file)
|
||||
except OSError as os_err:
|
||||
logging.error("WIGLE: %s", os_err)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
except json.JSONDecodeError as json_err:
|
||||
logging.error("WIGLE: %s", json_err)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
|
||||
if gps_data['Latitude'] == 0 and gps_data['Longitude'] == 0:
|
||||
logging.warning("WIGLE: Not enough gps-information for %s. Trying again next time.", gps_file)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
|
||||
try:
|
||||
pcap_data = extract_from_pcap(pcap_filename, [WifiInfo.BSSID,
|
||||
WifiInfo.ESSID,
|
||||
WifiInfo.ENCRYPTION,
|
||||
WifiInfo.CHANNEL,
|
||||
WifiInfo.RSSI])
|
||||
except FieldNotFoundError:
|
||||
logging.error("WIGLE: Could not extract all information. Skip %s", gps_file)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
except Scapy_Exception as sc_e:
|
||||
logging.error("WIGLE: %s", sc_e)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
|
||||
new_entry = _transform_wigle_entry(gps_data, pcap_data)
|
||||
csv_entries.append(new_entry)
|
||||
no_err_entries.append(gps_file)
|
||||
|
||||
if csv_entries:
|
||||
display.set('status', "Uploading gps-data to wigle.net ...")
|
||||
display.update(force=True)
|
||||
try:
|
||||
_send_to_wigle(csv_entries, self.options['api_key'])
|
||||
reported += no_err_entries
|
||||
self.report.update(data={'reported': reported})
|
||||
logging.info("WIGLE: Successfully uploaded %d files", len(no_err_entries))
|
||||
except requests.exceptions.RequestException as re_e:
|
||||
self.skip += no_err_entries
|
||||
logging.error("WIGLE: Got an exception while uploading %s", re_e)
|
||||
except OSError as os_e:
|
||||
self.skip += no_err_entries
|
||||
logging.error("WIGLE: Got the following error: %s", os_e)
|
||||
all_gps_files = remove_whitelisted(all_gps_files, self.options['whitelist'])
|
||||
new_gps_files = set(all_gps_files) - set(reported) - set(self.skip)
|
||||
if new_gps_files:
|
||||
logging.info("WIGLE: Internet connectivity detected. Uploading new handshakes to wigle.net")
|
||||
csv_entries = list()
|
||||
no_err_entries = list()
|
||||
for gps_file in new_gps_files:
|
||||
pcap_filename = gps_file.replace('.gps.json', '.pcap')
|
||||
if not os.path.exists(pcap_filename):
|
||||
logging.error("WIGLE: Can't find pcap for %s", gps_file)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
try:
|
||||
gps_data = _extract_gps_data(gps_file)
|
||||
except OSError as os_err:
|
||||
logging.error("WIGLE: %s", os_err)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
except json.JSONDecodeError as json_err:
|
||||
logging.error("WIGLE: %s", json_err)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
if gps_data['Latitude'] == 0 and gps_data['Longitude'] == 0:
|
||||
logging.warning("WIGLE: Not enough gps-information for %s. Trying again next time.", gps_file)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
try:
|
||||
pcap_data = extract_from_pcap(pcap_filename, [WifiInfo.BSSID,
|
||||
WifiInfo.ESSID,
|
||||
WifiInfo.ENCRYPTION,
|
||||
WifiInfo.CHANNEL,
|
||||
WifiInfo.RSSI])
|
||||
except FieldNotFoundError:
|
||||
logging.error("WIGLE: Could not extract all information. Skip %s", gps_file)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
except Scapy_Exception as sc_e:
|
||||
logging.error("WIGLE: %s", sc_e)
|
||||
self.skip.append(gps_file)
|
||||
continue
|
||||
new_entry = _transform_wigle_entry(gps_data, pcap_data)
|
||||
csv_entries.append(new_entry)
|
||||
no_err_entries.append(gps_file)
|
||||
if csv_entries:
|
||||
display.set('status', "Uploading gps-data to wigle.net ...")
|
||||
display.update(force=True)
|
||||
try:
|
||||
_send_to_wigle(csv_entries, self.options['api_key'])
|
||||
reported += no_err_entries
|
||||
self.report.update(data={'reported': reported})
|
||||
logging.info("WIGLE: Successfully uploaded %d files", len(no_err_entries))
|
||||
except requests.exceptions.RequestException as re_e:
|
||||
self.skip += no_err_entries
|
||||
logging.error("WIGLE: Got an exception while uploading %s", re_e)
|
||||
except OSError as os_e:
|
||||
self.skip += no_err_entries
|
||||
logging.error("WIGLE: Got the following error: %s", os_e)
|
||||
|
@ -3,7 +3,7 @@ import logging
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from threading import Lock
|
||||
from pwnagotchi.utils import StatusFile
|
||||
from pwnagotchi.utils import StatusFile, remove_whitelisted
|
||||
from pwnagotchi import plugins
|
||||
from json.decoder import JSONDecodeError
|
||||
|
||||
@ -19,7 +19,7 @@ class WpaSec(plugins.Plugin):
|
||||
self.lock = Lock()
|
||||
try:
|
||||
self.report = StatusFile('/root/.wpa_sec_uploads', data_format='json')
|
||||
except JSONDecodeError as json_err:
|
||||
except JSONDecodeError:
|
||||
os.remove("/root/.wpa_sec_uploads")
|
||||
self.report = StatusFile('/root/.wpa_sec_uploads', data_format='json')
|
||||
self.options = dict()
|
||||
@ -78,54 +78,57 @@ class WpaSec(plugins.Plugin):
|
||||
logging.error("WPA_SEC: API-URL isn't set. Can't upload, no endpoint configured.")
|
||||
return
|
||||
|
||||
if 'whitelist' not in self.options:
|
||||
self.options['whitelist'] = list()
|
||||
|
||||
self.ready = True
|
||||
|
||||
def on_internet_available(self, agent):
|
||||
"""
|
||||
Called in manual mode when there's internet connectivity
|
||||
"""
|
||||
if not self.ready or self.lock.locked():
|
||||
return
|
||||
|
||||
with self.lock:
|
||||
if self.ready:
|
||||
config = agent.config()
|
||||
display = agent.view()
|
||||
reported = self.report.data_field_or('reported', default=list())
|
||||
|
||||
handshake_dir = config['bettercap']['handshakes']
|
||||
handshake_filenames = os.listdir(handshake_dir)
|
||||
handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if
|
||||
filename.endswith('.pcap')]
|
||||
handshake_new = set(handshake_paths) - set(reported) - set(self.skip)
|
||||
|
||||
if handshake_new:
|
||||
logging.info("WPA_SEC: Internet connectivity detected. Uploading new handshakes to wpa-sec.stanev.org")
|
||||
|
||||
for idx, handshake in enumerate(handshake_new):
|
||||
display.set('status', f"Uploading handshake to wpa-sec.stanev.org ({idx + 1}/{len(handshake_new)})")
|
||||
display.update(force=True)
|
||||
try:
|
||||
self._upload_to_wpasec(handshake)
|
||||
reported.append(handshake)
|
||||
self.report.update(data={'reported': reported})
|
||||
logging.info("WPA_SEC: Successfully uploaded %s", handshake)
|
||||
except requests.exceptions.RequestException as req_e:
|
||||
self.skip.append(handshake)
|
||||
logging.error("WPA_SEC: %s", req_e)
|
||||
continue
|
||||
except OSError as os_e:
|
||||
logging.error("WPA_SEC: %s", os_e)
|
||||
continue
|
||||
|
||||
if 'download_results' in self.options and self.options['download_results']:
|
||||
cracked_file = os.path.join(handshake_dir, 'wpa-sec.cracked.potfile')
|
||||
if os.path.exists(cracked_file):
|
||||
last_check = datetime.fromtimestamp(os.path.getmtime(cracked_file))
|
||||
if last_check is not None and ((datetime.now() - last_check).seconds / (60 * 60)) < 1:
|
||||
return
|
||||
config = agent.config()
|
||||
display = agent.view()
|
||||
reported = self.report.data_field_or('reported', default=list())
|
||||
handshake_dir = config['bettercap']['handshakes']
|
||||
handshake_filenames = os.listdir(handshake_dir)
|
||||
handshake_paths = [os.path.join(handshake_dir, filename) for filename in handshake_filenames if
|
||||
filename.endswith('.pcap')]
|
||||
handshake_paths = remove_whitelisted(handshake_paths, self.options['whitelist'])
|
||||
handshake_new = set(handshake_paths) - set(reported) - set(self.skip)
|
||||
|
||||
if handshake_new:
|
||||
logging.info("WPA_SEC: Internet connectivity detected. Uploading new handshakes to wpa-sec.stanev.org")
|
||||
for idx, handshake in enumerate(handshake_new):
|
||||
display.set('status', f"Uploading handshake to wpa-sec.stanev.org ({idx + 1}/{len(handshake_new)})")
|
||||
display.update(force=True)
|
||||
try:
|
||||
self._download_from_wpasec(os.path.join(handshake_dir, 'wpa-sec.cracked.potfile'))
|
||||
logging.info("WPA_SEC: Downloaded cracked passwords.")
|
||||
self._upload_to_wpasec(handshake)
|
||||
reported.append(handshake)
|
||||
self.report.update(data={'reported': reported})
|
||||
logging.info("WPA_SEC: Successfully uploaded %s", handshake)
|
||||
except requests.exceptions.RequestException as req_e:
|
||||
logging.debug("WPA_SEC: %s", req_e)
|
||||
self.skip.append(handshake)
|
||||
logging.error("WPA_SEC: %s", req_e)
|
||||
continue
|
||||
except OSError as os_e:
|
||||
logging.debug("WPA_SEC: %s", os_e)
|
||||
logging.error("WPA_SEC: %s", os_e)
|
||||
continue
|
||||
|
||||
if 'download_results' in self.options and self.options['download_results']:
|
||||
cracked_file = os.path.join(handshake_dir, 'wpa-sec.cracked.potfile')
|
||||
if os.path.exists(cracked_file):
|
||||
last_check = datetime.fromtimestamp(os.path.getmtime(cracked_file))
|
||||
if last_check is not None and ((datetime.now() - last_check).seconds / (60 * 60)) < 1:
|
||||
return
|
||||
try:
|
||||
self._download_from_wpasec(os.path.join(handshake_dir, 'wpa-sec.cracked.potfile'))
|
||||
logging.info("WPA_SEC: Downloaded cracked passwords.")
|
||||
except requests.exceptions.RequestException as req_e:
|
||||
logging.debug("WPA_SEC: %s", req_e)
|
||||
except OSError as os_e:
|
||||
logging.debug("WPA_SEC: %s", os_e)
|
||||
|
@ -1,37 +1,26 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
{% block head %}
|
||||
<head>
|
||||
{% block meta %}
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
{% endblock %}
|
||||
|
||||
<title>
|
||||
{% block title %}
|
||||
{% endblock %}
|
||||
</title>
|
||||
|
||||
{% block styles %}
|
||||
<link rel="stylesheet" href="/js/jquery.mobile/jquery.mobile-1.4.5.min.css"/>
|
||||
<link rel="stylesheet" type="text/css" href="/css/style.css"/>
|
||||
{% block styles %}
|
||||
{% endblock %}
|
||||
|
||||
<script type="text/javascript" src="/js/jquery-1.12.4.min.js"></script>
|
||||
<script type="text/javascript" src="/js/jquery.mobile/jquery.mobile-1.4.5.min.js"></script>
|
||||
<script type="text/javascript" src="/js/jquery.timeago.js"></script>
|
||||
<script type="text/javascript" src="/js/jquery-qrcode-0.17.0.min.js"></script>
|
||||
{% block scripts %}
|
||||
{% endblock %}
|
||||
|
||||
<script type="text/javascript">
|
||||
$.mobile.ajaxEnabled = false;
|
||||
|
||||
jQuery(document).ready(function() {
|
||||
jQuery("time.timeago").timeago();
|
||||
});
|
||||
|
||||
{% block script %}
|
||||
{% endblock %}
|
||||
</script>
|
||||
</head>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
<body>
|
||||
<div data-role="page">
|
||||
|
||||
@ -72,6 +61,23 @@
|
||||
{% endblock %}
|
||||
|
||||
</div>
|
||||
{% block scripts %}
|
||||
<script type="text/javascript" src="/js/jquery-1.12.4.min.js"></script>
|
||||
<script type="text/javascript" src="/js/jquery.mobile/jquery.mobile-1.4.5.min.js"></script>
|
||||
<script type="text/javascript" src="/js/jquery.timeago.js"></script>
|
||||
<script type="text/javascript" src="/js/jquery-qrcode-0.17.0.min.js"></script>
|
||||
<script type="text/javascript">
|
||||
$.mobile.ajaxEnabled = false;
|
||||
$.mobile.pushStateEnabled = false;
|
||||
|
||||
jQuery(document).ready(function() {
|
||||
jQuery("time.timeago").timeago();
|
||||
});
|
||||
|
||||
{% block script %}
|
||||
{% endblock %}
|
||||
</script>
|
||||
{% endblock %}
|
||||
</body>
|
||||
{% endblock %}
|
||||
</html>
|
||||
|
@ -1,20 +1,20 @@
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
|
||||
import logging
|
||||
import glob
|
||||
import os
|
||||
import time
|
||||
import subprocess
|
||||
import yaml
|
||||
|
||||
import json
|
||||
import shutil
|
||||
import toml
|
||||
import sys
|
||||
import re
|
||||
|
||||
import pwnagotchi
|
||||
from toml.encoder import TomlEncoder, _dump_str
|
||||
from pwnagotchi.fs import ensure_write
|
||||
from zipfile import ZipFile
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class DottedTomlEncoder(TomlEncoder):
|
||||
@ -25,6 +25,19 @@ class DottedTomlEncoder(TomlEncoder):
|
||||
def __init__(self, _dict=dict):
|
||||
super(DottedTomlEncoder, self).__init__(_dict)
|
||||
|
||||
def dump_list(self, v):
|
||||
retval = "["
|
||||
# 1 line if its just 1 item; therefore no newline
|
||||
if len(v) > 1:
|
||||
retval += "\n"
|
||||
for u in v:
|
||||
retval += " " + str(self.dump_value(u)) + ",\n"
|
||||
# 1 line if its just 1 item; remove newline
|
||||
if len(v) <= 1:
|
||||
retval = retval.rstrip("\n")
|
||||
retval += "]"
|
||||
return retval
|
||||
|
||||
def dump_sections(self, o, sup):
|
||||
retstr = ""
|
||||
pre = ""
|
||||
@ -41,12 +54,71 @@ class DottedTomlEncoder(TomlEncoder):
|
||||
if isinstance(value, dict):
|
||||
toadd, _ = self.dump_sections(value, pre + qsection)
|
||||
retstr += toadd
|
||||
# separate sections
|
||||
if not retstr.endswith('\n\n'):
|
||||
retstr += '\n'
|
||||
else:
|
||||
retstr += (pre + qsection + " = " +
|
||||
str(self.dump_value(value)) + '\n')
|
||||
return (retstr, self._dict())
|
||||
|
||||
|
||||
def parse_version(version):
|
||||
"""
|
||||
Converts a version str to tuple, so that versions can be compared
|
||||
"""
|
||||
return tuple(version.split('.'))
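Because the components stay strings, the resulting tuples compare lexicographically per field; a tiny illustration (not part of the diff, values invented):

from pwnagotchi.utils import parse_version

parse_version('1.4.3')                            # ('1', '4', '3')
parse_version('1.4.3') < parse_version('1.5.0')   # True
parse_version('1.10.0') < parse_version('1.9.0')  # also True, since '10' < '9' as strings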
|
||||
|
||||
|
||||
def remove_whitelisted(list_of_handshakes, list_of_whitelisted_strings, valid_on_error=True):
|
||||
"""
|
||||
Removes a given list of whitelisted handshakes from a path list
|
||||
"""
|
||||
filtered = list()
|
||||
def normalize(name):
|
||||
"""
|
||||
Only allow alpha/nums
|
||||
"""
|
||||
return str.lower(''.join(c for c in name if c.isalnum()))
|
||||
|
||||
for handshake in list_of_handshakes:
|
||||
try:
|
||||
normalized_handshake = normalize(os.path.basename(handshake).rstrip('.pcap'))
|
||||
for whitelist in list_of_whitelisted_strings:
|
||||
normalized_whitelist = normalize(whitelist)
|
||||
if normalized_whitelist in normalized_handshake:
|
||||
break
|
||||
else:
|
||||
filtered.append(handshake)
|
||||
except Exception:
|
||||
if valid_on_error:
|
||||
filtered.append(handshake)
|
||||
return filtered
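A minimal usage sketch of the remove_whitelisted() helper above, as the upload plugins (onlinehashcrack, wigle, wpa-sec) now call it. The paths and SSID below are invented for illustration:

from pwnagotchi.utils import remove_whitelisted

# hypothetical captures in the bettercap handshakes directory
handshake_paths = [
    '/root/handshakes/HomeWifi_aabbccddeeff.pcap',
    '/root/handshakes/CoffeeShop_112233445566.pcap',
]

# normalize() lowercases and strips punctuation, so a whitelist entry like
# 'Home-Wifi' still matches the on-disk name 'HomeWifi_aabbccddeeff'
filtered = remove_whitelisted(handshake_paths, ['Home-Wifi'])
# filtered == ['/root/handshakes/CoffeeShop_112233445566.pcap']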
|
||||
|
||||
|
||||
|
||||
def download_file(url, destination, chunk_size=128):
|
||||
import requests
|
||||
resp = requests.get(url)
|
||||
resp.raise_for_status()
|
||||
|
||||
with open(destination, 'wb') as fd:
|
||||
for chunk in resp.iter_content(chunk_size):
|
||||
fd.write(chunk)
|
||||
|
||||
def unzip(file, destination, strip_dirs=0):
|
||||
os.makedirs(destination, exist_ok=True)
|
||||
with ZipFile(file, 'r') as zip:
|
||||
if strip_dirs:
|
||||
for info in zip.infolist():
|
||||
new_filename = info.filename.split('/', maxsplit=strip_dirs)[strip_dirs]
|
||||
if new_filename:
|
||||
info.filename = new_filename
|
||||
zip.extract(info, destination)
|
||||
else:
|
||||
zip.extractall(destination)
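A short, hypothetical example of the strip_dirs parameter (archive name and paths are invented): with strip_dirs=1 the leading directory inside the archive is dropped on extraction.

from pwnagotchi.utils import unzip

# assume /tmp/plugins.zip contains plugins-master/webcfg.py and plugins-master/css/style.css
unzip('/tmp/plugins.zip', '/tmp/custom-plugins', strip_dirs=1)
# extracts webcfg.py and css/style.css directly into /tmp/custom-plugins,
# without the leading 'plugins-master/' folder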
|
||||
|
||||
|
||||
# https://stackoverflow.com/questions/823196/yaml-merge-in-python
|
||||
def merge_config(user, default):
|
||||
if isinstance(user, dict) and isinstance(default, dict):
|
||||
@ -86,6 +158,7 @@ def load_config(args):
|
||||
if not os.path.exists(default_config_path):
|
||||
os.makedirs(default_config_path)
|
||||
|
||||
import pwnagotchi
|
||||
ref_defaults_file = os.path.join(os.path.dirname(pwnagotchi.__file__), 'defaults.toml')
|
||||
ref_defaults_data = None
|
||||
|
||||
@ -134,6 +207,7 @@ def load_config(args):
|
||||
# no toml found; convert yaml
|
||||
logging.info('Old yaml-config found. Converting to toml...')
|
||||
with open(args.user_config, 'w') as toml_file, open(yaml_name) as yaml_file:
|
||||
import yaml
|
||||
user_config = yaml.safe_load(yaml_file)
|
||||
# convert int/float keys to str
|
||||
user_config = keys_to_str(user_config)
|
||||
@ -149,6 +223,15 @@ def load_config(args):
|
||||
logging.error("There was an error processing the configuration file:\n%s ",ex)
|
||||
sys.exit(1)
|
||||
|
||||
# dropins
|
||||
dropin = config['main']['confd']
|
||||
if dropin and os.path.isdir(dropin):
|
||||
dropin += '*.toml' if dropin.endswith('/') else '/*.toml' # only toml here; yaml is no more
|
||||
for conf in glob.glob(dropin):
|
||||
with open(conf) as toml_file:
|
||||
additional_config = toml.load(toml_file)
|
||||
config = merge_config(additional_config, config)
|
||||
|
||||
# the very first step is to normalize the display name so we don't need dozens of if/elif around
|
||||
if config['ui']['display']['type'] in ('inky', 'inkyphat'):
|
||||
config['ui']['display']['type'] = 'inky'
|
||||
@ -226,7 +309,7 @@ def led(on=True):
|
||||
|
||||
|
||||
def blink(times=1, delay=0.3):
|
||||
for t in range(0, times):
|
||||
for _ in range(0, times):
|
||||
led(True)
|
||||
time.sleep(delay)
|
||||
led(False)
|
||||
@ -249,6 +332,18 @@ class FieldNotFoundError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def md5(fname):
|
||||
"""
|
||||
https://stackoverflow.com/questions/3431825/generating-an-md5-checksum-of-a-file
|
||||
"""
|
||||
import hashlib
|
||||
hash_md5 = hashlib.md5()
|
||||
with open(fname, "rb") as f:
|
||||
for chunk in iter(lambda: f.read(4096), b""):
|
||||
hash_md5.update(chunk)
|
||||
return hash_md5.hexdigest()
|
||||
|
||||
|
||||
def extract_from_pcap(path, fields):
|
||||
"""
|
||||
Search in pcap-file for specified information
|
||||
@ -361,6 +456,7 @@ class StatusFile(object):
|
||||
return self._updated is not None and (datetime.now() - self._updated).days < days
|
||||
|
||||
def update(self, data=None):
|
||||
from pwnagotchi.fs import ensure_write
|
||||
self._updated = datetime.now()
|
||||
self.data = data
|
||||
with ensure_write(self._path, 'w') as fp:
|
||||
|
@ -21,3 +21,4 @@ flask-wtf==0.14.3
|
||||
dbus-python==1.2.12
|
||||
toml==0.10.0
|
||||
python-dateutil==2.8.1
|
||||
websockets==8.1
|
||||
|