2013-03-02 00:05:03 +01:00
|
|
|
# Python class for controlling wpa_supplicant
|
2019-01-04 19:27:40 +01:00
|
|
|
# Copyright (c) 2013-2019, Jouni Malinen <j@w1.fi>
|
2013-03-02 00:05:03 +01:00
|
|
|
#
|
|
|
|
# This software may be distributed under the terms of the BSD license.
|
|
|
|
# See README for more details.
|
|
|
|
|
|
|
|
import os
|
|
|
|
import time
|
|
|
|
import logging
|
2014-03-08 20:25:47 +01:00
|
|
|
import binascii
|
2013-03-09 12:36:35 +01:00
|
|
|
import re
|
2014-03-08 20:25:47 +01:00
|
|
|
import struct
|
2013-03-02 00:05:03 +01:00
|
|
|
import wpaspy
|
2016-03-08 14:28:04 +01:00
|
|
|
import remotehost
|
2016-06-07 15:42:40 +02:00
|
|
|
import subprocess
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2013-10-31 11:46:42 +01:00
|
|
|
logger = logging.getLogger()
|
2013-03-02 00:05:03 +01:00
|
|
|
wpas_ctrl = '/var/run/wpa_supplicant'
|
|
|
|
|
|
|
|
class WpaSupplicant:
|
2016-03-04 10:20:34 +01:00
|
|
|
    def __init__(self, ifname=None, global_iface=None, hostname=None,
                 port=9877, global_port=9878, monitor=True):
        """Set up control (and optionally monitor) connections.

        ifname: wpa_supplicant interface to attach to (may be None)
        global_iface: global control interface path/name (may be None)
        hostname: remote host to control over UDP; None means local UNIX
            socket control interface
        port/global_port: UDP ports used when hostname is set
        monitor: when False, no event-monitor connections are opened
        """
        self.monitor = monitor
        self.hostname = hostname
        self.group_ifname = None
        # Pre-initialize all connection handles so cleanup paths are safe
        # even if a later step of this constructor fails.
        self.global_mon = None
        self.global_ctrl = None
        self.gctrl_mon = None
        self.ctrl = None
        self.mon = None
        self.ifname = None
        self.host = remotehost.Host(hostname, ifname)
        self._group_dbg = None
        if ifname:
            self.set_ifname(ifname, hostname, port)
            res = self.get_driver_status()
            # Driver capability flag 0x20000000: dedicated P2P Device
            # interface is used, named "p2p-dev-<ifname>".
            if 'capa.flags' in res and int(res['capa.flags'], 0) & 0x20000000:
                self.p2p_dev_ifname = 'p2p-dev-' + self.ifname
            else:
                self.p2p_dev_ifname = ifname

        self.global_iface = global_iface
        if global_iface:
            if hostname != None:
                # Remote host: both control and monitor go over UDP.
                self.global_ctrl = wpaspy.Ctrl(hostname, global_port)
                if self.monitor:
                    self.global_mon = wpaspy.Ctrl(hostname, global_port)
                self.global_dbg = hostname + "/" + str(global_port) + "/"
            else:
                self.global_ctrl = wpaspy.Ctrl(global_iface)
                if self.monitor:
                    self.global_mon = wpaspy.Ctrl(global_iface)
                self.global_dbg = ""
            if self.monitor:
                self.global_mon.attach()
|
2019-03-17 14:26:34 +01:00
|
|
|
|
|
|
|
def __del__(self):
|
|
|
|
self.close_monitor()
|
|
|
|
self.close_control()
|
|
|
|
|
|
|
|
def close_control_ctrl(self):
|
|
|
|
if self.ctrl:
|
|
|
|
del self.ctrl
|
|
|
|
self.ctrl = None
|
|
|
|
|
|
|
|
def close_control_global(self):
|
|
|
|
if self.global_ctrl:
|
|
|
|
del self.global_ctrl
|
|
|
|
self.global_ctrl = None
|
|
|
|
|
|
|
|
def close_control(self):
|
|
|
|
self.close_control_ctrl()
|
|
|
|
self.close_control_global()
|
|
|
|
|
|
|
|
    def close_monitor_mon(self):
        """Drain and detach the per-interface event monitor connection.

        Safe to call when no monitor is attached. Pending events are logged
        before detaching so they are not silently lost.
        """
        if not self.mon:
            return
        try:
            # Drain any queued events; best-effort, so any socket error
            # while draining is intentionally ignored.
            while self.mon.pending():
                ev = self.mon.recv()
                logger.debug(self.dbg + ": " + ev)
        except:
            pass
        try:
            self.mon.detach()
        except ConnectionRefusedError:
            # The process on the other end is already gone.
            pass
        except Exception as e:
            if str(e) == "DETACH failed":
                # wpaspy reports an unsuccessful DETACH this way; the
                # connection is being dropped anyway.
                pass
            else:
                raise
        del self.mon
        self.mon = None
|
|
|
|
|
|
|
|
    def close_monitor_global(self):
        """Drain and detach the global event monitor connection.

        Safe to call when no global monitor is attached.
        """
        if not self.global_mon:
            return
        try:
            # Drain queued events (best-effort) and log them.
            while self.global_mon.pending():
                ev = self.global_mon.recv()
                logger.debug(self.global_dbg + ": " + ev)
        except:
            pass
        try:
            self.global_mon.detach()
        except ConnectionRefusedError:
            # The process on the other end is already gone.
            pass
        except Exception as e:
            if str(e) == "DETACH failed":
                # wpaspy reports an unsuccessful DETACH this way; ignore.
                pass
            else:
                raise
        del self.global_mon
        self.global_mon = None
|
|
|
|
|
|
|
|
    def close_monitor_group(self):
        """Drain and detach the P2P group interface monitor connection.

        Safe to call when no group monitor is attached; all errors during
        teardown are ignored since the group interface may already be gone.
        """
        if not self.gctrl_mon:
            return
        try:
            while self.gctrl_mon.pending():
                ev = self.gctrl_mon.recv()
                logger.debug(self.dbg + ": " + ev)
        except:
            pass
        try:
            self.gctrl_mon.detach()
        except:
            pass
        del self.gctrl_mon
        self.gctrl_mon = None
|
|
|
|
|
|
|
|
def close_monitor(self):
|
|
|
|
self.close_monitor_mon()
|
|
|
|
self.close_monitor_global()
|
|
|
|
self.close_monitor_group()
|
2013-06-30 23:13:11 +02:00
|
|
|
|
2016-06-27 19:10:23 +02:00
|
|
|
def cmd_execute(self, cmd_array, shell=False):
|
2016-06-07 15:42:40 +02:00
|
|
|
if self.hostname is None:
|
2016-06-27 19:10:23 +02:00
|
|
|
if shell:
|
|
|
|
cmd = ' '.join(cmd_array)
|
|
|
|
else:
|
|
|
|
cmd = cmd_array
|
2016-06-07 15:42:40 +02:00
|
|
|
proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT,
|
2016-06-27 19:10:23 +02:00
|
|
|
stdout=subprocess.PIPE, shell=shell)
|
2016-06-07 15:42:40 +02:00
|
|
|
out = proc.communicate()[0]
|
|
|
|
ret = proc.returncode
|
2019-02-02 11:48:30 +01:00
|
|
|
return ret, out.decode()
|
2016-06-07 15:42:40 +02:00
|
|
|
else:
|
|
|
|
return self.host.execute(cmd_array)
|
|
|
|
|
2016-03-04 10:20:40 +01:00
|
|
|
    def terminate(self):
        """Ask the global wpa_supplicant process to terminate and drop the
        global control connection."""
        if self.global_mon:
            self.close_monitor_global()
        self.global_ctrl.terminate()
        self.global_ctrl = None
|
|
|
|
|
2015-01-18 16:13:55 +01:00
|
|
|
    def close_ctrl(self):
        """Close global monitor/control connections and forget the current
        interface (without terminating wpa_supplicant)."""
        self.close_monitor_global()
        self.close_control_global()
        self.remove_ifname()
|
|
|
|
|
2016-03-04 10:20:34 +01:00
|
|
|
    def set_ifname(self, ifname, hostname=None, port=9877):
        """Attach control (and optional monitor) connections to *ifname*.

        Any previously attached interface is released first. With a
        hostname the connection goes over UDP; otherwise the local UNIX
        socket under wpas_ctrl is used.
        """
        self.remove_ifname()
        self.ifname = ifname
        if hostname != None:
            self.ctrl = wpaspy.Ctrl(hostname, port)
            if self.monitor:
                self.mon = wpaspy.Ctrl(hostname, port)
            self.host = remotehost.Host(hostname, ifname)
            self.dbg = hostname + "/" + ifname
        else:
            self.ctrl = wpaspy.Ctrl(os.path.join(wpas_ctrl, ifname))
            if self.monitor:
                self.mon = wpaspy.Ctrl(os.path.join(wpas_ctrl, ifname))
            self.dbg = ifname
        if self.monitor:
            self.mon.attach()
|
2013-12-30 22:08:25 +01:00
|
|
|
|
|
|
|
    def remove_ifname(self):
        """Detach from the current interface: close its monitor and control
        connections and clear self.ifname."""
        self.close_monitor_mon()
        self.close_control_ctrl()
        self.ifname = None
|
2013-12-30 22:08:25 +01:00
|
|
|
|
2016-03-04 10:20:34 +01:00
|
|
|
def get_ctrl_iface_port(self, ifname):
|
|
|
|
if self.hostname is None:
|
|
|
|
return None
|
|
|
|
|
|
|
|
res = self.global_request("INTERFACES ctrl")
|
|
|
|
lines = res.splitlines()
|
|
|
|
found = False
|
|
|
|
for line in lines:
|
|
|
|
words = line.split()
|
|
|
|
if words[0] == ifname:
|
|
|
|
found = True
|
|
|
|
break
|
|
|
|
if not found:
|
|
|
|
raise Exception("Could not find UDP port for " + ifname)
|
|
|
|
res = line.find("ctrl_iface=udp:")
|
|
|
|
if res == -1:
|
|
|
|
raise Exception("Wrong ctrl_interface format")
|
|
|
|
words = line.split(":")
|
|
|
|
return int(words[1])
|
|
|
|
|
2015-01-31 16:28:08 +01:00
|
|
|
def interface_add(self, ifname, config="", driver="nl80211",
|
2015-04-04 10:02:22 +02:00
|
|
|
drv_params=None, br_ifname=None, create=False,
|
2015-11-03 15:30:12 +01:00
|
|
|
set_ifname=True, all_params=False, if_type=None):
|
2016-04-24 11:28:18 +02:00
|
|
|
status, groups = self.host.execute(["id"])
|
2016-03-08 14:28:04 +01:00
|
|
|
if status != 0:
|
2013-12-30 22:08:25 +01:00
|
|
|
group = "admin"
|
2016-03-08 14:28:04 +01:00
|
|
|
group = "admin" if "(admin)" in groups else "adm"
|
2014-04-15 23:27:27 +02:00
|
|
|
cmd = "INTERFACE_ADD " + ifname + "\t" + config + "\t" + driver + "\tDIR=/var/run/wpa_supplicant GROUP=" + group
|
2013-12-30 23:17:02 +01:00
|
|
|
if drv_params:
|
|
|
|
cmd = cmd + '\t' + drv_params
|
2015-01-31 16:28:08 +01:00
|
|
|
if br_ifname:
|
|
|
|
if not drv_params:
|
|
|
|
cmd += '\t'
|
|
|
|
cmd += '\t' + br_ifname
|
2015-03-26 19:43:11 +01:00
|
|
|
if create:
|
|
|
|
if not br_ifname:
|
|
|
|
cmd += '\t'
|
|
|
|
if not drv_params:
|
|
|
|
cmd += '\t'
|
|
|
|
cmd += '\tcreate'
|
2015-11-03 15:30:12 +01:00
|
|
|
if if_type:
|
|
|
|
cmd += '\t' + if_type
|
2015-04-04 10:02:22 +02:00
|
|
|
if all_params and not create:
|
|
|
|
if not br_ifname:
|
|
|
|
cmd += '\t'
|
|
|
|
if not drv_params:
|
|
|
|
cmd += '\t'
|
|
|
|
cmd += '\t'
|
2013-12-30 22:08:25 +01:00
|
|
|
if "FAIL" in self.global_request(cmd):
|
|
|
|
raise Exception("Failed to add a dynamic wpa_supplicant interface")
|
2015-04-04 10:02:22 +02:00
|
|
|
if not create and set_ifname:
|
2016-03-04 10:20:34 +01:00
|
|
|
port = self.get_ctrl_iface_port(ifname)
|
|
|
|
self.set_ifname(ifname, self.hostname, port)
|
2016-04-07 12:31:59 +02:00
|
|
|
res = self.get_driver_status()
|
2016-05-14 17:59:47 +02:00
|
|
|
if 'capa.flags' in res and int(res['capa.flags'], 0) & 0x20000000:
|
2016-04-07 12:31:59 +02:00
|
|
|
self.p2p_dev_ifname = 'p2p-dev-' + self.ifname
|
|
|
|
else:
|
|
|
|
self.p2p_dev_ifname = ifname
|
2013-12-30 22:08:25 +01:00
|
|
|
|
|
|
|
    def interface_remove(self, ifname):
        """Remove a dynamically added interface and detach from it."""
        self.remove_ifname()
        self.global_request("INTERFACE_REMOVE " + ifname)
|
|
|
|
|
2015-02-07 21:38:21 +01:00
|
|
|
    def request(self, cmd, timeout=10):
        """Send *cmd* on the per-interface control connection and return the
        raw response string."""
        logger.debug(self.dbg + ": CTRL: " + cmd)
        return self.ctrl.request(cmd, timeout=timeout)
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2013-06-30 23:13:11 +02:00
|
|
|
def global_request(self, cmd):
|
|
|
|
if self.global_iface is None:
|
2016-03-20 10:08:12 +01:00
|
|
|
return self.request(cmd)
|
2013-06-30 23:13:11 +02:00
|
|
|
else:
|
2013-12-30 22:08:25 +01:00
|
|
|
ifname = self.ifname or self.global_iface
|
2016-03-08 14:28:05 +01:00
|
|
|
logger.debug(self.global_dbg + ifname + ": CTRL(global): " + cmd)
|
2013-06-30 23:13:11 +02:00
|
|
|
return self.global_ctrl.request(cmd)
|
|
|
|
|
2016-05-29 14:03:40 +02:00
|
|
|
@property
|
|
|
|
def group_dbg(self):
|
|
|
|
if self._group_dbg is not None:
|
|
|
|
return self._group_dbg
|
|
|
|
if self.group_ifname is None:
|
|
|
|
raise Exception("Cannot have group_dbg without group_ifname")
|
|
|
|
if self.hostname is None:
|
|
|
|
self._group_dbg = self.group_ifname
|
|
|
|
else:
|
|
|
|
self._group_dbg = self.hostname + "/" + self.group_ifname
|
|
|
|
return self._group_dbg
|
|
|
|
|
2013-03-09 16:34:08 +01:00
|
|
|
    def group_request(self, cmd):
        """Send *cmd* to the P2P group interface when one exists and differs
        from the main interface; otherwise use the normal control path.

        A fresh control connection to the group interface is opened per call.
        """
        if self.group_ifname and self.group_ifname != self.ifname:
            if self.hostname is None:
                gctrl = wpaspy.Ctrl(os.path.join(wpas_ctrl, self.group_ifname))
            else:
                port = self.get_ctrl_iface_port(self.group_ifname)
                gctrl = wpaspy.Ctrl(self.hostname, port)
            logger.debug(self.group_dbg + ": CTRL(group): " + cmd)
            return gctrl.request(cmd)
        return self.request(cmd)
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
def ping(self):
|
|
|
|
return "PONG" in self.request("PING")
|
|
|
|
|
2014-04-29 13:46:09 +02:00
|
|
|
def global_ping(self):
|
|
|
|
return "PONG" in self.global_request("PING")
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
    def reset(self):
        """Return wpa_supplicant to a clean state between test cases.

        Flushes state, removes networks and P2P peers, drops any P2P group
        monitor, and waits for an ongoing driver scan to finish (forcing the
        interface down/up as a last resort).
        """
        self.dump_monitor()
        res = self.request("FLUSH")
        if "OK" not in res:
            logger.info("FLUSH to " + self.ifname + " failed: " + res)
        self.global_request("REMOVE_NETWORK all")
        self.global_request("SET p2p_no_group_iface 1")
        self.global_request("P2P_FLUSH")
        self.close_monitor_group()
        self.group_ifname = None
        self.dump_monitor()

        # Wait up to ~60 seconds for any ongoing scan (on the main or the
        # P2P Device interface) to complete before continuing.
        iter = 0
        while iter < 60:
            state1 = self.get_driver_status_field("scan_state")
            p2pdev = "p2p-dev-" + self.ifname
            state2 = self.get_driver_status_field("scan_state", ifname=p2pdev)
            states = str(state1) + " " + str(state2)
            if "SCAN_STARTED" in states or "SCAN_REQUESTED" in states:
                logger.info(self.ifname + ": Waiting for scan operation to complete before continuing")
                time.sleep(1)
            else:
                break
            iter = iter + 1
        if iter == 60:
            # Scan state never cleared; bounce the interface to force it.
            logger.error(self.ifname + ": Driver scan state did not clear")
            print("Trying to clear cfg80211/mac80211 scan state")
            status, buf = self.host.execute(["ifconfig", self.ifname, "down"])
            if status != 0:
                logger.info("ifconfig failed: " + buf)
                logger.info(status)
            status, buf = self.host.execute(["ifconfig", self.ifname, "up"])
            if status != 0:
                logger.info("ifconfig failed: " + buf)
                logger.info(status)
        if iter > 0:
            # The ongoing scan could have discovered BSSes or P2P peers
            logger.info("Run FLUSH again since scan was in progress")
            self.request("FLUSH")
            self.dump_monitor()

        if not self.ping():
            logger.info("No PING response from " + self.ifname + " after reset")
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2020-01-26 12:28:43 +01:00
|
|
|
def set(self, field, value, allow_fail=False):
|
2019-03-15 10:34:32 +01:00
|
|
|
if "OK" not in self.request("SET " + field + " " + value):
|
2020-01-26 12:28:43 +01:00
|
|
|
if allow_fail:
|
|
|
|
return
|
2016-12-10 16:04:08 +01:00
|
|
|
raise Exception("Failed to set wpa_supplicant parameter " + field)
|
|
|
|
|
2013-03-09 18:01:56 +01:00
|
|
|
def add_network(self):
|
|
|
|
id = self.request("ADD_NETWORK")
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("ADD_NETWORK failed")
|
|
|
|
return int(id)
|
|
|
|
|
|
|
|
def remove_network(self, id):
|
|
|
|
id = self.request("REMOVE_NETWORK " + str(id))
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("REMOVE_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
2014-02-03 23:08:18 +01:00
|
|
|
def get_network(self, id, field):
|
|
|
|
res = self.request("GET_NETWORK " + str(id) + " " + field)
|
|
|
|
if res == "FAIL\n":
|
|
|
|
return None
|
|
|
|
return res
|
|
|
|
|
2013-03-09 18:01:56 +01:00
|
|
|
def set_network(self, id, field, value):
|
|
|
|
res = self.request("SET_NETWORK " + str(id) + " " + field + " " + value)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("SET_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def set_network_quoted(self, id, field, value):
|
|
|
|
res = self.request("SET_NETWORK " + str(id) + " " + field + ' "' + value + '"')
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("SET_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
2016-04-07 12:32:00 +02:00
|
|
|
    def p2pdev_request(self, cmd):
        """Send *cmd* addressed to the P2P Device interface via the global
        control connection."""
        return self.global_request("IFNAME=" + self.p2p_dev_ifname + " " + cmd)
|
|
|
|
|
|
|
|
def p2pdev_add_network(self):
|
|
|
|
id = self.p2pdev_request("ADD_NETWORK")
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("p2pdev ADD_NETWORK failed")
|
|
|
|
return int(id)
|
|
|
|
|
|
|
|
def p2pdev_set_network(self, id, field, value):
|
|
|
|
res = self.p2pdev_request("SET_NETWORK " + str(id) + " " + field + " " + value)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("p2pdev SET_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def p2pdev_set_network_quoted(self, id, field, value):
|
|
|
|
res = self.p2pdev_request("SET_NETWORK " + str(id) + " " + field + ' "' + value + '"')
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("p2pdev SET_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
2015-05-05 11:36:58 +02:00
|
|
|
def list_networks(self, p2p=False):
|
|
|
|
if p2p:
|
|
|
|
res = self.global_request("LIST_NETWORKS")
|
|
|
|
else:
|
|
|
|
res = self.request("LIST_NETWORKS")
|
2013-11-06 22:35:19 +01:00
|
|
|
lines = res.splitlines()
|
|
|
|
networks = []
|
|
|
|
for l in lines:
|
|
|
|
if "network id" in l:
|
|
|
|
continue
|
2019-03-15 11:10:37 +01:00
|
|
|
[id, ssid, bssid, flags] = l.split('\t')
|
2013-11-06 22:35:19 +01:00
|
|
|
network = {}
|
|
|
|
network['id'] = id
|
|
|
|
network['ssid'] = ssid
|
|
|
|
network['bssid'] = bssid
|
|
|
|
network['flags'] = flags
|
|
|
|
networks.append(network)
|
|
|
|
return networks
|
|
|
|
|
2014-04-12 17:47:48 +02:00
|
|
|
def hs20_enable(self, auto_interworking=False):
|
2013-10-29 13:20:29 +01:00
|
|
|
self.request("SET interworking 1")
|
|
|
|
self.request("SET hs20 1")
|
2014-04-12 17:47:48 +02:00
|
|
|
if auto_interworking:
|
|
|
|
self.request("SET auto_interworking 1")
|
|
|
|
else:
|
|
|
|
self.request("SET auto_interworking 0")
|
2013-10-29 13:20:29 +01:00
|
|
|
|
2015-02-08 16:09:18 +01:00
|
|
|
def interworking_add_network(self, bssid):
|
|
|
|
id = self.request("INTERWORKING_ADD_NETWORK " + bssid)
|
|
|
|
if "FAIL" in id or "OK" in id:
|
|
|
|
raise Exception("INTERWORKING_ADD_NETWORK failed")
|
|
|
|
return int(id)
|
|
|
|
|
2013-04-01 00:01:24 +02:00
|
|
|
def add_cred(self):
|
|
|
|
id = self.request("ADD_CRED")
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("ADD_CRED failed")
|
|
|
|
return int(id)
|
|
|
|
|
|
|
|
def remove_cred(self, id):
|
|
|
|
id = self.request("REMOVE_CRED " + str(id))
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("REMOVE_CRED failed")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def set_cred(self, id, field, value):
|
|
|
|
res = self.request("SET_CRED " + str(id) + " " + field + " " + value)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("SET_CRED failed")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def set_cred_quoted(self, id, field, value):
|
|
|
|
res = self.request("SET_CRED " + str(id) + " " + field + ' "' + value + '"')
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("SET_CRED failed")
|
|
|
|
return None
|
|
|
|
|
2014-04-04 21:50:46 +02:00
|
|
|
    def get_cred(self, id, field):
        """Return the raw GET_CRED response for *field* of credential *id*."""
        return self.request("GET_CRED " + str(id) + " " + field)
|
|
|
|
|
2013-11-04 12:09:46 +01:00
|
|
|
    def add_cred_values(self, params):
        """Create a credential block, populate it from *params*, and return
        the new credential id.

        Fields listed in *quoted* are sent double-quoted; fields listed in
        *not_quoted* are sent verbatim. Keys of *params* not present in
        either list are silently ignored. Commands are issued in the fixed
        list order below, not in *params* order.
        """
        id = self.add_cred()

        # String-valued credential fields that must be quoted on the wire.
        quoted = ["realm", "username", "password", "domain", "imsi",
                  "excluded_ssid", "milenage", "ca_cert", "client_cert",
                  "private_key", "domain_suffix_match", "provisioning_sp",
                  "roaming_partner", "phase1", "phase2", "private_key_passwd",
                  "roaming_consortiums"]
        for field in quoted:
            if field in params:
                self.set_cred_quoted(id, field, params[field])

        # Numeric / token-valued fields sent without quoting.
        not_quoted = ["eap", "roaming_consortium", "priority",
                      "required_roaming_consortium", "sp_priority",
                      "max_bss_load", "update_identifier", "req_conn_capab",
                      "min_dl_bandwidth_home", "min_ul_bandwidth_home",
                      "min_dl_bandwidth_roaming", "min_ul_bandwidth_roaming"]
        for field in not_quoted:
            if field in params:
                self.set_cred(id, field, params[field])

        return id
|
2013-10-29 13:20:29 +01:00
|
|
|
|
2014-04-04 22:13:49 +02:00
|
|
|
def select_network(self, id, freq=None):
|
|
|
|
if freq:
|
2015-02-08 16:09:18 +01:00
|
|
|
extra = " freq=" + str(freq)
|
2014-04-04 22:13:49 +02:00
|
|
|
else:
|
|
|
|
extra = ""
|
|
|
|
id = self.request("SELECT_NETWORK " + str(id) + extra)
|
2013-03-27 12:57:48 +01:00
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("SELECT_NETWORK failed")
|
|
|
|
return None
|
|
|
|
|
2014-09-01 06:23:35 +02:00
|
|
|
def mesh_group_add(self, id):
|
|
|
|
id = self.request("MESH_GROUP_ADD " + str(id))
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("MESH_GROUP_ADD failed")
|
|
|
|
return None
|
|
|
|
|
|
|
|
def mesh_group_remove(self):
|
|
|
|
id = self.request("MESH_GROUP_REMOVE " + str(self.ifname))
|
|
|
|
if "FAIL" in id:
|
|
|
|
raise Exception("MESH_GROUP_REMOVE failed")
|
|
|
|
return None
|
|
|
|
|
2016-07-13 19:06:03 +02:00
|
|
|
    def connect_network(self, id, timeout=None):
        """Select network *id* and wait until connection completes.

        Default timeout is 10 s locally and 60 s for remote hosts.
        """
        if timeout is None:
            timeout = 10 if self.hostname is None else 60
        self.dump_monitor()
        self.select_network(id)
        self.wait_connected(timeout=timeout)
        self.dump_monitor()
|
|
|
|
|
2014-04-06 15:52:14 +02:00
|
|
|
def get_status(self, extra=None):
|
|
|
|
if extra:
|
|
|
|
extra = "-" + extra
|
|
|
|
else:
|
|
|
|
extra = ""
|
|
|
|
res = self.request("STATUS" + extra)
|
2013-03-02 00:05:03 +01:00
|
|
|
lines = res.splitlines()
|
2013-03-31 11:33:49 +02:00
|
|
|
vals = dict()
|
2013-03-02 00:05:03 +01:00
|
|
|
for l in lines:
|
2014-02-04 12:22:10 +01:00
|
|
|
try:
|
2019-03-15 11:10:37 +01:00
|
|
|
[name, value] = l.split('=', 1)
|
2014-02-04 12:22:10 +01:00
|
|
|
vals[name] = value
|
2019-01-24 08:45:41 +01:00
|
|
|
except ValueError as e:
|
2014-02-04 12:22:10 +01:00
|
|
|
logger.info(self.ifname + ": Ignore unexpected STATUS line: " + l)
|
2013-03-31 11:33:49 +02:00
|
|
|
return vals
|
|
|
|
|
2014-04-06 15:52:14 +02:00
|
|
|
def get_status_field(self, field, extra=None):
|
|
|
|
vals = self.get_status(extra)
|
2013-03-31 11:33:49 +02:00
|
|
|
if field in vals:
|
|
|
|
return vals[field]
|
2013-03-02 00:05:03 +01:00
|
|
|
return None
|
|
|
|
|
2014-04-06 15:52:14 +02:00
|
|
|
def get_group_status(self, extra=None):
|
|
|
|
if extra:
|
|
|
|
extra = "-" + extra
|
|
|
|
else:
|
|
|
|
extra = ""
|
|
|
|
res = self.group_request("STATUS" + extra)
|
2013-03-09 17:24:32 +01:00
|
|
|
lines = res.splitlines()
|
2013-03-31 11:33:49 +02:00
|
|
|
vals = dict()
|
2013-03-09 17:24:32 +01:00
|
|
|
for l in lines:
|
2014-11-27 18:42:54 +01:00
|
|
|
try:
|
2019-03-15 11:10:37 +01:00
|
|
|
[name, value] = l.split('=', 1)
|
2014-11-27 18:42:54 +01:00
|
|
|
except ValueError:
|
|
|
|
logger.info(self.ifname + ": Ignore unexpected status line: " + l)
|
|
|
|
continue
|
2013-03-31 11:33:49 +02:00
|
|
|
vals[name] = value
|
|
|
|
return vals
|
|
|
|
|
2014-04-06 15:52:14 +02:00
|
|
|
def get_group_status_field(self, field, extra=None):
|
|
|
|
vals = self.get_group_status(extra)
|
2013-03-31 11:33:49 +02:00
|
|
|
if field in vals:
|
|
|
|
return vals[field]
|
2013-03-09 17:24:32 +01:00
|
|
|
return None
|
|
|
|
|
2015-07-02 15:14:53 +02:00
|
|
|
def get_driver_status(self, ifname=None):
|
|
|
|
if ifname is None:
|
|
|
|
res = self.request("STATUS-DRIVER")
|
|
|
|
else:
|
|
|
|
res = self.global_request("IFNAME=%s STATUS-DRIVER" % ifname)
|
2015-07-16 11:46:01 +02:00
|
|
|
if res.startswith("FAIL"):
|
|
|
|
return dict()
|
2013-09-28 16:31:54 +02:00
|
|
|
lines = res.splitlines()
|
|
|
|
vals = dict()
|
|
|
|
for l in lines:
|
2014-11-27 18:42:54 +01:00
|
|
|
try:
|
2019-03-15 11:10:37 +01:00
|
|
|
[name, value] = l.split('=', 1)
|
2014-11-27 18:42:54 +01:00
|
|
|
except ValueError:
|
|
|
|
logger.info(self.ifname + ": Ignore unexpected status-driver line: " + l)
|
|
|
|
continue
|
2013-09-28 16:31:54 +02:00
|
|
|
vals[name] = value
|
|
|
|
return vals
|
|
|
|
|
2015-07-02 15:14:53 +02:00
|
|
|
def get_driver_status_field(self, field, ifname=None):
|
|
|
|
vals = self.get_driver_status(ifname)
|
2013-09-28 16:31:54 +02:00
|
|
|
if field in vals:
|
|
|
|
return vals[field]
|
|
|
|
return None
|
|
|
|
|
2014-06-10 19:50:30 +02:00
|
|
|
def get_mcc(self):
|
2016-04-24 23:19:40 +02:00
|
|
|
mcc = int(self.get_driver_status_field('capa.num_multichan_concurrent'))
|
|
|
|
return 1 if mcc < 2 else mcc
|
2014-06-10 19:50:30 +02:00
|
|
|
|
2014-03-15 23:18:03 +01:00
|
|
|
def get_mib(self):
|
|
|
|
res = self.request("MIB")
|
|
|
|
lines = res.splitlines()
|
|
|
|
vals = dict()
|
|
|
|
for l in lines:
|
|
|
|
try:
|
2019-03-15 11:10:37 +01:00
|
|
|
[name, value] = l.split('=', 1)
|
2014-03-15 23:18:03 +01:00
|
|
|
vals[name] = value
|
2019-01-24 08:45:41 +01:00
|
|
|
except ValueError as e:
|
2014-03-15 23:18:03 +01:00
|
|
|
logger.info(self.ifname + ": Ignore unexpected MIB line: " + l)
|
|
|
|
return vals
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
    def p2p_dev_addr(self):
        """Return the P2P Device address reported by STATUS."""
        return self.get_status_field("p2p_device_address")
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2013-03-09 17:24:32 +01:00
|
|
|
    def p2p_interface_addr(self):
        """Return the MAC address of the P2P group interface."""
        return self.get_group_status_field("address")
|
2013-03-09 17:24:32 +01:00
|
|
|
|
2014-10-19 19:55:02 +02:00
|
|
|
    def own_addr(self):
        """Return own MAC address: the P2P group interface address when a
        group exists, otherwise the P2P Device address."""
        try:
            res = self.p2p_interface_addr()
        except:
            # No group interface available; fall back to the device address.
            res = self.p2p_dev_addr()
        return res
|
|
|
|
|
2020-02-05 08:13:48 +01:00
|
|
|
def get_addr(self, group=False):
|
|
|
|
dev_addr = self.own_addr()
|
|
|
|
if not group:
|
|
|
|
addr = self.get_status_field('address')
|
|
|
|
if addr:
|
|
|
|
dev_addr = addr
|
|
|
|
|
|
|
|
return dev_addr
|
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
    def p2p_listen(self):
        """Start P2P Listen-only state."""
        return self.global_request("P2P_LISTEN")
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2015-09-24 19:37:52 +02:00
|
|
|
    def p2p_ext_listen(self, period, interval):
        """Configure P2P extended listen timing (period/interval in ms)."""
        return self.global_request("P2P_EXT_LISTEN %d %d" % (period, interval))
|
|
|
|
|
|
|
|
    def p2p_cancel_ext_listen(self):
        """Disable P2P extended listen timing (no arguments = cancel)."""
        return self.global_request("P2P_EXT_LISTEN")
|
|
|
|
|
2014-12-08 16:56:47 +01:00
|
|
|
def p2p_find(self, social=False, progressive=False, dev_id=None,
|
2015-02-28 21:06:24 +01:00
|
|
|
dev_type=None, delay=None, freq=None):
|
2014-01-08 21:25:58 +01:00
|
|
|
cmd = "P2P_FIND"
|
2013-03-02 00:05:03 +01:00
|
|
|
if social:
|
2014-01-08 21:25:58 +01:00
|
|
|
cmd = cmd + " type=social"
|
2014-04-12 19:03:24 +02:00
|
|
|
elif progressive:
|
|
|
|
cmd = cmd + " type=progressive"
|
2014-01-08 21:25:58 +01:00
|
|
|
if dev_id:
|
|
|
|
cmd = cmd + " dev_id=" + dev_id
|
|
|
|
if dev_type:
|
|
|
|
cmd = cmd + " dev_type=" + dev_type
|
2014-12-08 16:56:47 +01:00
|
|
|
if delay:
|
|
|
|
cmd = cmd + " delay=" + str(delay)
|
2015-02-28 21:06:24 +01:00
|
|
|
if freq:
|
|
|
|
cmd = cmd + " freq=" + str(freq)
|
2014-01-08 21:25:58 +01:00
|
|
|
return self.global_request(cmd)
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2013-03-17 09:58:48 +01:00
|
|
|
    def p2p_stop_find(self):
        """Stop an ongoing P2P device discovery."""
        return self.global_request("P2P_STOP_FIND")
|
2013-03-17 09:58:48 +01:00
|
|
|
|
2013-03-02 00:05:03 +01:00
|
|
|
    def wps_read_pin(self):
        """Generate a fresh WPS PIN, cache it in self.pin, and return it.

        Raises Exception when PIN generation fails (note: self.pin is
        assigned before the check, preserving historical behavior).
        """
        self.pin = self.request("WPS_PIN get").rstrip("\n")
        if "FAIL" in self.pin:
            raise Exception("Could not generate PIN")
        return self.pin
|
|
|
|
|
|
|
|
def peer_known(self, peer, full=True):
|
2013-06-30 23:13:11 +02:00
|
|
|
res = self.global_request("P2P_PEER " + peer)
|
2013-03-02 00:05:03 +01:00
|
|
|
if peer.lower() not in res.lower():
|
|
|
|
return False
|
|
|
|
if not full:
|
|
|
|
return True
|
|
|
|
return "[PROBE_REQ_ONLY]" not in res
|
|
|
|
|
2015-10-26 22:30:48 +01:00
|
|
|
def discover_peer(self, peer, full=True, timeout=15, social=True,
|
|
|
|
force_find=False, freq=None):
|
2013-03-02 00:05:03 +01:00
|
|
|
logger.info(self.ifname + ": Trying to discover peer " + peer)
|
2014-01-05 15:42:48 +01:00
|
|
|
if not force_find and self.peer_known(peer, full):
|
2013-03-02 00:05:03 +01:00
|
|
|
return True
|
2015-10-26 22:30:48 +01:00
|
|
|
self.p2p_find(social, freq=freq)
|
2013-03-02 00:05:03 +01:00
|
|
|
count = 0
|
2014-11-28 15:54:31 +01:00
|
|
|
while count < timeout * 4:
|
|
|
|
time.sleep(0.25)
|
2013-03-02 00:05:03 +01:00
|
|
|
count = count + 1
|
|
|
|
if self.peer_known(peer, full):
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2013-09-01 18:24:12 +02:00
|
|
|
def get_peer(self, peer):
|
|
|
|
res = self.global_request("P2P_PEER " + peer)
|
|
|
|
if peer.lower() not in res.lower():
|
|
|
|
raise Exception("Peer information not available")
|
|
|
|
lines = res.splitlines()
|
|
|
|
vals = dict()
|
|
|
|
for l in lines:
|
|
|
|
if '=' in l:
|
2019-03-15 11:10:37 +01:00
|
|
|
[name, value] = l.split('=', 1)
|
2013-09-01 18:24:12 +02:00
|
|
|
vals[name] = value
|
|
|
|
return vals
|
|
|
|
|
2013-10-26 19:47:08 +02:00
|
|
|
    def group_form_result(self, ev, expect_failure=False, go_neg_res=None):
        """Parse a group formation event and return a result dict.

        ev: the P2P-GROUP-STARTED (or failure) event string.
        expect_failure: when True, a successful formation raises and a
            P2P-GO-NEG-FAILURE event yields {'result': 'go-neg-failed',
            'status': <int>} (or None if no failure event matched).
        go_neg_res: optional P2P-GO-NEG-SUCCESS event to fold into the
            result as go_neg_role/go_neg_freq.

        Side effects: records the group interface name on self.group_ifname
        and opens a monitor connection to the group interface (best-effort).
        """
        if expect_failure:
            if "P2P-GROUP-STARTED" in ev:
                raise Exception("Group formation succeeded when expecting failure")
            exp = r'<.>(P2P-GO-NEG-FAILURE) status=([0-9]*)'
            s = re.split(exp, ev)
            if len(s) < 3:
                return None
            res = {}
            res['result'] = 'go-neg-failed'
            res['status'] = int(s[2])
            return res

        if "P2P-GROUP-STARTED" not in ev:
            raise Exception("No P2P-GROUP-STARTED event seen")

        # Newer event format including ip_addr/ip_mask/go_ip_addr fields.
        exp = r'<.>(P2P-GROUP-STARTED) ([^ ]*) ([^ ]*) ssid="(.*)" freq=([0-9]*) ((?:psk=.*)|(?:passphrase=".*")) go_dev_addr=([0-9a-f:]*) ip_addr=([0-9.]*) ip_mask=([0-9.]*) go_ip_addr=([0-9.]*)'
        s = re.split(exp, ev)
        if len(s) < 11:
            # Fall back to the older format without IP address fields.
            exp = r'<.>(P2P-GROUP-STARTED) ([^ ]*) ([^ ]*) ssid="(.*)" freq=([0-9]*) ((?:psk=.*)|(?:passphrase=".*")) go_dev_addr=([0-9a-f:]*)'
            s = re.split(exp, ev)
            if len(s) < 8:
                raise Exception("Could not parse P2P-GROUP-STARTED")
        res = {}
        res['result'] = 'success'
        res['ifname'] = s[2]
        self.group_ifname = s[2]
        try:
            # Open a monitor connection to the new group interface.
            if self.hostname is None:
                self.gctrl_mon = wpaspy.Ctrl(os.path.join(wpas_ctrl,
                                                          self.group_ifname))
            else:
                port = self.get_ctrl_iface_port(self.group_ifname)
                self.gctrl_mon = wpaspy.Ctrl(self.hostname, port)
            if self.monitor:
                self.gctrl_mon.attach()
        except:
            logger.debug("Could not open monitor socket for group interface")
            self.gctrl_mon = None
        res['role'] = s[3]
        res['ssid'] = s[4]
        res['freq'] = s[5]
        if "[PERSISTENT]" in ev:
            res['persistent'] = True
        else:
            res['persistent'] = False
        # s[6] carries either psk=<hex> (GO) or passphrase="..." (client).
        p = re.match(r'psk=([0-9a-f]*)', s[6])
        if p:
            res['psk'] = p.group(1)
        p = re.match(r'passphrase="(.*)"', s[6])
        if p:
            res['passphrase'] = p.group(1)
        res['go_dev_addr'] = s[7]

        # IP fields are only present with the newer event format; s[8] may
        # instead hold trailing flags like [PERSISTENT].
        if len(s) > 8 and len(s[8]) > 0 and "[PERSISTENT]" not in s[8]:
            res['ip_addr'] = s[8]
        if len(s) > 9:
            res['ip_mask'] = s[9]
        if len(s) > 10:
            res['go_ip_addr'] = s[10]

        if go_neg_res:
            exp = r'<.>(P2P-GO-NEG-SUCCESS) role=(GO|client) freq=([0-9]*)'
            s = re.split(exp, go_neg_res)
            if len(s) < 4:
                raise Exception("Could not parse P2P-GO-NEG-SUCCESS")
            res['go_neg_role'] = s[2]
            res['go_neg_freq'] = s[3]

        return res
|
|
|
|
|
2015-10-29 19:47:36 +01:00
|
|
|
def p2p_go_neg_auth(self, peer, pin, method, go_intent=None,
                    persistent=False, freq=None, freq2=None,
                    max_oper_chwidth=None, ht40=False, vht=False):
    """Authorize a P2P GO negotiation to be started by the peer.

    Discovers the peer and issues P2P_CONNECT with the "auth" option so
    that a negotiation initiated later by the peer is accepted.
    Raises Exception if the peer is not found or the command fails;
    returns None on success.
    """
    if not self.discover_peer(peer):
        raise Exception("Peer " + peer + " not found")
    self.dump_monitor()
    if pin:
        cmd = "P2P_CONNECT " + peer + " " + pin + " " + method + " auth"
    else:
        cmd = "P2P_CONNECT " + peer + " " + method + " auth"
    # Explicit None check so go_intent=0 (always become P2P client) is
    # still passed through; matches p2p_go_neg_init() behavior.
    if go_intent is not None:
        cmd = cmd + ' go_intent=' + str(go_intent)
    if freq:
        cmd = cmd + ' freq=' + str(freq)
    if freq2:
        cmd = cmd + ' freq2=' + str(freq2)
    if max_oper_chwidth:
        cmd = cmd + ' max_oper_chwidth=' + str(max_oper_chwidth)
    if ht40:
        cmd = cmd + ' ht40'
    if vht:
        cmd = cmd + ' vht'
    if persistent:
        cmd = cmd + " persistent"
    if "OK" in self.global_request(cmd):
        return None
    raise Exception("P2P_CONNECT (auth) failed")
|
|
|
|
|
2016-12-14 11:45:37 +01:00
|
|
|
def p2p_go_neg_auth_result(self, timeout=None, expect_failure=False):
    """Wait for the result of a previously authorized GO negotiation.

    Waits for P2P-GO-NEG-SUCCESS/FAILURE and then, on success, for
    P2P-GROUP-STARTED. Returns the parsed group formation result, or
    None when expect_failure is set and the negotiation did not complete.
    """
    if timeout is None:
        # Short wait when a failure is the expected outcome.
        timeout = 1 if expect_failure else 5
    go_neg_res = None
    ev = self.wait_global_event(["P2P-GO-NEG-SUCCESS",
                                 "P2P-GO-NEG-FAILURE"], timeout)
    if ev is None:
        if expect_failure:
            return None
        raise Exception("Group formation timed out")
    if "P2P-GO-NEG-SUCCESS" in ev:
        go_neg_res = ev
        ev = self.wait_global_event(["P2P-GROUP-STARTED"], timeout)
        if ev is None:
            if expect_failure:
                return None
            raise Exception("Group formation timed out")
    self.dump_monitor()
    return self.group_form_result(ev, expect_failure, go_neg_res)
|
2013-03-09 12:36:35 +01:00
|
|
|
|
2015-10-29 19:47:36 +01:00
|
|
|
def p2p_go_neg_init(self, peer, pin, method, timeout=0, go_intent=None,
                    expect_failure=False, persistent=False,
                    persistent_id=None, freq=None, provdisc=False,
                    wait_group=True, freq2=None, max_oper_chwidth=None,
                    ht40=False, vht=False):
    """Initiate P2P GO negotiation with a peer.

    Builds and issues a P2P_CONNECT command with the requested options.
    With timeout == 0 the command is only issued; otherwise the function
    waits for negotiation (and, if wait_group, group formation) events
    and returns the parsed result. expect_failure turns timeouts into a
    None return instead of an exception.
    """
    if not self.discover_peer(peer):
        raise Exception("Peer " + peer + " not found")
    self.dump_monitor()
    if pin:
        cmd = "P2P_CONNECT " + peer + " " + pin + " " + method
    else:
        cmd = "P2P_CONNECT " + peer + " " + method
    # go_intent=0 is valid (always become client), hence the None check.
    if go_intent is not None:
        cmd = cmd + ' go_intent=' + str(go_intent)
    if freq:
        cmd = cmd + ' freq=' + str(freq)
    if freq2:
        cmd = cmd + ' freq2=' + str(freq2)
    if max_oper_chwidth:
        cmd = cmd + ' max_oper_chwidth=' + str(max_oper_chwidth)
    if ht40:
        cmd = cmd + ' ht40'
    if vht:
        cmd = cmd + ' vht'
    if persistent:
        cmd = cmd + " persistent"
    elif persistent_id:
        # Re-invoke a stored persistent group by network id.
        cmd = cmd + " persistent=" + persistent_id
    if provdisc:
        cmd = cmd + " provdisc"
    if "OK" in self.global_request(cmd):
        if timeout == 0:
            return None
        go_neg_res = None
        ev = self.wait_global_event(["P2P-GO-NEG-SUCCESS",
                                     "P2P-GO-NEG-FAILURE"], timeout)
        if ev is None:
            if expect_failure:
                return None
            raise Exception("Group formation timed out")
        if "P2P-GO-NEG-SUCCESS" in ev:
            if not wait_group:
                # Caller only wanted the negotiation result event.
                return ev
            go_neg_res = ev
            ev = self.wait_global_event(["P2P-GROUP-STARTED"], timeout)
            if ev is None:
                if expect_failure:
                    return None
                raise Exception("Group formation timed out")
        self.dump_monitor()
        return self.group_form_result(ev, expect_failure, go_neg_res)
    raise Exception("P2P_CONNECT failed")
|
|
|
|
|
2019-02-01 21:31:59 +01:00
|
|
|
def _wait_event(self, mon, pfx, events, timeout):
    """Wait up to *timeout* seconds for any of *events* on monitor *mon*.

    events must be a list of substrings; the first received event line
    containing one of them is returned. Returns None on timeout. pfx is
    only used to tag debug log output.
    """
    if not isinstance(events, list):
        raise Exception("WpaSupplicant._wait_event() called with incorrect events argument type")
    # os.times()[4] is elapsed real time; used instead of time.time() to
    # be immune to wall-clock adjustments.
    start = os.times()[4]
    while True:
        # Drain everything already queued before blocking again.
        while mon.pending():
            ev = mon.recv()
            logger.debug(self.dbg + pfx + ev)
            for event in events:
                if event in ev:
                    return ev
        now = os.times()[4]
        remaining = start + timeout - now
        if remaining <= 0:
            break
        if not mon.pending(timeout=remaining):
            break
    return None
|
2013-03-02 00:05:03 +01:00
|
|
|
|
2019-02-01 21:31:59 +01:00
|
|
|
def wait_event(self, events, timeout=10):
    """Wait for any of *events* on the per-interface monitor socket."""
    return self._wait_event(self.mon, ": ", events, timeout)
|
|
|
|
|
2013-06-30 23:13:11 +02:00
|
|
|
def wait_global_event(self, events, timeout):
    """Wait for any of *events* on the global control interface monitor.

    Falls back to the per-interface monitor when no global control
    interface is in use.
    """
    if self.global_iface is None:
        return self.wait_event(events, timeout)
    return self._wait_event(self.global_mon, "(global): ",
                            events, timeout)
|
2013-06-30 23:13:11 +02:00
|
|
|
|
2014-10-19 19:56:36 +02:00
|
|
|
def wait_group_event(self, events, timeout=10):
    """Wait for any of *events* on the P2P group interface monitor.

    Uses the dedicated group monitor socket (gctrl_mon) when a separate
    group interface exists; otherwise delegates to wait_event(). Returns
    the matching event line or None on timeout (or when no group monitor
    socket could be opened).
    """
    if not isinstance(events, list):
        raise Exception("WpaSupplicant.wait_group_event() called with incorrect events argument type")
    if self.group_ifname and self.group_ifname != self.ifname:
        if self.gctrl_mon is None:
            return None
        # Same elapsed-real-time polling loop as _wait_event().
        start = os.times()[4]
        while True:
            while self.gctrl_mon.pending():
                ev = self.gctrl_mon.recv()
                logger.debug(self.group_dbg + "(group): " + ev)
                for event in events:
                    if event in ev:
                        return ev
            now = os.times()[4]
            remaining = start + timeout - now
            if remaining <= 0:
                break
            if not self.gctrl_mon.pending(timeout=remaining):
                break
        return None

    return self.wait_event(events, timeout)
|
|
|
|
|
2013-09-01 18:30:08 +02:00
|
|
|
def wait_go_ending_session(self):
    """Wait for the P2P group to be removed by the GO ending the session.

    Raises Exception on timeout or if the removal reason is not
    GO_ENDING_SESSION.
    """
    self.close_monitor_group()
    # Remote (hostname-based) devices get a longer timeout.
    timeout = 3 if self.hostname is None else 10
    ev = self.wait_global_event(["P2P-GROUP-REMOVED"], timeout=timeout)
    if ev is None:
        raise Exception("Group removal event timed out")
    if "reason=GO_ENDING_SESSION" not in ev:
        raise Exception("Unexpected group removal reason")
|
|
|
|
|
2019-03-17 15:00:18 +01:00
|
|
|
def dump_monitor(self, mon=True, global_mon=True):
    """Drain pending events from the monitor sockets into the debug log.

    Returns a (per-interface count, global count) tuple of how many
    events were consumed. Either socket can be skipped via the keyword
    arguments; nothing is read when monitoring is disabled.
    """
    count_iface = 0
    count_global = 0
    while mon and self.monitor and self.mon.pending():
        ev = self.mon.recv()
        logger.debug(self.dbg + ": " + ev)
        count_iface += 1
    while global_mon and self.monitor and self.global_mon and self.global_mon.pending():
        ev = self.global_mon.recv()
        logger.debug(self.global_dbg + self.ifname + "(global): " + ev)
        count_global += 1
    return (count_iface, count_global)
|
2013-03-02 10:38:56 +01:00
|
|
|
|
2013-03-02 11:22:28 +01:00
|
|
|
def remove_group(self, ifname=None):
    """Remove the P2P group on *ifname* (default: current group/main iface).

    Closes the group monitor socket first and clears group_ifname on
    success; raises Exception if wpa_supplicant rejects the removal.
    """
    self.close_monitor_group()
    if ifname is None:
        ifname = self.group_ifname if self.group_ifname else self.ifname
    if "OK" not in self.global_request("P2P_GROUP_REMOVE " + ifname):
        raise Exception("Group could not be removed")
    self.group_ifname = None
|
2013-03-09 15:30:25 +01:00
|
|
|
|
2014-12-10 00:59:47 +01:00
|
|
|
def p2p_start_go(self, persistent=None, freq=None, no_event_clear=False):
    """Start an autonomous P2P GO and return the group formation result.

    persistent may be None (non-persistent), True (new persistent group),
    or a network id to re-invoke a stored persistent group. Raises
    Exception if the command fails or the group does not start in time.
    """
    self.dump_monitor()
    cmd = "P2P_GROUP_ADD"
    if persistent is None:
        pass
    elif persistent is True:
        cmd = cmd + " persistent"
    else:
        cmd = cmd + " persistent=" + str(persistent)
    if freq:
        cmd = cmd + " freq=" + str(freq)
    if "OK" in self.global_request(cmd):
        ev = self.wait_global_event(["P2P-GROUP-STARTED"], timeout=5)
        if ev is None:
            raise Exception("GO start up timed out")
        if not no_event_clear:
            self.dump_monitor()
        return self.group_form_result(ev)
    raise Exception("P2P_GROUP_ADD failed")
|
|
|
|
|
|
|
|
def p2p_go_authorize_client(self, pin):
    """Authorize a WPS PIN client connection on the GO interface."""
    res = self.group_request("WPS_PIN any " + pin)
    if "FAIL" in res:
        raise Exception("Failed to authorize client connection on GO")
    return None
|
|
|
|
|
2013-09-01 10:56:55 +02:00
|
|
|
def p2p_go_authorize_client_pbc(self):
    """Authorize a WPS push-button client connection on the GO interface."""
    res = self.group_request("WPS_PBC")
    if "FAIL" in res:
        raise Exception("Failed to authorize client connection on GO")
    return None
|
|
|
|
|
2014-11-28 15:54:31 +01:00
|
|
|
def p2p_connect_group(self, go_addr, pin, timeout=0, social=False,
                      freq=None):
    """Join an existing P2P group run by GO *go_addr* using a WPS PIN.

    With timeout == 0 the join command is only issued; otherwise the
    function waits for group formation and returns the parsed result.
    """
    self.dump_monitor()
    if not self.discover_peer(go_addr, social=social, freq=freq):
        # Retry with a full scan unless only social channels were wanted.
        if social or not self.discover_peer(go_addr, social=social):
            raise Exception("GO " + go_addr + " not found")
    self.p2p_stop_find()
    self.dump_monitor()
    cmd = "P2P_CONNECT " + go_addr + " " + pin + " join"
    if freq:
        cmd += " freq=" + str(freq)
    if "OK" in self.global_request(cmd):
        if timeout == 0:
            self.dump_monitor()
            return None
        ev = self.wait_global_event(["P2P-GROUP-STARTED",
                                     "P2P-GROUP-FORMATION-FAILURE"],
                                    timeout)
        if ev is None:
            raise Exception("Joining the group timed out")
        if "P2P-GROUP-STARTED" not in ev:
            raise Exception("Failed to join the group")
        self.dump_monitor()
        return self.group_form_result(ev)
    raise Exception("P2P_CONNECT(join) failed")
|
2013-03-09 17:24:32 +01:00
|
|
|
|
|
|
|
def tdls_setup(self, peer):
    """Request TDLS link setup with *peer*; raise on command failure."""
    if "FAIL" in self.group_request("TDLS_SETUP " + peer):
        raise Exception("Failed to request TDLS setup")
    return None
|
|
|
|
|
|
|
|
def tdls_teardown(self, peer):
    """Request TDLS link teardown with *peer*; raise on command failure."""
    if "FAIL" in self.group_request("TDLS_TEARDOWN " + peer):
        raise Exception("Failed to request TDLS teardown")
    return None
|
2013-03-29 19:41:33 +01:00
|
|
|
|
2015-04-28 13:00:51 +02:00
|
|
|
def tdls_link_status(self, peer):
    """Return the TDLS link status string for *peer*; raise on failure."""
    status = self.group_request("TDLS_LINK_STATUS " + peer)
    if "FAIL" in status:
        raise Exception("Failed to request TDLS link status")
    return status
|
|
|
|
|
2014-12-29 03:15:02 +01:00
|
|
|
def tspecs(self):
    """Return (tsid, up) tuples representing current tspecs"""
    status = self.request("WMM_AC_STATUS")
    pairs = [(int(tsid), int(up))
             for tsid, up in re.findall(r"TSID=(\d+) UP=(\d+)", status)]
    logger.debug("tspecs: " + str(pairs))
    return pairs
|
|
|
|
|
2014-12-05 12:39:22 +01:00
|
|
|
def add_ts(self, tsid, up, direction="downlink", expect_failure=False,
           extra=None):
    """Add a WMM-AC traffic stream and verify the result events.

    Issues WMM_AC_ADDTS with a fixed set of default TSPEC parameters.
    When expect_failure is set, waits for TSPEC-REQ-FAILED instead of
    TSPEC-ADDED. Raises Exception on any unexpected outcome.
    """
    # Default TSPEC parameters shared by all test streams.
    params = {
        "sba": 9000,
        "nominal_msdu_size": 1500,
        "min_phy_rate": 6000000,
        "mean_data_rate": 1500,
    }
    cmd = "WMM_AC_ADDTS %s tsid=%d up=%d" % (direction, tsid, up)
    for (key, value) in params.items():
        cmd += " %s=%d" % (key, value)
    if extra:
        cmd += " " + extra

    if self.request(cmd).strip() != "OK":
        raise Exception("ADDTS failed (tsid=%d up=%d)" % (tsid, up))

    if expect_failure:
        ev = self.wait_event(["TSPEC-REQ-FAILED"], timeout=2)
        if ev is None:
            raise Exception("ADDTS failed (time out while waiting failure)")
        if "tsid=%d" % (tsid) not in ev:
            # Message fixed: previously missing the closing parenthesis.
            raise Exception("ADDTS failed (invalid tsid in TSPEC-REQ-FAILED)")
        return

    ev = self.wait_event(["TSPEC-ADDED"], timeout=1)
    if ev is None:
        raise Exception("ADDTS failed (time out)")
    if "tsid=%d" % (tsid) not in ev:
        raise Exception("ADDTS failed (invalid tsid in TSPEC-ADDED)")

    if (tsid, up) not in self.tspecs():
        raise Exception("ADDTS failed (tsid not in tspec list)")
|
|
|
|
|
2014-10-22 14:04:04 +02:00
|
|
|
def del_ts(self, tsid):
    """Delete the WMM-AC traffic stream *tsid* and verify its removal.

    Raises Exception if the DELTS command fails, the TSPEC-REMOVED event
    does not arrive, or the tsid is still listed afterwards.
    """
    if self.request("WMM_AC_DELTS %d" % (tsid)).strip() != "OK":
        raise Exception("DELTS failed")

    ev = self.wait_event(["TSPEC-REMOVED"], timeout=1)
    if ev is None:
        raise Exception("DELTS failed (time out)")
    if "tsid=%d" % (tsid) not in ev:
        raise Exception("DELTS failed (invalid tsid in TSPEC-REMOVED)")

    tspecs = [(t, u) for (t, u) in self.tspecs() if t == tsid]
    if tspecs:
        raise Exception("DELTS failed (still in tspec list)")
|
|
|
|
|
2014-02-15 18:52:56 +01:00
|
|
|
def connect(self, ssid=None, ssid2=None, **kwargs):
    """Add a network block and (by default) connect to the AP.

    ssid is set as a quoted string; ssid2 as a raw (e.g. hex) value.
    Remaining keyword arguments are mapped onto network parameters:
    names in the quoted list are set with quoting, names in not_quoted
    without. Special keys: raw_psk/password_hex (unquoted overrides),
    peerkey/okc/ocsp, only_add_network (return without connecting) and
    wait_connect=False (select network without waiting). Returns the
    network id.
    """
    logger.info("Connect STA " + self.ifname + " to AP")
    id = self.add_network()
    if ssid:
        self.set_network_quoted(id, "ssid", ssid)
    elif ssid2:
        self.set_network(id, "ssid", ssid2)

    # Parameters whose values must be quoted in the config.
    quoted = ["psk", "identity", "anonymous_identity", "password",
              "machine_identity", "machine_password",
              "ca_cert", "client_cert", "private_key",
              "private_key_passwd", "ca_cert2", "client_cert2",
              "private_key2", "phase1", "phase2", "domain_suffix_match",
              "altsubject_match", "subject_match", "pac_file", "dh_file",
              "bgscan", "ht_mcs", "id_str", "openssl_ciphers",
              "domain_match", "dpp_connector", "sae_password",
              "sae_password_id", "check_cert_subject",
              "machine_ca_cert", "machine_client_cert",
              "machine_private_key", "machine_phase2"]
    for field in quoted:
        if field in kwargs and kwargs[field]:
            self.set_network_quoted(id, field, kwargs[field])

    # Parameters set verbatim (numbers, lists, hex strings, ...).
    not_quoted = ["proto", "key_mgmt", "ieee80211w", "pairwise",
                  "group", "wep_key0", "wep_key1", "wep_key2", "wep_key3",
                  "wep_tx_keyidx", "scan_freq", "freq_list", "eap",
                  "eapol_flags", "fragment_size", "scan_ssid", "auth_alg",
                  "wpa_ptk_rekey", "disable_ht", "disable_vht", "bssid",
                  "disable_max_amsdu", "ampdu_factor", "ampdu_density",
                  "disable_ht40", "disable_sgi", "disable_ldpc",
                  "ht40_intolerant", "update_identifier", "mac_addr",
                  "erp", "bg_scan_period", "bssid_blacklist",
                  "bssid_whitelist", "mem_only_psk", "eap_workaround",
                  "engine", "fils_dh_group", "bssid_hint",
                  "dpp_csign", "dpp_csign_expiry",
                  "dpp_netaccesskey", "dpp_netaccesskey_expiry",
                  "group_mgmt", "owe_group", "owe_only",
                  "owe_ptk_workaround",
                  "roaming_consortium_selection", "ocv",
                  "multi_ap_backhaul_sta", "rx_stbc", "tx_stbc",
                  "ft_eap_pmksa_caching", "beacon_prot",
                  "wpa_deny_ptk0_rekey"]
    for field in not_quoted:
        if field in kwargs and kwargs[field]:
            self.set_network(id, field, kwargs[field])

    # Unquoted overrides for fields that default to quoted handling.
    if "raw_psk" in kwargs and kwargs['raw_psk']:
        self.set_network(id, "psk", kwargs['raw_psk'])
    if "password_hex" in kwargs and kwargs['password_hex']:
        self.set_network(id, "password", kwargs['password_hex'])
    if "peerkey" in kwargs and kwargs['peerkey']:
        self.set_network(id, "peerkey", "1")
    if "okc" in kwargs and kwargs['okc']:
        self.set_network(id, "proactive_key_caching", "1")
    if "ocsp" in kwargs and kwargs['ocsp']:
        self.set_network(id, "ocsp", str(kwargs['ocsp']))
    if "only_add_network" in kwargs and kwargs['only_add_network']:
        return id
    if "wait_connect" not in kwargs or kwargs['wait_connect']:
        if "eap" in kwargs:
            # EAP exchanges can take longer than PSK association.
            self.connect_network(id, timeout=20)
        else:
            self.connect_network(id)
    else:
        self.dump_monitor()
        self.select_network(id)
    return id
|
2013-03-30 10:28:39 +01:00
|
|
|
|
2016-12-29 11:42:49 +01:00
|
|
|
def scan(self, type=None, freq=None, no_wait=False, only_new=False,
         passive=False):
    """Trigger a scan and, unless no_wait, wait for its completion.

    Raises Exception if the SCAN command is rejected, times out, or
    reports CTRL-EVENT-SCAN-FAILED.
    """
    if not no_wait:
        self.dump_monitor()
    if type:
        cmd = "SCAN TYPE=" + type
    else:
        cmd = "SCAN"
    if freq:
        cmd = cmd + " freq=" + str(freq)
    if only_new:
        cmd += " only_new=1"
    if passive:
        cmd += " passive=1"
    if not no_wait:
        # Drain again right before issuing the command so the wait below
        # only sees events from this scan.
        self.dump_monitor()
    res = self.request(cmd)
    if "OK" not in res:
        raise Exception("Failed to trigger scan: " + str(res))
    if no_wait:
        return
    ev = self.wait_event(["CTRL-EVENT-SCAN-RESULTS",
                          "CTRL-EVENT-SCAN-FAILED"], 15)
    if ev is None:
        raise Exception("Scan timed out")
    if "CTRL-EVENT-SCAN-FAILED" in ev:
        raise Exception("Scan failed: " + ev)
|
2013-03-30 10:28:39 +01:00
|
|
|
|
2016-12-29 11:42:49 +01:00
|
|
|
def scan_for_bss(self, bssid, freq=None, force_scan=False, only_new=False,
                 passive=False):
    """Scan (up to 10 times) until *bssid* appears in the BSS table.

    Returns immediately if the BSS is already known and force_scan is
    not set; raises Exception if the BSS is never found.
    """
    if not force_scan and self.get_bss(bssid) is not None:
        return
    for i in range(0, 10):
        self.scan(freq=freq, type="ONLY", only_new=only_new,
                  passive=passive)
        if self.get_bss(bssid) is not None:
            return
    raise Exception("Could not find BSS " + bssid + " in scan")
|
|
|
|
|
2014-12-15 00:13:03 +01:00
|
|
|
def flush_scan_cache(self, freq=2417):
    """Clear the BSS table, retrying once on a different channel.

    Logs (but does not raise) if entries still remain after the second
    attempt - this is a best-effort cleanup between test cases.
    """
    self.request("BSS_FLUSH 0")
    self.scan(freq=freq, only_new=True)
    res = self.request("SCAN_RESULTS")
    if len(res.splitlines()) > 1:
        logger.debug("Scan results remaining after first attempt to flush the results:\n" + res)
        self.request("BSS_FLUSH 0")
        # Second attempt on a different channel to age out stale entries.
        self.scan(freq=2422, only_new=True)
        res = self.request("SCAN_RESULTS")
        if len(res.splitlines()) > 1:
            logger.info("flush_scan_cache: Could not clear all BSS entries. These remain:\n" + res)
|
2014-12-01 11:14:11 +01:00
|
|
|
|
2019-03-17 16:55:43 +01:00
|
|
|
def disconnect_and_stop_scan(self):
    """Disconnect and abort any ongoing scan, consuming both events.

    Waits for one event per issued command (two when ABORT_SCAN was
    accepted) and then drains the monitor sockets.
    """
    self.request("DISCONNECT")
    res = self.request("ABORT_SCAN")
    for i in range(2 if "OK" in res else 1):
        self.wait_event(["CTRL-EVENT-DISCONNECTED",
                         "CTRL-EVENT-SCAN-RESULTS"], timeout=0.5)
    self.dump_monitor()
|
|
|
|
|
2020-02-28 15:45:36 +01:00
|
|
|
def roam(self, bssid, fail_test=False, assoc_reject_ok=False,
         check_bssid=True):
    """Roam to *bssid* using the ROAM command and verify the outcome.

    fail_test expects the roam NOT to complete; assoc_reject_ok treats a
    CTRL-EVENT-ASSOC-REJECT as acceptable rather than fatal. With
    check_bssid the final status is verified to point at *bssid*.
    """
    self.dump_monitor()
    if "OK" not in self.request("ROAM " + bssid):
        raise Exception("ROAM failed")
    if fail_test:
        if assoc_reject_ok:
            ev = self.wait_event(["CTRL-EVENT-CONNECTED",
                                  "CTRL-EVENT-ASSOC-REJECT"], timeout=1)
        else:
            ev = self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=1)
        # Only an actual connection counts as an unexpected success here.
        if ev is not None and "CTRL-EVENT-ASSOC-REJECT" not in ev:
            raise Exception("Unexpected connection")
        self.dump_monitor()
        return
    if assoc_reject_ok:
        ev = self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=10)
    else:
        ev = self.wait_event(["CTRL-EVENT-CONNECTED",
                              "CTRL-EVENT-ASSOC-REJECT"], timeout=10)
    if ev is None:
        raise Exception("Roaming with the AP timed out")
    if "CTRL-EVENT-ASSOC-REJECT" in ev:
        raise Exception("Roaming association rejected")
    self.dump_monitor()
    if check_bssid and self.get_status_field('bssid') != bssid:
        raise Exception("Did not roam to correct BSSID")
|
2013-04-28 21:00:58 +02:00
|
|
|
|
2014-03-23 18:23:16 +01:00
|
|
|
def roam_over_ds(self, bssid, fail_test=False):
    """Roam to *bssid* using FT over-the-DS (FT_DS command).

    With fail_test set, a connection within one second is treated as an
    error; otherwise the function waits for the roam to complete and
    raises on timeout or association rejection.
    """
    self.dump_monitor()
    if "OK" not in self.request("FT_DS " + bssid):
        raise Exception("FT_DS failed")
    if fail_test:
        ev = self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=1)
        if ev is not None:
            raise Exception("Unexpected connection")
        self.dump_monitor()
        return
    ev = self.wait_event(["CTRL-EVENT-CONNECTED",
                          "CTRL-EVENT-ASSOC-REJECT"], timeout=10)
    if ev is None:
        raise Exception("Roaming with the AP timed out")
    if "CTRL-EVENT-ASSOC-REJECT" in ev:
        raise Exception("Roaming association rejected")
    self.dump_monitor()
|
|
|
|
|
2013-04-28 21:00:58 +02:00
|
|
|
def wps_reg(self, bssid, pin, new_ssid=None, key_mgmt=None, cipher=None,
            new_passphrase=None, no_wait=False):
    """Run WPS external registrar operation against *bssid* with *pin*.

    When new_ssid is given, the AP is reconfigured (new_ssid/key_mgmt/
    cipher/new_passphrase must then all be provided) and WPS-SUCCESS is
    expected; otherwise current credentials are learned and
    WPS-CRED-RECEIVED followed by WPS-FAIL is expected. Waits for the
    connection to complete unless no_wait is set.
    """
    self.dump_monitor()
    if new_ssid:
        # SSID and passphrase are passed hex-encoded on the ctrl iface.
        self.request("WPS_REG " + bssid + " " + pin + " " +
                     binascii.hexlify(new_ssid.encode()).decode() + " " +
                     key_mgmt + " " + cipher + " " +
                     binascii.hexlify(new_passphrase.encode()).decode())
        if no_wait:
            return
        ev = self.wait_event(["WPS-SUCCESS"], timeout=15)
    else:
        self.request("WPS_REG " + bssid + " " + pin)
        if no_wait:
            return
        ev = self.wait_event(["WPS-CRED-RECEIVED"], timeout=15)
        if ev is None:
            raise Exception("WPS cred timed out")
        ev = self.wait_event(["WPS-FAIL"], timeout=15)
    if ev is None:
        raise Exception("WPS timed out")
    self.wait_connected(timeout=15)
|
2013-11-02 12:04:06 +01:00
|
|
|
|
|
|
|
def relog(self):
    """Issue the RELOG command (rotate/reopen the debug log file)."""
    self.global_request("RELOG")
|
2013-11-24 19:50:11 +01:00
|
|
|
|
|
|
|
def wait_completed(self, timeout=10):
    """Poll wpa_state until COMPLETED; raise after *timeout* seconds."""
    attempts = timeout * 2  # poll every 0.5 s
    for _ in range(attempts):
        if self.get_status_field("wpa_state") == "COMPLETED":
            return
        time.sleep(0.5)
    raise Exception("Timeout while waiting for COMPLETED state")
|
2013-12-25 10:17:32 +01:00
|
|
|
|
|
|
|
def get_capability(self, field):
    """Return GET_CAPABILITY values as a list, or None on failure."""
    res = self.request("GET_CAPABILITY " + field)
    return None if "FAIL" in res else res.split(' ')
|
2013-12-25 19:49:02 +01:00
|
|
|
|
2015-02-04 10:30:24 +01:00
|
|
|
def get_bss(self, bssid, ifname=None):
    """Return the BSS table entry for *bssid* as a dict, or None.

    ifname selects the main or group interface; any other name returns
    None. Returns None when the entry does not exist or is empty.
    """
    if not ifname or ifname == self.ifname:
        res = self.request("BSS " + bssid)
    elif ifname == self.group_ifname:
        res = self.group_request("BSS " + bssid)
    else:
        return None

    if "FAIL" in res:
        return None
    lines = res.splitlines()
    vals = dict()
    for l in lines:
        # Each line is name=value; values may themselves contain '='.
        [name, value] = l.split('=', 1)
        vals[name] = value
    if len(vals) == 0:
        return None
    return vals
|
2013-12-29 18:25:42 +01:00
|
|
|
|
|
|
|
def get_pmksa(self, bssid):
    """Return the PMKSA cache entry for *bssid* as a dict, or None.

    Parses the PMKSA list output; entries may optionally carry a trailing
    FILS cache identifier column.
    """
    res = self.request("PMKSA")
    lines = res.splitlines()
    for l in lines:
        if bssid not in l:
            continue
        vals = dict()
        try:
            # Five-column format (no cache identifier).
            [index, aa, pmkid, expiration, opportunistic] = l.split(' ')
            cache_id = None
        except ValueError:
            # Six-column format including the cache identifier.
            [index, aa, pmkid, expiration, opportunistic, cache_id] = l.split(' ')
        vals['index'] = index
        vals['pmkid'] = pmkid
        vals['expiration'] = expiration
        vals['opportunistic'] = opportunistic
        if cache_id != None:
            vals['cache_id'] = cache_id
        return vals
    return None
|
2014-02-03 20:52:33 +01:00
|
|
|
|
2020-02-10 01:35:53 +01:00
|
|
|
def get_pmk(self, network_id):
    """Return the cached PMK for the current BSSID of *network_id*.

    Uses PMKSA_GET and matches the entry against the currently
    associated BSSID; returns None if no matching entry exists.
    """
    bssid = self.get_status_field('bssid')
    res = self.request("PMKSA_GET %d" % network_id)
    for val in res.splitlines():
        if val.startswith(bssid):
            # Third space-separated field of the matching line is the PMK.
            return val.split(' ')[2]
    return None
|
|
|
|
|
2014-02-03 20:52:33 +01:00
|
|
|
def get_sta(self, addr, info=None, next=False):
    """Return STA information as a dict (STA/STA-FIRST/STA-NEXT).

    addr=None fetches the first station; next=True uses STA-NEXT for
    iteration; info appends an extra selector to the command. The first
    output line is the station address, the rest name=value pairs.
    """
    cmd = "STA-NEXT " if next else "STA "
    if addr is None:
        res = self.request("STA-FIRST")
    elif info:
        res = self.request(cmd + addr + " " + info)
    else:
        res = self.request(cmd + addr)
    lines = res.splitlines()
    vals = dict()
    first = True
    for l in lines:
        if first:
            # First line carries the MAC address only.
            vals['addr'] = l
            first = False
        else:
            [name, value] = l.split('=', 1)
            vals[name] = value
    return vals
|
2014-03-08 20:25:47 +01:00
|
|
|
|
|
|
|
def mgmt_rx(self, timeout=5):
    """Wait for an MGMT-RX event and return the parsed frame as a dict.

    Returns None on timeout. The returned dict contains the radio
    metadata (freq, datarate, ssi_signal), the raw frame bytes, and the
    decoded 802.11 header fields (fc, subtype, duration, da, sa, bssid,
    seq_ctrl) plus the payload after the 24-byte header.
    """
    ev = self.wait_event(["MGMT-RX"], timeout=timeout)
    if ev is None:
        return None
    msg = {}
    items = ev.split(' ')
    field, val = items[1].split('=')
    if field != "freq":
        raise Exception("Unexpected MGMT-RX event format: " + ev)
    msg['freq'] = val

    field, val = items[2].split('=')
    if field != "datarate":
        raise Exception("Unexpected MGMT-RX event format: " + ev)
    msg['datarate'] = val

    field, val = items[3].split('=')
    if field != "ssi_signal":
        raise Exception("Unexpected MGMT-RX event format: " + ev)
    msg['ssi_signal'] = val

    # Fifth item is the hex-encoded frame body.
    frame = binascii.unhexlify(items[4])
    msg['frame'] = frame

    # Little-endian 802.11 header: FC, duration, DA, SA, BSSID, seq ctrl.
    hdr = struct.unpack('<HH6B6B6BH', frame[0:24])
    msg['fc'] = hdr[0]
    msg['subtype'] = (hdr[0] >> 4) & 0xf
    hdr = hdr[1:]
    msg['duration'] = hdr[0]
    hdr = hdr[1:]
    msg['da'] = "%02x:%02x:%02x:%02x:%02x:%02x" % hdr[0:6]
    hdr = hdr[6:]
    msg['sa'] = "%02x:%02x:%02x:%02x:%02x:%02x" % hdr[0:6]
    hdr = hdr[6:]
    msg['bssid'] = "%02x:%02x:%02x:%02x:%02x:%02x" % hdr[0:6]
    hdr = hdr[6:]
    msg['seq_ctrl'] = hdr[0]
    msg['payload'] = frame[24:]

    return msg
|
2014-12-20 10:51:30 +01:00
|
|
|
|
|
|
|
def wait_connected(self, timeout=10, error="Connection timed out"):
    """Wait for CTRL-EVENT-CONNECTED; raise Exception(*error*) on timeout."""
    ev = self.wait_event(["CTRL-EVENT-CONNECTED"], timeout=timeout)
    if ev is not None:
        return ev
    raise Exception(error)
|
|
|
|
|
2016-07-13 19:06:03 +02:00
|
|
|
def wait_disconnected(self, timeout=None, error="Disconnection timed out"):
    """Wait for CTRL-EVENT-DISCONNECTED; remote hosts get a longer default."""
    if timeout is None:
        timeout = 10 if self.hostname is None else 30
    ev = self.wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=timeout)
    if ev is not None:
        return ev
    raise Exception(error)
|
2015-02-06 02:31:00 +01:00
|
|
|
|
|
|
|
def get_group_ifname(self):
    """Return the P2P group interface name, falling back to the main one."""
    return self.group_ifname or self.ifname
|
2015-02-08 21:06:07 +01:00
|
|
|
|
|
|
|
def get_config(self):
    """Return the global configuration (DUMP command) as a dict.

    Raises Exception if the DUMP command fails.
    """
    res = self.request("DUMP")
    if res.startswith("FAIL"):
        raise Exception("DUMP failed")
    lines = res.splitlines()
    vals = dict()
    for l in lines:
        # name=value pairs; values may contain further '=' characters.
        [name, value] = l.split('=', 1)
        vals[name] = value
    return vals
|
2015-07-13 08:49:10 +02:00
|
|
|
|
|
|
|
def asp_provision(self, peer, adv_id, adv_mac, session_id, session_mac,
                  method="1000", info="", status=None, cpt=None, role=None):
    """Issue a P2PS (ASP) provisioning request or response.

    With status=None a P2P_ASP_PROVISION request is sent using info and
    method; otherwise a P2P_ASP_PROVISION_RESP with the given status.
    Optional role and cpt are appended when provided. Raises Exception
    if the command is rejected.
    """
    if status is None:
        cmd = "P2P_ASP_PROVISION"
        params = "info='%s' method=%s" % (info, method)
    else:
        cmd = "P2P_ASP_PROVISION_RESP"
        params = "status=%d" % status

    if role is not None:
        params += " role=" + role
    if cpt is not None:
        params += " cpt=" + cpt

    if "OK" not in self.global_request("%s %s adv_id=%s adv_mac=%s session=%d session_mac=%s %s" %
                                       (cmd, peer, adv_id, adv_mac, session_id, session_mac, params)):
        raise Exception("%s request failed" % cmd)
|
2018-08-02 14:36:24 +02:00
|
|
|
|
|
|
|
def note(self, txt):
|
|
|
|
self.request("NOTE " + txt)
|
2018-12-21 23:55:26 +01:00
|
|
|
|
2020-03-02 10:51:38 +01:00
|
|
|
def save_config(self):
|
|
|
|
if "OK" not in self.request("SAVE_CONFIG"):
|
|
|
|
raise Exception("Failed to save configuration file")
|
|
|
|
|
2018-12-21 23:55:26 +01:00
|
|
|
def wait_regdom(self, country_ie=False):
|
|
|
|
for i in range(5):
|
|
|
|
ev = self.wait_event(["CTRL-EVENT-REGDOM-CHANGE"], timeout=1)
|
|
|
|
if ev is None:
|
|
|
|
break
|
|
|
|
if country_ie:
|
|
|
|
if "init=COUNTRY_IE" in ev:
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
break
|
2019-03-09 15:50:48 +01:00
|
|
|
|
|
|
|
def dpp_qr_code(self, uri):
|
|
|
|
res = self.request("DPP_QR_CODE " + uri)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("Failed to parse QR Code URI")
|
|
|
|
return int(res)
|
2019-03-09 17:13:10 +01:00
|
|
|
|
2019-12-03 17:27:52 +01:00
|
|
|
def dpp_nfc_uri(self, uri):
|
|
|
|
res = self.request("DPP_NFC_URI " + uri)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("Failed to parse NFC URI")
|
|
|
|
return int(res)
|
|
|
|
|
2019-03-09 17:13:10 +01:00
|
|
|
def dpp_bootstrap_gen(self, type="qrcode", chan=None, mac=None, info=None,
|
|
|
|
curve=None, key=None):
|
|
|
|
cmd = "DPP_BOOTSTRAP_GEN type=" + type
|
|
|
|
if chan:
|
|
|
|
cmd += " chan=" + chan
|
|
|
|
if mac:
|
|
|
|
if mac is True:
|
|
|
|
mac = self.own_addr()
|
|
|
|
cmd += " mac=" + mac.replace(':', '')
|
|
|
|
if info:
|
|
|
|
cmd += " info=" + info
|
|
|
|
if curve:
|
|
|
|
cmd += " curve=" + curve
|
|
|
|
if key:
|
|
|
|
cmd += " key=" + key
|
|
|
|
res = self.request(cmd)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("Failed to generate bootstrapping info")
|
|
|
|
return int(res)
|
2019-03-17 17:19:58 +01:00
|
|
|
|
|
|
|
def dpp_listen(self, freq, netrole=None, qr=None, role=None):
|
|
|
|
cmd = "DPP_LISTEN " + str(freq)
|
|
|
|
if netrole:
|
|
|
|
cmd += " netrole=" + netrole
|
|
|
|
if qr:
|
|
|
|
cmd += " qr=" + qr
|
|
|
|
if role:
|
|
|
|
cmd += " role=" + role
|
|
|
|
if "OK" not in self.request(cmd):
|
|
|
|
raise Exception("Failed to start listen operation")
|
2019-03-17 18:22:11 +01:00
|
|
|
|
2019-03-18 10:36:43 +01:00
|
|
|
def dpp_auth_init(self, peer=None, uri=None, conf=None, configurator=None,
|
|
|
|
extra=None, own=None, role=None, neg_freq=None,
|
2019-03-24 21:17:49 +01:00
|
|
|
ssid=None, passphrase=None, expect_fail=False,
|
2019-11-28 14:23:09 +01:00
|
|
|
tcp_addr=None, tcp_port=None, conn_status=False,
|
2020-02-11 05:35:15 +01:00
|
|
|
ssid_charset=None, nfc_uri=None, netrole=None):
|
2019-03-18 10:36:43 +01:00
|
|
|
cmd = "DPP_AUTH_INIT"
|
|
|
|
if peer is None:
|
2019-12-03 17:27:52 +01:00
|
|
|
if nfc_uri:
|
|
|
|
peer = self.dpp_nfc_uri(nfc_uri)
|
|
|
|
else:
|
|
|
|
peer = self.dpp_qr_code(uri)
|
2019-03-18 10:36:43 +01:00
|
|
|
cmd += " peer=%d" % peer
|
|
|
|
if own is not None:
|
|
|
|
cmd += " own=%d" % own
|
|
|
|
if role:
|
|
|
|
cmd += " role=" + role
|
|
|
|
if extra:
|
|
|
|
cmd += " " + extra
|
|
|
|
if conf:
|
|
|
|
cmd += " conf=" + conf
|
|
|
|
if configurator is not None:
|
|
|
|
cmd += " configurator=%d" % configurator
|
|
|
|
if neg_freq:
|
|
|
|
cmd += " neg_freq=%d" % neg_freq
|
|
|
|
if ssid:
|
|
|
|
cmd += " ssid=" + binascii.hexlify(ssid.encode()).decode()
|
2019-11-28 14:23:09 +01:00
|
|
|
if ssid_charset:
|
|
|
|
cmd += " ssid_charset=%d" % ssid_charset
|
2019-03-18 10:36:43 +01:00
|
|
|
if passphrase:
|
|
|
|
cmd += " pass=" + binascii.hexlify(passphrase.encode()).decode()
|
2019-03-24 21:17:49 +01:00
|
|
|
if tcp_addr:
|
|
|
|
cmd += " tcp_addr=" + tcp_addr
|
|
|
|
if tcp_port:
|
|
|
|
cmd += " tcp_port=" + tcp_port
|
2019-09-15 15:19:45 +02:00
|
|
|
if conn_status:
|
|
|
|
cmd += " conn_status=1"
|
2020-02-11 05:35:15 +01:00
|
|
|
if netrole:
|
|
|
|
cmd += " netrole=" + netrole
|
2019-03-18 10:36:43 +01:00
|
|
|
res = self.request(cmd)
|
|
|
|
if expect_fail:
|
|
|
|
if "FAIL" not in res:
|
|
|
|
raise Exception("DPP authentication started unexpectedly")
|
|
|
|
return
|
|
|
|
if "OK" not in res:
|
|
|
|
raise Exception("Failed to initiate DPP Authentication")
|
|
|
|
|
2019-03-17 18:22:11 +01:00
|
|
|
def dpp_pkex_init(self, identifier, code, role=None, key=None, curve=None,
|
|
|
|
extra=None, use_id=None, allow_fail=False):
|
|
|
|
if use_id is None:
|
|
|
|
id1 = self.dpp_bootstrap_gen(type="pkex", key=key, curve=curve)
|
|
|
|
else:
|
|
|
|
id1 = use_id
|
|
|
|
cmd = "own=%d " % id1
|
|
|
|
if identifier:
|
|
|
|
cmd += "identifier=%s " % identifier
|
|
|
|
cmd += "init=1 "
|
|
|
|
if role:
|
|
|
|
cmd += "role=%s " % role
|
|
|
|
if extra:
|
|
|
|
cmd += extra + " "
|
|
|
|
cmd += "code=%s" % code
|
|
|
|
res = self.request("DPP_PKEX_ADD " + cmd)
|
|
|
|
if allow_fail:
|
|
|
|
return id1
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("Failed to set PKEX data (initiator)")
|
|
|
|
return id1
|
|
|
|
|
|
|
|
def dpp_pkex_resp(self, freq, identifier, code, key=None, curve=None,
|
|
|
|
listen_role=None, use_id=None):
|
|
|
|
if use_id is None:
|
|
|
|
id0 = self.dpp_bootstrap_gen(type="pkex", key=key, curve=curve)
|
|
|
|
else:
|
|
|
|
id0 = use_id
|
|
|
|
cmd = "own=%d " % id0
|
|
|
|
if identifier:
|
|
|
|
cmd += "identifier=%s " % identifier
|
|
|
|
cmd += "code=%s" % code
|
|
|
|
res = self.request("DPP_PKEX_ADD " + cmd)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("Failed to set PKEX data (responder)")
|
|
|
|
self.dpp_listen(freq, role=listen_role)
|
|
|
|
return id0
|
2019-03-18 12:36:32 +01:00
|
|
|
|
|
|
|
def dpp_configurator_add(self, curve=None, key=None):
|
|
|
|
cmd = "DPP_CONFIGURATOR_ADD"
|
|
|
|
if curve:
|
|
|
|
cmd += " curve=" + curve
|
|
|
|
if key:
|
|
|
|
cmd += " key=" + key
|
|
|
|
res = self.request(cmd)
|
|
|
|
if "FAIL" in res:
|
|
|
|
raise Exception("Failed to add configurator")
|
|
|
|
return int(res)
|
|
|
|
|
|
|
|
def dpp_configurator_remove(self, conf_id):
|
|
|
|
res = self.request("DPP_CONFIGURATOR_REMOVE %d" % conf_id)
|
|
|
|
if "OK" not in res:
|
|
|
|
raise Exception("DPP_CONFIGURATOR_REMOVE failed")
|