mirror of https://gitee.com/peng_zhihui/Dummy-Robot
synced 2025-09-27 02:09:12 +08:00

[Sw] Add CLI-Tool.

This commit is contained in:
parent  fe1007c668
commit  be2804ac0e
30  3.Software/CLI-Tool/.gitignore  vendored  Normal file
@@ -0,0 +1,30 @@

# Python Distribution / packaging
.Python
#env/
#build/
#develop-eggs/
/dist/
#downloads/
#eggs/
#.eggs/
#lib/
#lib64/
#parts/
#sdist/
#var/
/*.egg-info/
#.installed.cfg
#*.egg
/MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
*.pyc

# Installer logs
pip-log.txt
pip-delete-this-directory.txt
8  3.Software/CLI-Tool/.idea/.gitignore  generated  vendored  Normal file
@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Datasource local storage ignored files
/../../../../../../../:\onWorking\_Private\REF-STM32F4\3.Software\cmd_tool\.idea/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/
17  3.Software/CLI-Tool/.idea/cmd_tool.iml  generated  Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
    <orderEntry type="library" name="workspace" level="project" />
    <orderEntry type="library" name="ROS" level="project" />
  </component>
  <component name="PyDocumentationSettings">
    <option name="format" value="PLAIN" />
    <option name="myDocStringFormat" value="Plain" />
  </component>
  <component name="TestRunnerService">
    <option name="PROJECT_TEST_RUNNER" value="pytest" />
  </component>
</module>
14  3.Software/CLI-Tool/.idea/deployment.xml  generated  Normal file
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="PublishConfigData" remoteFilesAllowedToDisappearOnAutoupload="false">
    <serverData>
      <paths name="root@192.168.31.101:8022">
        <serverdata>
          <mappings>
            <mapping local="$PROJECT_DIR$" web="/" />
          </mappings>
        </serverdata>
      </paths>
    </serverData>
  </component>
</project>
4  3.Software/CLI-Tool/.idea/encodings.xml  generated  Normal file
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="Encoding" addBOMForNewFiles="with NO BOM" />
</project>
21  3.Software/CLI-Tool/.idea/inspectionProfiles/Project_Default.xml  generated  Normal file
@@ -0,0 +1,21 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="ignoredPackages">
        <value>
          <list size="8">
            <item index="0" class="java.lang.String" itemvalue="tqdm" />
            <item index="1" class="java.lang.String" itemvalue="scipy" />
            <item index="2" class="java.lang.String" itemvalue="tensorboard" />
            <item index="3" class="java.lang.String" itemvalue="seaborn" />
            <item index="4" class="java.lang.String" itemvalue="opencv-python" />
            <item index="5" class="java.lang.String" itemvalue="PyYAML" />
            <item index="6" class="java.lang.String" itemvalue="matplotlib" />
            <item index="7" class="java.lang.String" itemvalue="pycocotools" />
          </list>
        </value>
      </option>
    </inspection_tool>
  </profile>
</component>
6  3.Software/CLI-Tool/.idea/inspectionProfiles/profiles_settings.xml  generated  Normal file
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
7  3.Software/CLI-Tool/.idea/libraries/ROS.xml  generated  Normal file
@@ -0,0 +1,7 @@
<component name="libraryTable">
  <library name="ROS">
    <CLASSES />
    <JAVADOC />
    <SOURCES />
  </library>
</component>
7  3.Software/CLI-Tool/.idea/libraries/workspace.xml  generated  Normal file
@@ -0,0 +1,7 @@
<component name="libraryTable">
  <library name="workspace">
    <CLASSES />
    <JAVADOC />
    <SOURCES />
  </library>
</component>
4  3.Software/CLI-Tool/.idea/misc.xml  generated  Normal file
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7" project-jdk-type="Python SDK" />
</project>
8  3.Software/CLI-Tool/.idea/modules.xml  generated  Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/cmd_tool.iml" filepath="$PROJECT_DIR$/.idea/cmd_tool.iml" />
    </modules>
  </component>
</project>
12  3.Software/CLI-Tool/.idea/ros.xml  generated  Normal file
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ROSSettings">
    <option name="rosPath" value="" />
    <option name="workspacePath" value="" />
    <option name="additionalSources" value="" />
    <option name="excludedXmls" value="" />
    <option name="licenseLinkType" value="Summary" />
    <option name="knownKeys" value="" />
    <option name="depSources" value="https://raw.githubusercontent.com/ros/rosdistro/master/rosdep/base.yaml"https://raw.githubusercontent.com/ros/rosdistro/master/rosdep/python.yaml"https://raw.githubusercontent.com/ros/rosdistro/master/rosdep/ruby.yaml" />
  </component>
</project>
6  3.Software/CLI-Tool/.idea/vcs.xml  generated  Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
  </component>
</project>
11  3.Software/CLI-Tool/_addition/ref_demo.py  Normal file
@@ -0,0 +1,11 @@
#!/usr/bin/env python3

from __future__ import print_function
import ref_tool

# Find a connected REF-Unit (this will block until you connect one)
print("finding a ref_tool...")
my_drive = ref_tool.find_any()

# Find a REF-Unit that is connected on the serial port /dev/ttyUSB0
# my_drive = ref_tool.find_any("serial:/dev/ttyUSB0")
5  3.Software/CLI-Tool/fibre/__init__.py  Normal file
@@ -0,0 +1,5 @@

from .discovery import find_any, find_all
from .utils import Event, Logger, TimeoutError
from .protocol import ChannelBrokenException, ChannelDamagedException
from .shell import launch_shell
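Aside (not part of this commit): the names re-exported above are enough for a minimal host-side session. A hedged sketch follows; the attribute path in the last line is hypothetical, since real names come from the endpoint JSON reported by the firmware.

    import fibre

    logger = fibre.Logger(verbose=True)
    dev = fibre.find_any(path="usb", timeout=10, logger=logger)  # blocks until a node is found
    if dev is not None:
        print(dev)                           # RemoteObject.__str__ dumps the endpoint tree
        # dev.some_axis.vel_setpoint = 1000  # hypothetical: property writes become endpoint operations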
129  3.Software/CLI-Tool/fibre/discovery.py  Normal file
@@ -0,0 +1,129 @@
"""
Provides functions for the discovery of Fibre nodes
"""

import sys
import json
import time
import threading
import traceback
import fibre.protocol
import fibre.utils
import fibre.remote_object
from fibre.utils import Event, Logger
from fibre.protocol import ChannelBrokenException, TimeoutError

# Load all installed transport layers

channel_types = {}

try:
    import fibre.usbbulk_transport
    channel_types['usb'] = fibre.usbbulk_transport.discover_channels
except ImportError:
    pass

try:
    import fibre.serial_transport
    channel_types['serial'] = fibre.serial_transport.discover_channels
except ImportError:
    pass

try:
    import fibre.tcp_transport
    channel_types['tcp'] = fibre.tcp_transport.discover_channels
except ImportError:
    pass

try:
    import fibre.udp_transport
    channel_types['udp'] = fibre.udp_transport.discover_channels
except ImportError:
    pass

def noprint(text):
    pass

def find_all(path, serial_number,
             did_discover_object_callback,
             search_cancellation_token,
             channel_termination_token,
             logger):
    """
    Starts scanning for Fibre nodes that match the specified path spec and calls
    the callback for each Fibre node that is found.
    This function is non-blocking.
    """

    def did_discover_channel(channel):
        """
        Inits an object from a given channel and then calls did_discover_object_callback
        with the created object
        This queries the endpoint 0 on that channel to gain information
        about the interface, which is then used to init the corresponding object.
        """
        try:
            logger.debug("Connecting to device on " + channel._name)
            try:
                json_bytes = channel.remote_endpoint_read_buffer(0)
            except (TimeoutError, ChannelBrokenException):
                logger.debug("no response - probably incompatible")
                return
            json_crc16 = fibre.protocol.calc_crc16(fibre.protocol.PROTOCOL_VERSION, json_bytes)
            channel._interface_definition_crc = json_crc16
            try:
                json_string = json_bytes.decode("ascii")
            except UnicodeDecodeError:
                logger.debug("device responded on endpoint 0 with something that is not ASCII")
                return
            logger.debug("JSON: " + json_string.replace('{"name"', '\n{"name"'))
            logger.debug("JSON checksum: 0x{:02X} 0x{:02X}".format(json_crc16 & 0xff, (json_crc16 >> 8) & 0xff))
            try:
                json_data = json.loads(json_string)
            except json.decoder.JSONDecodeError as error:
                logger.debug("device responded on endpoint 0 with something that is not JSON: " + str(error))
                return
            json_data = {"name": "fibre_node", "members": json_data}
            obj = fibre.remote_object.RemoteObject(json_data, None, channel, logger)

            obj.__dict__['_json_data'] = json_data['members']
            obj.__dict__['_json_crc'] = json_crc16

            device_serial_number = fibre.utils.get_serial_number_str(obj)
            if serial_number != None and device_serial_number != serial_number:
                logger.debug("Ignoring device with serial number {}".format(device_serial_number))
                return
            did_discover_object_callback(obj)
        except Exception:
            logger.debug("Unexpected exception after discovering channel: " + traceback.format_exc())

    # For each connection type, kick off an appropriate discovery loop
    for search_spec in path.split(','):
        prefix = search_spec.split(':')[0]
        the_rest = ':'.join(search_spec.split(':')[1:])
        if prefix in channel_types:
            t = threading.Thread(target=channel_types[prefix],
                                 args=(the_rest, serial_number, did_discover_channel, search_cancellation_token, channel_termination_token, logger))
            t.daemon = True
            t.start()
        else:
            raise Exception("Invalid path spec \"{}\"".format(search_spec))


def find_any(path="usb", serial_number=None,
             search_cancellation_token=None, channel_termination_token=None,
             timeout=None, logger=Logger(verbose=False)):
    """
    Blocks until the first matching Fibre node is connected and then returns that node
    """
    result = [ None ]
    done_signal = Event(search_cancellation_token)
    def did_discover_object(obj):
        result[0] = obj
        done_signal.set()
    find_all(path, serial_number, did_discover_object, done_signal, channel_termination_token, logger)
    try:
        done_signal.wait(timeout=timeout)
    finally:
        done_signal.set() # terminate find_all
    return result[0]
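Aside (not part of this commit): find_all above is callback-driven and non-blocking, with an Event doubling as the cancellation token. A hedged sketch of scanning two transports at once; the path spec "usb,serial:/dev/ttyUSB0" is only an example.

    import fibre
    from fibre.utils import Event, Logger

    shutdown = Event()           # call shutdown.set() to stop scanning

    def on_found(obj):
        print("discovered node:")
        print(obj)               # dumps the endpoint tree

    fibre.find_all("usb,serial:/dev/ttyUSB0", None, on_found,
                   shutdown, shutdown, Logger(verbose=False))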
365  3.Software/CLI-Tool/fibre/protocol.py  Normal file
@@ -0,0 +1,365 @@
# See protocol.hpp for an overview of the protocol

import time
import struct
import sys
import threading
import traceback
#import fibre.utils
from fibre.utils import Event, wait_any, TimeoutError

import abc
if sys.version_info >= (3, 4):
    ABC = abc.ABC
else:
    ABC = abc.ABCMeta('ABC', (), {})

if sys.version_info < (3, 3):
    from monotonic import monotonic
    time.monotonic = monotonic

SYNC_BYTE = 0xAA
CRC8_INIT = 0x42
CRC16_INIT = 0x1337
PROTOCOL_VERSION = 1

CRC8_DEFAULT = 0x37 # this must match the polynomial in the C++ implementation
CRC16_DEFAULT = 0x3d65 # this must match the polynomial in the C++ implementation

MAX_PACKET_SIZE = 128

def calc_crc(remainder, value, polynomial, bitwidth):
    topbit = (1 << (bitwidth - 1))

    # Bring the next byte into the remainder.
    remainder ^= (value << (bitwidth - 8))
    for bitnumber in range(0, 8):
        if (remainder & topbit):
            remainder = (remainder << 1) ^ polynomial
        else:
            remainder = (remainder << 1)

    return remainder & ((1 << bitwidth) - 1)

def calc_crc8(remainder, value):
    if isinstance(value, bytearray) or isinstance(value, bytes) or isinstance(value, list):
        for byte in value:
            if not isinstance(byte, int):
                byte = ord(byte)
            remainder = calc_crc(remainder, byte, CRC8_DEFAULT, 8)
    else:
        remainder = calc_crc(remainder, value, CRC8_DEFAULT, 8)
    return remainder

def calc_crc16(remainder, value):
    if isinstance(value, bytearray) or isinstance(value, bytes) or isinstance(value, list):
        for byte in value:
            if not isinstance(byte, int):
                byte = ord(byte)
            remainder = calc_crc(remainder, byte, CRC16_DEFAULT, 16)
    else:
        remainder = calc_crc(remainder, value, CRC16_DEFAULT, 16)
    return remainder

# Can be verified with http://www.sunshine2k.de/coding/javascript/crc/crc_js.html:
#print(hex(calc_crc8(0x12, [1, 2, 3, 4, 5, 0x10, 0x13, 0x37])))
#print(hex(calc_crc16(0xfeef, [1, 2, 3, 4, 5, 0x10, 0x13, 0x37])))

class DeviceInitException(Exception):
    pass

class ChannelDamagedException(Exception):
    """
    Raised when the channel is temporarily broken and a
    resend of the message might be successful
    """
    pass

class ChannelBrokenException(Exception):
    """
    Raised when the channel is permanently broken
    """
    pass


class StreamSource(ABC):
    @abc.abstractmethod
    def get_bytes(self, n_bytes, deadline):
        pass

class StreamSink(ABC):
    @abc.abstractmethod
    def process_bytes(self, bytes):
        pass

class PacketSource(ABC):
    @abc.abstractmethod
    def get_packet(self, deadline):
        pass

class PacketSink(ABC):
    @abc.abstractmethod
    def process_packet(self, packet):
        pass


class StreamToPacketSegmenter(StreamSink):
    def __init__(self, output):
        self._header = []
        self._packet = []
        self._packet_length = 0
        self._output = output

    def process_bytes(self, bytes):
        """
        Processes an arbitrary number of bytes. If one or more full packets are
        are received, they are sent to this instance's output PacketSink.
        Incomplete packets are buffered between subsequent calls to this function.
        """

        for byte in bytes:
            if (len(self._header) < 3):
                # Process header byte
                self._header.append(byte)
                if (len(self._header) == 1) and (self._header[0] != SYNC_BYTE):
                    self._header = []
                elif (len(self._header) == 2) and (self._header[1] & 0x80):
                    self._header = [] # TODO: support packets larger than 128 bytes
                elif (len(self._header) == 3) and calc_crc8(CRC8_INIT, self._header):
                    self._header = []
                elif (len(self._header) == 3):
                    self._packet_length = self._header[1] + 2
            else:
                # Process payload byte
                self._packet.append(byte)

            # If both header and packet are fully received, hand it on to the packet processor
            if (len(self._header) == 3) and (len(self._packet) == self._packet_length):
                if calc_crc16(CRC16_INIT, self._packet) == 0:
                    self._output.process_packet(self._packet[:-2])
                self._header = []
                self._packet = []
                self._packet_length = 0


class StreamBasedPacketSink(PacketSink):
    def __init__(self, output):
        self._output = output

    def process_packet(self, packet):
        if (len(packet) >= MAX_PACKET_SIZE):
            raise NotImplementedError("packet larger than 127 currently not supported")

        header = bytearray()
        header.append(SYNC_BYTE)
        header.append(len(packet))
        header.append(calc_crc8(CRC8_INIT, header))

        self._output.process_bytes(header)
        self._output.process_bytes(packet)

        # append CRC in big endian
        crc16 = calc_crc16(CRC16_INIT, packet)
        self._output.process_bytes(struct.pack('>H', crc16))

class PacketFromStreamConverter(PacketSource):
    def __init__(self, input):
        self._input = input

    def get_packet(self, deadline):
        """
        Requests bytes from the underlying input stream until a full packet is
        received or the deadline is reached, in which case None is returned. A
        deadline before the current time corresponds to non-blocking mode.
        """
        while True:
            header = bytes()
            '''
            # TODO: sometimes this call hangs, even though the device apparently sent something
            header = header + self._input.get_bytes_or_fail(1, deadline)
            if (header[0] != SYNC_BYTE):
                #print("sync byte mismatch")
                continue

            header = header + self._input.get_bytes_or_fail(1, deadline)
            if (header[1] & 0x80):
                #print("packet too large")
                continue # TODO: support packets larger than 128 bytes

            header = header + self._input.get_bytes_or_fail(1, deadline)
            if calc_crc8(CRC8_INIT, header) != 0:
                #print("crc8 mismatch")
                continue

            packet_length = header[1] + 2
            #print("wait for {} bytes".format(packet_length))
            packet = self._input.get_bytes_or_fail(packet_length, deadline)
            '''
            header = self._input.get_bytes_or_fail(3, deadline)
            #print("wait for {} bytes".format(packet_length))
            packet = self._input.get_bytes_or_fail(header[1], deadline)
            packet = packet + self._input.get_bytes_or_fail(2, deadline)
            if calc_crc16(CRC16_INIT, packet) != 0:
                #print("crc16 mismatch")
                continue
            return packet[:-2]


class Channel(PacketSink):
    # Choose these parameters to be sensible for a specific transport layer
    _resend_timeout = 5.0 # [s]
    _send_attempts = 5

    def __init__(self, name, input, output, cancellation_token, logger):
        """
        Params:
        input: A PacketSource where this channel will source packets from on
               demand. Alternatively packets can be provided to this channel
               directly by calling process_packet on this instance.
        output: A PacketSink where this channel will put outgoing packets.
        """
        self._name = name
        self._input = input
        self._output = output
        self._logger = logger
        self._outbound_seq_no = 0
        self._interface_definition_crc = 0
        self._expected_acks = {}
        self._responses = {}
        self._my_lock = threading.Lock()
        self._channel_broken = Event(cancellation_token)
        self.start_receiver_thread(Event(self._channel_broken))

    def start_receiver_thread(self, cancellation_token):
        """
        Starts the receiver thread that processes incoming messages.
        The thread quits as soon as the channel enters a broken state.
        """
        def receiver_thread():
            error_ctr = 0
            try:
                while (not cancellation_token.is_set() and not self._channel_broken.is_set()
                        and error_ctr < 10):
                    # Set an arbitrary deadline because the get_packet function
                    # currently doesn't support a cancellation_token
                    deadline = time.monotonic() + 1.0
                    try:
                        response = self._input.get_packet(deadline)
                    except TimeoutError:
                        continue # try again
                    except ChannelDamagedException:
                        error_ctr += 1
                        continue # try again
                    if (error_ctr > 0):
                        error_ctr -= 1
                    # Process response
                    # This should not throw an exception, otherwise the channel breaks
                    self.process_packet(response)
                #print("receiver thread is exiting")
            except Exception:
                self._logger.debug("receiver thread is exiting: " + traceback.format_exc())
            finally:
                self._channel_broken.set()
        t = threading.Thread(target=receiver_thread)
        t.daemon = True
        t.start()

    def remote_endpoint_operation(self, endpoint_id, input, expect_ack, output_length):
        if input is None:
            input = bytearray(0)
        if (len(input) >= 128):
            raise Exception("packet larger than 127 currently not supported")

        if (expect_ack):
            endpoint_id |= 0x8000

        self._my_lock.acquire()
        try:
            self._outbound_seq_no = ((self._outbound_seq_no + 1) & 0x7fff)
            seq_no = self._outbound_seq_no
        finally:
            self._my_lock.release()
        seq_no |= 0x80 # FIXME: we hardwire one bit of the seq-no to 1 to avoid conflicts with the ascii protocol
        packet = struct.pack('<HHH', seq_no, endpoint_id, output_length)
        packet = packet + input

        crc16 = calc_crc16(CRC16_INIT, packet)
        if (endpoint_id & 0x7fff == 0):
            trailer = PROTOCOL_VERSION
        else:
            trailer = self._interface_definition_crc
        #print("append trailer " + trailer)
        packet = packet + struct.pack('<H', trailer)

        if (expect_ack):
            ack_event = Event()
            self._expected_acks[seq_no] = ack_event
            try:
                attempt = 0
                while (attempt < self._send_attempts):
                    self._my_lock.acquire()
                    try:
                        self._output.process_packet(packet)
                    except ChannelDamagedException:
                        attempt += 1
                        continue # resend
                    except TimeoutError:
                        attempt += 1
                        continue # resend
                    finally:
                        self._my_lock.release()
                    # Wait for ACK until the resend timeout is exceeded
                    try:
                        if wait_any(self._resend_timeout, ack_event, self._channel_broken) != 0:
                            raise ChannelBrokenException()
                    except TimeoutError:
                        attempt += 1
                        continue # resend
                    return self._responses.pop(seq_no)
                    # TODO: record channel statistics
                raise ChannelBrokenException() # Too many resend attempts
            finally:
                self._expected_acks.pop(seq_no)
                self._responses.pop(seq_no, None)
        else:
            # fire and forget
            self._output.process_packet(packet)
            return None

    def remote_endpoint_read_buffer(self, endpoint_id):
        """
        Handles reads from long endpoints
        """
        # TODO: handle device that could (maliciously) send infinite stream
        buffer = bytes()
        while True:
            chunk_length = 512
            chunk = self.remote_endpoint_operation(endpoint_id, struct.pack("<I", len(buffer)), True, chunk_length)
            if (len(chunk) == 0):
                break
            buffer += chunk
        return buffer

    def process_packet(self, packet):
        #print("process packet")
        packet = bytes(packet)
        if (len(packet) < 2):
            raise Exception("packet too short")

        seq_no = struct.unpack('<H', packet[0:2])[0]

        if (seq_no & 0x8000):
            seq_no &= 0x7fff
            ack_signal = self._expected_acks.get(seq_no, None)
            if (ack_signal):
                self._responses[seq_no] = packet[2:]
                ack_signal.set()
                #print("received ack for packet " + str(seq_no))
            else:
                print("received unexpected ACK: " + str(seq_no))

        else:
            #if (calc_crc16(CRC16_INIT, struct.pack('<HBB', PROTOCOL_VERSION, packet[-2], packet[-1]))):
            #    raise Exception("CRC16 mismatch")
            print("endpoint requested")
            # TODO: handle local endpoint operation
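Aside (not part of this commit): the framing implemented by StreamBasedPacketSink above is a 3-byte header [SYNC_BYTE, length, CRC8 over those two bytes], then the payload, then a big-endian CRC16 trailer. A small worked example using only the functions defined in this file:

    import struct
    from fibre.protocol import SYNC_BYTE, CRC8_INIT, CRC16_INIT, calc_crc8, calc_crc16

    payload = bytes([0x01, 0x02, 0x03])

    header = bytearray([SYNC_BYTE, len(payload)])
    header.append(calc_crc8(CRC8_INIT, header))                    # CRC8 over sync + length

    trailer = struct.pack('>H', calc_crc16(CRC16_INIT, payload))   # big-endian CRC16

    frame = bytes(header) + payload + trailer
    # Running calc_crc16(CRC16_INIT, payload + trailer) yields 0, which is exactly
    # the acceptance check StreamToPacketSegmenter performs on a received packet.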
264  3.Software/CLI-Tool/fibre/remote_object.py  Normal file
@@ -0,0 +1,264 @@
"""
Provides functions for the discovery of Fibre nodes
"""

import sys
import json
import struct
import threading
import fibre.protocol

class ObjectDefinitionError(Exception):
    pass

codecs = {}

class StructCodec():
    """
    Generic serializer/deserializer based on struct pack
    """
    def __init__(self, struct_format, target_type):
        self._struct_format = struct_format
        self._target_type = target_type
    def get_length(self):
        return struct.calcsize(self._struct_format)
    def serialize(self, value):
        value = self._target_type(value)
        return struct.pack(self._struct_format, value)
    def deserialize(self, buffer):
        value = struct.unpack(self._struct_format, buffer)
        value = value[0] if len(value) == 1 else value
        return self._target_type(value)

class RemoteProperty():
    """
    Used internally by dynamically created objects to translate
    property assignments and fetches into endpoint operations on the
    object's associated channel
    """
    def __init__(self, json_data, parent):
        self._parent = parent
        self.__channel__ = parent.__channel__
        id_str = json_data.get("id", None)
        if id_str is None:
            raise ObjectDefinitionError("unspecified endpoint ID")
        self._id = int(id_str)

        self._name = json_data.get("name", None)
        if self._name is None:
            self._name = "[anonymous]"

        type_str = json_data.get("type", None)
        if type_str is None:
            raise ObjectDefinitionError("unspecified type")

        # Find all codecs that match the type_str and build a dictionary
        # of the form {type1: codec1, type2: codec2}
        eligible_types = {k: v[type_str] for (k,v) in codecs.items() if type_str in v}

        if not eligible_types:
            raise ObjectDefinitionError("unsupported codec {}".format(type_str))

        # TODO: better heuristics to select a matching type (i.e. prefer non lossless)
        eligible_types = list(eligible_types.items())
        self._property_type = eligible_types[0][0]
        self._codec = eligible_types[0][1]

        access_mode = json_data.get("access", "r")
        self._can_read = 'r' in access_mode
        self._can_write = 'w' in access_mode

    def get_value(self):
        buffer = self._parent.__channel__.remote_endpoint_operation(self._id, None, True, self._codec.get_length())
        return self._codec.deserialize(buffer)

    def set_value(self, value):
        buffer = self._codec.serialize(value)
        # TODO: Currently we wait for an ack here. Settle on the default guarantee.
        self._parent.__channel__.remote_endpoint_operation(self._id, buffer, True, 0)

    def _dump(self):
        if self._name == "serial_number":
            # special case: serial number should be displayed in hex (TODO: generalize)
            val_str = "{:012X}".format(self.get_value())
        elif self._name == "error":
            # special case: errors should be displayed in hex (TODO: generalize)
            val_str = "0x{:04X}".format(self.get_value())
        else:
            val_str = str(self.get_value())
        return "{} = {} ({})".format(self._name, val_str, self._property_type.__name__)

class EndpointRefCodec():
    """
    Serializer/deserializer for an endpoint reference
    """
    def get_length(self):
        return struct.calcsize("<HH")
    def serialize(self, value):
        if value is None:
            (ep_id, ep_crc) = (0, 0)
        elif isinstance(value, RemoteProperty):
            (ep_id, ep_crc) = (value._id, value.__channel__._interface_definition_crc)
        else:
            raise TypeError("Expected value of type RemoteProperty or None but got '{}'. An example for a RemoteProperty is this expression: odrv0.axis0.controller._remote_attributes['pos_setpoint']".format(type(value).__name__))
        return struct.pack("<HH", ep_id, ep_crc)
    def deserialize(self, buffer):
        return struct.unpack("<HH", buffer)

codecs[int] = {
    'int8': StructCodec("<b", int),
    'uint8': StructCodec("<B", int),
    'int16': StructCodec("<h", int),
    'uint16': StructCodec("<H", int),
    'int32': StructCodec("<i", int),
    'uint32': StructCodec("<I", int),
    'int64': StructCodec("<q", int),
    'uint64': StructCodec("<Q", int)
}

codecs[bool] = {
    'bool': StructCodec("<?", bool)
}

codecs[float] = {
    'float': StructCodec("<f", float)
}

codecs[RemoteProperty] = {
    'endpoint_ref': EndpointRefCodec()
}


class RemoteFunction(object):
    """
    Represents a callable function that maps to a function call on a remote object
    """
    def __init__(self, json_data, parent):
        self._parent = parent
        id_str = json_data.get("id", None)
        if id_str is None:
            raise ObjectDefinitionError("unspecified endpoint ID")
        self._trigger_id = int(id_str)

        self._name = json_data.get("name", None)
        if self._name is None:
            self._name = "[anonymous]"

        self._inputs = []
        for param_json in json_data.get("arguments", []) + json_data.get("inputs", []): # TODO: deprecate "arguments" keyword
            param_json["mode"] = "r"
            self._inputs.append(RemoteProperty(param_json, parent))

        self._outputs = []
        for param_json in json_data.get("outputs", []): # TODO: deprecate "arguments" keyword
            param_json["mode"] = "r"
            self._outputs.append(RemoteProperty(param_json, parent))

    def __call__(self, *args):
        if (len(self._inputs) != len(args)):
            raise TypeError("expected {} arguments but have {}".format(len(self._inputs), len(args)))
        for i in range(len(args)):
            self._inputs[i].set_value(args[i])
        self._parent.__channel__.remote_endpoint_operation(self._trigger_id, None, True, 0)
        if len(self._outputs) > 0:
            return self._outputs[0].get_value()

    def _dump(self):
        return "{}({})".format(self._name, ", ".join("{}: {}".format(x._name, x._property_type.__name__) for x in self._inputs))

class RemoteObject(object):
    """
    Object with functions and properties that map to remote endpoints
    """
    def __init__(self, json_data, parent, channel, logger):
        """
        Creates an object that implements the specified JSON type description by
        communicating over the provided channel
        """
        # Directly write to __dict__ to avoid calling __setattr__ too early
        object.__getattribute__(self, "__dict__")["_remote_attributes"] = {}
        object.__getattribute__(self, "__dict__")["__sealed__"] = False
        # Assign once more to make linter happy
        self._remote_attributes = {}
        self.__sealed__ = False

        self.__channel__ = channel
        self.__parent__ = parent

        # Build attribute list from JSON
        for member_json in json_data.get("members", []):
            member_name = member_json.get("name", None)
            if member_name is None:
                logger.debug("ignoring unnamed attribute")
                continue

            try:
                type_str = member_json.get("type", None)
                if type_str == "object":
                    attribute = RemoteObject(member_json, self, channel, logger)
                elif type_str == "function":
                    attribute = RemoteFunction(member_json, self)
                elif type_str != None:
                    attribute = RemoteProperty(member_json, self)
                else:
                    raise ObjectDefinitionError("no type information")
            except ObjectDefinitionError as ex:
                logger.debug("malformed member {}: {}".format(member_name, str(ex)))
                continue

            self._remote_attributes[member_name] = attribute
            self.__dict__[member_name] = attribute

        # Ensure that from here on out assignments to undefined attributes
        # raise an exception
        self.__sealed__ = True
        channel._channel_broken.subscribe(self._tear_down)

    def _dump(self, indent, depth):
        if depth <= 0:
            return "..."
        lines = []
        for key, val in self._remote_attributes.items():
            if isinstance(val, RemoteObject):
                val_str = indent + key + (": " if depth == 1 else ":\n") + val._dump(indent + "  ", depth - 1)
            else:
                val_str = indent + val._dump()
            lines.append(val_str)
        return "\n".join(lines)

    def __str__(self):
        return self._dump("", depth=2)

    def __repr__(self):
        return self.__str__()

    def __getattribute__(self, name):
        attr = object.__getattribute__(self, "_remote_attributes").get(name, None)
        if isinstance(attr, RemoteProperty):
            if attr._can_read:
                return attr.get_value()
            else:
                raise Exception("Cannot read from property {}".format(name))
        elif attr != None:
            return attr
        else:
            return object.__getattribute__(self, name)
            #raise AttributeError("Attribute {} not found".format(name))

    def __setattr__(self, name, value):
        attr = object.__getattribute__(self, "_remote_attributes").get(name, None)
        if isinstance(attr, RemoteProperty):
            if attr._can_write:
                attr.set_value(value)
            else:
                raise Exception("Cannot write to property {}".format(name))
        elif not object.__getattribute__(self, "__sealed__") or name in object.__getattribute__(self, "__dict__"):
            object.__getattribute__(self, "__dict__")[name] = value
        else:
            raise AttributeError("Attribute {} not found".format(name))

    def _tear_down(self):
        # Clear all remote members
        for k in self._remote_attributes.keys():
            self.__dict__.pop(k)
        self._remote_attributes = {}
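Aside (not part of this commit): the codec layer above can be exercised without any device attached. A hedged sketch:

    from fibre.remote_object import codecs

    vel_codec = codecs[float]['float']        # StructCodec("<f", float)
    raw = vel_codec.serialize(3.5)            # 4 little-endian bytes
    print(vel_codec.get_length(), vel_codec.deserialize(raw))   # 4 3.5

    # On a live connection, RemoteProperty.get_value()/set_value() push these same
    # byte buffers through Channel.remote_endpoint_operation().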
101  3.Software/CLI-Tool/fibre/serial_transport.py  Normal file
@@ -0,0 +1,101 @@
"""
Provides classes that implement the StreamSource/StreamSink and
PacketSource/PacketSink interfaces for serial ports.
"""

import os
import re
import time
import traceback
import serial
import serial.tools.list_ports
import fibre
from fibre.utils import TimeoutError

# TODO: make this customizable
DEFAULT_BAUDRATE = 115200

class SerialStreamTransport(fibre.protocol.StreamSource, fibre.protocol.StreamSink):
    def __init__(self, port, baud):
        self._dev = serial.Serial(port, baud, timeout=1)

    def process_bytes(self, bytes):
        self._dev.write(bytes)

    def get_bytes(self, n_bytes, deadline):
        """
        Returns n bytes unless the deadline is reached, in which case the bytes
        that were read up to that point are returned. If deadline is None the
        function blocks forever. A deadline before the current time corresponds
        to non-blocking mode.
        """
        if deadline is None:
            self._dev.timeout = None
        else:
            self._dev.timeout = max(deadline - time.monotonic(), 0)
        return self._dev.read(n_bytes)

    def get_bytes_or_fail(self, n_bytes, deadline):
        result = self.get_bytes(n_bytes, deadline)
        if len(result) < n_bytes:
            raise TimeoutError("expected {} bytes but got only {}".format(n_bytes, len(result)))
        return result

    def close(self):
        self._dev.close()


def find_dev_serial_ports():
    try:
        return ['/dev/' + x for x in os.listdir('/dev')]
    except FileNotFoundError:
        return []

def find_pyserial_ports():
    return [x.device for x in serial.tools.list_ports.comports()]


def discover_channels(path, serial_number, callback, cancellation_token, channel_termination_token, logger):
    """
    Scans for serial ports that match the path spec.
    This function blocks until cancellation_token is set.
    Channels spawned by this function run until channel_termination_token is set.
    """
    if path == None:
        # This regex should match all desired port names on macOS,
        # Linux and Windows but might match some incorrect port names.
        regex = r'^(/dev/tty\.usbmodem.*|/dev/ttyACM.*|COM[0-9]+)$'
    else:
        regex = "^" + path + "$"

    known_devices = []
    def device_matcher(port_name):
        if port_name in known_devices:
            return False
        return bool(re.match(regex, port_name))

    def did_disconnect(port_name, device):
        device.close()
        # TODO: yes there is a race condition here in case you wonder.
        known_devices.pop(known_devices.index(port_name))

    while not cancellation_token.is_set():
        all_ports = find_pyserial_ports() + find_dev_serial_ports()
        new_ports = filter(device_matcher, all_ports)
        for port_name in new_ports:
            try:
                serial_device = SerialStreamTransport(port_name, DEFAULT_BAUDRATE)
                input_stream = fibre.protocol.PacketFromStreamConverter(serial_device)
                output_stream = fibre.protocol.StreamBasedPacketSink(serial_device)
                channel = fibre.protocol.Channel(
                        "serial port {}@{}".format(port_name, DEFAULT_BAUDRATE),
                        input_stream, output_stream, channel_termination_token, logger)
                channel.serial_device = serial_device
            except serial.serialutil.SerialException:
                logger.debug("Serial device init failed. Ignoring this port. More info: " + traceback.format_exc())
                known_devices.append(port_name)
            else:
                known_devices.append(port_name)
                channel._channel_broken.subscribe(lambda: did_disconnect(port_name, serial_device))
                callback(channel)
        time.sleep(1)
116  3.Software/CLI-Tool/fibre/shell.py  Normal file
@@ -0,0 +1,116 @@

import sys
import platform
import threading
import fibre

def did_discover_device(device,
                        interactive_variables, discovered_devices,
                        branding_short, branding_long,
                        logger, app_shutdown_token):
    """
    Handles the discovery of new devices by displaying a
    message and making the device available to the interactive
    console
    """
    serial_number = '{:012X}'.format(device.serial_number) if hasattr(device, 'serial_number') else "[unknown serial number]"
    if serial_number in discovered_devices:
        verb = "Reconnected"
        index = discovered_devices.index(serial_number)
    else:
        verb = "Connected"
        discovered_devices.append(serial_number)
        index = len(discovered_devices) - 1
    interactive_name = branding_short + str(index)

    # Publish new device to interactive console
    interactive_variables[interactive_name] = device
    globals()[interactive_name] = device # Add to globals so tab complete works
    logger.notify("{} to {} {} as {}".format(verb, branding_long, serial_number, interactive_name))

    # Subscribe to disappearance of the device
    device.__channel__._channel_broken.subscribe(lambda: did_lose_device(interactive_name, logger, app_shutdown_token))

def did_lose_device(interactive_name, logger, app_shutdown_token):
    """
    Handles the disappearance of a device by displaying
    a message.
    """
    if not app_shutdown_token.is_set():
        logger.warn("Oh no {} disappeared".format(interactive_name))

def launch_shell(args,
                 interactive_variables,
                 print_banner, print_help,
                 logger, app_shutdown_token,
                 branding_short="dev", branding_long="device"):
    """
    Launches an interactive python or IPython command line
    interface.
    As devices are connected they are made available as
    "dev0", "dev1", ...
    The names of the variables can be customized by setting branding_short.
    """

    discovered_devices = []
    globals().update(interactive_variables)

    # Connect to device
    logger.debug("Waiting for {}...".format(branding_long))
    fibre.find_all(args.path, args.serial_number,
                   lambda dev: did_discover_device(dev, interactive_variables, discovered_devices, branding_short, branding_long, logger, app_shutdown_token),
                   app_shutdown_token,
                   app_shutdown_token,
                   logger=logger)

    # Check if IPython is installed
    if args.no_ipython:
        use_ipython = False
    else:
        try:
            import IPython
            use_ipython = True
        except:
            print("Warning: you don't have IPython installed.")
            print("If you want to have an improved interactive console with pretty colors,")
            print("you should install IPython\n")
            use_ipython = False

    interactive_variables["help"] = lambda: print_help(args, len(discovered_devices) > 0)

    # If IPython is installed, embed IPython shell, otherwise embed regular shell
    if use_ipython:
        help = lambda: print_help(args, len(discovered_devices) > 0) # Override help function # pylint: disable=W0612
        locals()['__name__'] = globals()['__name__'] # to fix broken "%run -i script.py"
        console = IPython.terminal.embed.InteractiveShellEmbed(banner1='')
        console.runcode = console.run_code # hack to make IPython look like the regular console
        interact = console
    else:
        # Enable tab complete if possible
        try:
            import readline # Works only on Unix
            readline.parse_and_bind("tab: complete")
        except:
            sudo_prefix = "" if platform.system() == "Windows" else "sudo "
            print("Warning: could not enable tab-complete. User experience will suffer.\n"
                  "Run `{}pip install readline` and then restart this script to fix this."
                  .format(sudo_prefix))

        import code
        console = code.InteractiveConsole(locals=interactive_variables)
        interact = lambda: console.interact(banner='')

    # install hook to hide ChannelBrokenException
    console.runcode('import sys')
    console.runcode('superexcepthook = sys.excepthook')
    console.runcode('def newexcepthook(ex_class,ex,trace):\n'
                    '  if ex_class.__module__ + "." + ex_class.__name__ != "fibre.ChannelBrokenException":\n'
                    '    superexcepthook(ex_class,ex,trace)')
    console.runcode('sys.excepthook=newexcepthook')


    # Launch shell
    print_banner()
    logger._skip_bottom_line = True
    interact()
    app_shutdown_token.set()
85  3.Software/CLI-Tool/fibre/tcp_transport.py  Normal file
@@ -0,0 +1,85 @@

import sys
import socket
import time
import traceback
import fibre.protocol
from fibre.utils import wait_any, TimeoutError

def noprint(x):
    pass

class TCPTransport(fibre.protocol.StreamSource, fibre.protocol.StreamSink):
    def __init__(self, dest_addr, dest_port, logger):
        # TODO: FIXME: use IPv6
        # Problem: getaddrinfo fails if the resolver returns an
        # IPv4 address, but we are using AF_INET6
        #family = socket.AF_INET6 if socket.has_ipv6 else socket.AF_INET
        family = socket.AF_INET
        self.sock = socket.socket(family, socket.SOCK_STREAM)
        # TODO: Determine the right address to use from the list
        self.target = socket.getaddrinfo(dest_addr, dest_port, family)[0][4]
        # TODO: this blocks until a connection is established, or the system cancels it
        self.sock.connect(self.target)

    def process_bytes(self, buffer):
        self.sock.send(buffer)

    def get_bytes(self, n_bytes, deadline):
        """
        Returns n bytes unless the deadline is reached, in which case the bytes
        that were read up to that point are returned. If deadline is None the
        function blocks forever. A deadline before the current time corresponds
        to non-blocking mode.
        """
        # convert deadline to seconds (floating point)
        deadline = None if deadline is None else max(deadline - time.monotonic(), 0)
        self.sock.settimeout(deadline)
        try:
            data = self.sock.recv(n_bytes) # receive n_bytes
            return data
        except socket.timeout:
            # if we got a timeout data will still be none, so we call recv again
            # this time in non blocking state and see if we can get some data
            try:
                return self.sock.recv(n_bytes)
            except socket.timeout:
                raise TimeoutError

    def get_bytes_or_fail(self, n_bytes, deadline):
        result = self.get_bytes(n_bytes, deadline)
        if len(result) < n_bytes:
            raise TimeoutError("expected {} bytes but got only {}".format(n_bytes, len(result)))
        return result


def discover_channels(path, serial_number, callback, cancellation_token, channel_termination_token, logger):
    """
    Tries to connect to a TCP server based on the path spec.
    This function blocks until cancellation_token is set.
    Channels spawned by this function run until channel_termination_token is set.
    """
    try:
        dest_addr = ':'.join(path.split(":")[:-1])
        dest_port = int(path.split(":")[-1])
    except (ValueError, IndexError):
        raise Exception('"{}" is not a valid TCP destination. The format should be something like "localhost:1234".'
                        .format(path))

    while not cancellation_token.is_set():
        try:
            tcp_transport = fibre.tcp_transport.TCPTransport(dest_addr, dest_port, logger)
            stream2packet_input = fibre.protocol.PacketFromStreamConverter(tcp_transport)
            packet2stream_output = fibre.protocol.StreamBasedPacketSink(tcp_transport)
            channel = fibre.protocol.Channel(
                    "TCP device {}:{}".format(dest_addr, dest_port),
                    stream2packet_input, packet2stream_output,
                    channel_termination_token, logger)
        except:
            #logger.debug("TCP channel init failed. More info: " + traceback.format_exc())
            pass
        else:
            callback(channel)
            wait_any(None, cancellation_token, channel._channel_broken)
        time.sleep(1)
67  3.Software/CLI-Tool/fibre/udp_transport.py  Normal file
@@ -0,0 +1,67 @@

import sys
import socket
import time
import traceback
import fibre.protocol
from fibre.utils import wait_any

def noprint(x):
    pass

class UDPTransport(fibre.protocol.PacketSource, fibre.protocol.PacketSink):
    def __init__(self, dest_addr, dest_port, logger):
        # TODO: FIXME: use IPv6
        # Problem: getaddrinfo fails if the resolver returns an
        # IPv4 address, but we are using AF_INET6
        #family = socket.AF_INET6 if socket.has_ipv6 else socket.AF_INET
        family = socket.AF_INET
        self.sock = socket.socket(family, socket.SOCK_DGRAM)
        # TODO: Determine the right address to use from the list
        self.target = socket.getaddrinfo(dest_addr, dest_port, family)[0][4]

    def process_packet(self, buffer):
        self.sock.sendto(buffer, self.target)

    def get_packet(self, deadline):
        # TODO: implement deadline
        deadline = None if deadline is None else max(deadline - time.monotonic(), 0)
        self.sock.settimeout(deadline)
        try:
            data, _ = self.sock.recvfrom(1024)
            return data
        except socket.timeout:
            # if we got a timeout data will still be none, so we call recv again
            # this time in non blocking state and see if we can get some data
            try:
                return self.sock.recvfrom(1024)
            except socket.timeout:
                raise TimeoutError

def discover_channels(path, serial_number, callback, cancellation_token, channel_termination_token, logger):
    """
    Tries to connect to a UDP server based on the path spec.
    This function blocks until cancellation_token is set.
    Channels spawned by this function run until channel_termination_token is set.
    """
    try:
        dest_addr = ':'.join(path.split(":")[:-1])
        dest_port = int(path.split(":")[-1])
    except (ValueError, IndexError):
        raise Exception('"{}" is not a valid UDP destination. The format should be something like "localhost:1234".'
                        .format(path))

    while not cancellation_token.is_set():
        try:
            udp_transport = fibre.udp_transport.UDPTransport(dest_addr, dest_port, logger)
            channel = fibre.protocol.Channel(
                    "UDP device {}:{}".format(dest_addr, dest_port),
                    udp_transport, udp_transport,
                    channel_termination_token, logger)
        except:
            logger.debug("UDP channel init failed. More info: " + traceback.format_exc())
            pass
        else:
            callback(channel)
            wait_any(None, cancellation_token, channel._channel_broken)
        time.sleep(1)
215
3.Software/CLI-Tool/fibre/usbbulk_transport.py
Normal file
215
3.Software/CLI-Tool/fibre/usbbulk_transport.py
Normal file
@ -0,0 +1,215 @@
|
||||
# requires pyusb
|
||||
# pip install --pre pyusb
|
||||
|
||||
import usb.core
|
||||
import usb.util
|
||||
import sys
|
||||
import time
|
||||
import fibre.protocol
|
||||
import traceback
|
||||
import platform
|
||||
from fibre.utils import TimeoutError
|
||||
|
||||
# Currently we identify fibre-enabled devices by VID,PID
|
||||
# TODO: identify by USB descriptors
|
||||
WELL_KNOWN_VID_PID_PAIRS = [
|
||||
(0x1209, 0x0D31),
|
||||
(0x1209, 0x0D32),
|
||||
(0x1209, 0x0D33)
|
||||
]
|
||||
|
||||
class USBBulkTransport(fibre.protocol.PacketSource, fibre.protocol.PacketSink):
|
||||
def __init__(self, dev, logger):
|
||||
self._logger = logger
|
||||
self.dev = dev
|
||||
self.intf = None
|
||||
self._name = "USB device {}:{}".format(dev.idVendor, dev.idProduct)
|
||||
self._was_damaged = False
|
||||
|
||||
##
|
||||
# information about the connected device
|
||||
##
|
||||
def info(self):
|
||||
# loop through configurations
|
||||
string = ""
|
||||
for cfg in self.dev:
|
||||
string += "ConfigurationValue {0}\n".format(cfg.bConfigurationValue)
|
||||
for intf in cfg:
|
||||
string += "\tInterfaceNumber {0},{1}\n".format(intf.bInterfaceNumber, intf.bAlternateSetting)
|
||||
for ep in intf:
|
||||
string += "\t\tEndpointAddress {0}\n".format(ep.bEndpointAddress)
|
||||
return string
|
||||
|
||||
def init(self):
|
||||
# Under some conditions, the Linux USB/libusb stack ends up in a corrupt
|
||||
# state where there are a few packets in a receive queue but a call
|
||||
# to epr.read() does not return these packet until a new packet arrives.
|
||||
# This undesirable queue can be cleared by resetting the device.
|
||||
# On windows this would cause file-not-found errors in subsequent dev calls
|
||||
if platform.system() != 'Windows':
|
||||
self.dev.reset()
|
||||
|
||||
#self.dev.set_configuration() # no args: set first configuration
|
||||
|
||||
# Find the best interface
|
||||
self.cfg = self.dev.get_active_configuration()
|
||||
custom_interfaces = [i for i in self.cfg.interfaces() if i.bInterfaceClass == 0x00 and i.bInterfaceSubClass == 0x01]
|
||||
cdc_interfaces = [i for i in self.cfg.interfaces() if i.bInterfaceClass == 0x0a and i.bInterfaceSubClass == 0x00]
|
||||
all_compatible_interfaces = custom_interfaces + cdc_interfaces
|
||||
if len(all_compatible_interfaces) == 0:
|
||||
raise Exception("the device has no compatible interfaces")
|
||||
self.intf = all_compatible_interfaces[0]
|
||||
|
||||
# Try to detach kernel driver from interface
|
||||
try:
|
||||
if self.dev.is_kernel_driver_active(self.intf.bInterfaceNumber):
|
||||
self.dev.detach_kernel_driver(self.intf.bInterfaceNumber)
|
||||
self._logger.debug("Detached Kernel Driver")
|
||||
else:
|
||||
self._logger.debug("Kernel Driver was not attached")
|
||||
except NotImplementedError:
|
||||
pass #is_kernel_driver_active not implemented on Windows
|
||||
|
||||
# find write endpoint (first OUT endpoint)
|
||||
self.epw = usb.util.find_descriptor(self.intf,
|
||||
custom_match = \
|
||||
lambda e: \
|
||||
usb.util.endpoint_direction(e.bEndpointAddress) == \
|
||||
usb.util.ENDPOINT_OUT
|
||||
)
|
||||
assert self.epw is not None
|
||||
self._logger.debug("EndpointAddress for writing {}".format(self.epw.bEndpointAddress))
|
||||
# find read endpoint (first IN endpoint)
|
||||
self.epr = usb.util.find_descriptor(self.intf,
|
||||
custom_match = \
|
||||
lambda e: \
|
||||
usb.util.endpoint_direction(e.bEndpointAddress) == \
|
||||
usb.util.ENDPOINT_IN
|
||||
)
|
||||
assert self.epr is not None
|
||||
self._logger.debug("EndpointAddress for reading {}".format(self.epr.bEndpointAddress))
|
||||
|
||||
def deinit(self):
|
||||
if not self.intf is None:
|
||||
usb.util.release_interface(self.dev, self.intf)
|
||||
|
||||
def process_packet(self, usbBuffer):
|
||||
try:
|
||||
ret = self.epw.write(usbBuffer, 0)
|
||||
if self._was_damaged:
|
||||
self._logger.debug("Recovered from USB halt/stall condition")
|
||||
self._was_damaged = False
|
||||
return ret
|
||||
except usb.core.USBError as ex:
|
||||
if ex.errno == 19 or ex.errno == 32: # "no such device", "pipe error"
|
||||
raise fibre.protocol.ChannelBrokenException()
|
||||
elif ex.errno is None or ex.errno == 60 or ex.errno == 110: # timeout
|
||||
raise TimeoutError()
|
||||
else:
|
||||
self._logger.debug("error in usbbulk_transport.py, process_packet")
|
||||
self._logger.debug(traceback.format_exc())
|
||||
self._logger.debug("halt condition: {}".format(ex.errno))
|
||||
self._logger.debug(str(ex))
|
||||
# Try resetting halt/stall condition
|
||||
try:
|
||||
self.deinit()
|
||||
self.init()
|
||||
except usb.core.USBError:
|
||||
raise fibre.protocol.ChannelBrokenException()
|
||||
# Retry transfer
|
||||
self._was_damaged = True
|
||||
raise fibre.protocol.ChannelDamagedException()
|
||||
|
||||
def get_packet(self, deadline):
|
||||
try:
|
||||
bufferLen = self.epr.wMaxPacketSize
|
||||
timeout = max(int((deadline - time.monotonic()) * 1000), 0)
|
||||
ret = self.epr.read(bufferLen, timeout)
|
||||
if self._was_damaged:
|
||||
self._logger.debug("Recovered from USB halt/stall condition")
|
||||
self._was_damaged = False
|
||||
return bytearray(ret)
|
||||
except usb.core.USBError as ex:
|
||||
if ex.errno == 19 or ex.errno == 32: # "no such device", "pipe error"
|
||||
raise fibre.protocol.ChannelBrokenException()
|
||||
elif ex.errno is None or ex.errno == 60 or ex.errno == 110: # timeout
|
||||
raise TimeoutError()
|
||||
else:
|
||||
self._logger.debug("error in usbbulk_transport.py, get_packet")
|
||||
self._logger.debug(traceback.format_exc())
|
||||
self._logger.debug("halt condition: {}".format(ex.errno))
|
||||
self._logger.debug(str(ex))
|
||||
# Try resetting halt/stall condition
|
||||
try:
|
||||
self.deinit()
|
||||
self.init()
|
||||
except usb.core.USBError:
|
||||
raise fibre.protocol.ChannelBrokenException()
|
||||
# Retry transfer
|
||||
self._was_damaged = True
|
||||
raise fibre.protocol.ChannelDamagedException()
|
||||
|
||||
|
||||
def discover_channels(path, serial_number, callback, cancellation_token, channel_termination_token, logger):
|
||||
"""
|
||||
Scans for USB devices that match the path spec.
|
||||
This function blocks until cancellation_token is set.
|
||||
Channels spawned by this function run until channel_termination_token is set.
|
||||
"""
|
||||
if path is None or path == "":
|
||||
bus = None
|
||||
address = None
|
||||
else:
|
||||
try:
|
||||
bus = int(path.split(":")[0])
|
||||
address = int(path.split(":")[1])
|
||||
except (ValueError, IndexError):
|
||||
raise Exception("{} is not a valid USB path specification. "
|
||||
"Expected a string of the format BUS:DEVICE where BUS "
|
||||
"and DEVICE are integers.".format(path))
|
||||
|
||||
known_devices = []
|
||||
def device_matcher(device):
|
||||
#print(" test {:04X}:{:04X}".format(device.idVendor, device.idProduct))
|
||||
try:
|
||||
if (device.bus, device.address) in known_devices:
|
||||
return False
|
||||
if bus is not None and device.bus != bus:
|
||||
return False
|
||||
if address is not None and device.address != address:
|
||||
return False
|
||||
if serial_number is not None and device.serial_number != serial_number:
|
||||
return False
|
||||
if (device.idVendor, device.idProduct) not in WELL_KNOWN_VID_PID_PAIRS:
|
||||
return False
|
||||
except:
|
||||
return False
|
||||
return True
|
||||
|
||||
while not cancellation_token.is_set():
|
||||
logger.debug("USB discover loop")
|
||||
devices = usb.core.find(find_all=True, custom_match=device_matcher)
|
||||
for usb_device in devices:
|
||||
try:
|
||||
bulk_device = USBBulkTransport(usb_device, logger)
|
||||
logger.debug(bulk_device.info())
|
||||
bulk_device.init()
|
||||
channel = fibre.protocol.Channel(
|
||||
"USB device bus {} device {}".format(usb_device.bus, usb_device.address),
|
||||
bulk_device, bulk_device, channel_termination_token, logger)
|
||||
channel.usb_device = usb_device # for debugging only
|
||||
except usb.core.USBError as ex:
|
||||
if ex.errno == 13:
|
||||
logger.debug("USB device access denied. Did you set up your udev rules correctly?")
|
||||
continue
|
||||
elif ex.errno == 16:
|
||||
logger.debug("USB device busy. I'll reset it and try again.")
|
||||
usb_device.reset()
|
||||
continue
|
||||
else:
|
||||
logger.debug("USB device init failed. Ignoring this device. More info: " + traceback.format_exc())
|
||||
known_devices.append((usb_device.bus, usb_device.address))
|
||||
else:
|
||||
known_devices.append((usb_device.bus, usb_device.address))
|
||||
callback(channel)
|
||||
time.sleep(1)
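# --- Illustrative usage sketch (not part of the original source) ---
# A minimal, hypothetical driver for discover_channels(), assuming the Event
# and Logger helpers from fibre.utils added later in this commit:
#
#   import threading
#   from fibre.utils import Event, Logger
#
#   logger = Logger(verbose=True)
#   cancellation_token = Event()
#
#   def on_channel(channel):
#       logger.info("discovered a fibre channel")
#
#   threading.Thread(target=discover_channels,
#                    args=("", None, on_channel, cancellation_token,
#                          cancellation_token, logger),
#                    daemon=True).start()
#   # ... later: cancellation_token.set() stops the discovery loop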
|
237
3.Software/CLI-Tool/fibre/utils.py
Normal file
@ -0,0 +1,237 @@
|
||||
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
import platform
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
try:
|
||||
if platform.system() == 'Windows':
|
||||
import win32console
|
||||
# TODO: we should use win32console anyway, so we could just omit colorama
|
||||
import colorama
|
||||
colorama.init()
|
||||
except ImportError:
|
||||
print("Could not init terminal features.")
|
||||
sys.stdout.flush()
|
||||
pass
|
||||
|
||||
if sys.version_info < (3, 3):
|
||||
class TimeoutError(Exception):
|
||||
pass
|
||||
else:
|
||||
TimeoutError = TimeoutError
|
||||
|
||||
def get_serial_number_str(device):
|
||||
if hasattr(device, 'serial_number'):
|
||||
return format(device.serial_number, 'x').upper()
|
||||
else:
|
||||
return "[unknown serial number]"
|
||||
|
||||
## Threading utils ##
|
||||
class Event():
|
||||
"""
|
||||
Alternative to threading.Event(), enhanced by the subscribe() function
|
||||
that the original fails to provide.
|
||||
@param trigger: if supplied, the newly created event will be triggered
|
||||
as soon as the trigger event becomes set
|
||||
"""
|
||||
def __init__(self, trigger=None):
|
||||
self._evt = threading.Event()
|
||||
self._subscribers = []
|
||||
self._mutex = threading.Lock()
|
||||
if trigger is not None:
|
||||
trigger.subscribe(lambda: self.set())
|
||||
|
||||
def is_set(self):
|
||||
return self._evt.is_set()
|
||||
|
||||
def set(self):
|
||||
"""
|
||||
Sets the event and invokes all subscribers if the event was
|
||||
not already set
|
||||
"""
|
||||
self._mutex.acquire()
|
||||
try:
|
||||
if not self._evt.is_set():
|
||||
self._evt.set()
|
||||
for s in self._subscribers:
|
||||
s()
|
||||
finally:
|
||||
self._mutex.release()
|
||||
|
||||
def subscribe(self, handler):
|
||||
"""
|
||||
Invokes the specified handler exactly once as soon as the
|
||||
specified event is set. If the event is already set, the
|
||||
handler is invoked immediately.
|
||||
Returns a function that can be invoked to unsubscribe.
|
||||
"""
|
||||
if handler is None:
|
||||
raise TypeError
|
||||
self._mutex.acquire()
|
||||
try:
|
||||
self._subscribers.append(handler)
|
||||
if self._evt.is_set():
|
||||
handler()
|
||||
finally:
|
||||
self._mutex.release()
|
||||
return handler
|
||||
|
||||
def unsubscribe(self, handler):
|
||||
self._mutex.acquire()
|
||||
try:
|
||||
self._subscribers.pop(self._subscribers.index(handler))
|
||||
finally:
|
||||
self._mutex.release()
|
||||
|
||||
def wait(self, timeout=None):
|
||||
if not self._evt.wait(timeout=timeout):
|
||||
raise TimeoutError()
|
||||
|
||||
def trigger_after(self, timeout):
|
||||
"""
|
||||
Triggers the event after the specified timeout.
|
||||
This function returns immediately.
|
||||
"""
|
||||
def delayed_trigger():
|
||||
# Event.wait() raises TimeoutError when the timeout expires, so catch it
# and set the event in that case (a no-op if it was already set).
try:
    self.wait(timeout=timeout)
except TimeoutError:
|
||||
self.set()
|
||||
t = threading.Thread(target=delayed_trigger)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
|
||||
def wait_any(timeout=None, *events):
|
||||
"""
|
||||
Blocks until any of the specified events are triggered.
|
||||
Returns the index of the event that was triggered or raises
|
||||
a TimeoutError
|
||||
Param timeout: A timeout in seconds
|
||||
"""
|
||||
or_event = threading.Event()
|
||||
subscriptions = []
|
||||
for event in events:
|
||||
subscriptions.append((event, event.subscribe(lambda: or_event.set())))
|
||||
or_event.wait(timeout=timeout)
|
||||
for event, sub in subscriptions:
|
||||
event.unsubscribe(sub)
|
||||
for i in range(len(events)):
|
||||
if events[i].is_set():
|
||||
return i
|
||||
raise TimeoutError()
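# --- Illustrative usage sketch (not part of the original source) ---
# Hypothetical example combining the Event class and wait_any() above:
#
#   a = Event()
#   b = Event()
#   b.trigger_after(0.5)        # b is set roughly 0.5 s from now
#   idx = wait_any(2.0, a, b)   # returns 1 once b fires; raises TimeoutError after 2 s otherwise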
|
||||
|
||||
|
||||
## Log utils ##
|
||||
|
||||
class Logger():
|
||||
"""
|
||||
Logs messages to stdout
|
||||
"""
|
||||
|
||||
COLOR_DEFAULT = 0
|
||||
COLOR_GREEN = 1
|
||||
COLOR_CYAN = 2
|
||||
COLOR_YELLOW = 3
|
||||
COLOR_RED = 4
|
||||
|
||||
_VT100Colors = {
|
||||
COLOR_GREEN: '\x1b[92;1m',
|
||||
COLOR_CYAN: '\x1b[96;1m',
|
||||
COLOR_YELLOW: '\x1b[93;1m',
|
||||
COLOR_RED: '\x1b[91;1m',
|
||||
COLOR_DEFAULT: '\x1b[0m'
|
||||
}
|
||||
|
||||
_Win32Colors = {
|
||||
COLOR_GREEN: 0x0A,
|
||||
COLOR_CYAN: 0x0B,
|
||||
COLOR_YELLOW: 0x0E,
|
||||
COLOR_RED: 0x0C,
|
||||
COLOR_DEFAULT: 0x07
|
||||
}
|
||||
|
||||
def __init__(self, verbose=True):
|
||||
self._prefix = ''
|
||||
self._skip_bottom_line = False # If true, messages are printed one line above the cursor
|
||||
self._verbose = verbose
|
||||
self._print_lock = threading.Lock()
|
||||
if platform.system() == 'Windows':
|
||||
self._stdout_buf = win32console.GetStdHandle(win32console.STD_OUTPUT_HANDLE)
|
||||
|
||||
def indent(self, prefix=' '):
|
||||
indented_logger = Logger(verbose=self._verbose)
|
||||
indented_logger._prefix = self._prefix + prefix
|
||||
return indented_logger
|
||||
|
||||
def print_on_second_last_line(self, text, color):
|
||||
"""
|
||||
Prints a text on the second last line.
|
||||
This can be used to print a message above the command
|
||||
prompt. If the command prompt spans multiple lines
|
||||
there will be glitches.
|
||||
If the printed text spans multiple lines there will also
|
||||
be glitches (though this could be fixed).
|
||||
"""
|
||||
|
||||
if platform.system() == 'Windows':
|
||||
# Windows <10 doesn't understand VT100 escape codes, and colorama
|
||||
# also doesn't support the specific escape codes we need so we use the
|
||||
# native Win32 API.
|
||||
info = self._stdout_buf.GetConsoleScreenBufferInfo()
|
||||
cursor_pos = info['CursorPosition']
|
||||
scroll_rect=win32console.PySMALL_RECTType(
|
||||
Left=0, Top=1,
|
||||
Right=info['Window'].Right,
|
||||
Bottom=cursor_pos.Y-1)
|
||||
scroll_dest = win32console.PyCOORDType(scroll_rect.Left, scroll_rect.Top-1)
|
||||
self._stdout_buf.ScrollConsoleScreenBuffer(
|
||||
scroll_rect, scroll_rect, scroll_dest, # clipping rect is same as scroll rect
|
||||
u' ', Logger._Win32Colors[color]) # fill with empty cells with the desired color attributes
|
||||
line_start = win32console.PyCOORDType(0, cursor_pos.Y-1)
|
||||
self._stdout_buf.WriteConsoleOutputCharacter(text, line_start)
|
||||
|
||||
else:
|
||||
# Assume we're in a terminal that interprets VT100 escape codes.
|
||||
# TODO: test on macOS
|
||||
|
||||
# Escape character sequence:
|
||||
# ESC 7: store cursor position
|
||||
# ESC 1A: move cursor up by one
|
||||
# ESC 1S: scroll entire viewport by one
|
||||
# ESC 1L: insert 1 line at cursor position
|
||||
# (print text)
|
||||
# ESC 8: restore old cursor position
|
||||
|
||||
self._print_lock.acquire()
|
||||
sys.stdout.write('\x1b7\x1b[1A\x1b[1S\x1b[1L')
|
||||
sys.stdout.write(Logger._VT100Colors[color] + text + Logger._VT100Colors[Logger.COLOR_DEFAULT])
|
||||
sys.stdout.write('\x1b8')
|
||||
sys.stdout.flush()
|
||||
self._print_lock.release()
|
||||
|
||||
def print_colored(self, text, color):
|
||||
if self._skip_bottom_line:
|
||||
self.print_on_second_last_line(text, color)
|
||||
else:
|
||||
# On Windows, colorama does the job of interpreting the VT100 escape sequences
|
||||
self._print_lock.acquire()
|
||||
sys.stdout.write(Logger._VT100Colors[color] + text + Logger._VT100Colors[Logger.COLOR_DEFAULT] + '\n')
|
||||
sys.stdout.flush()
|
||||
self._print_lock.release()
|
||||
|
||||
def debug(self, text):
|
||||
if self._verbose:
|
||||
self.print_colored(self._prefix + text, Logger.COLOR_DEFAULT)
|
||||
def success(self, text):
|
||||
self.print_colored(self._prefix + text, Logger.COLOR_GREEN)
|
||||
def info(self, text):
|
||||
self.print_colored(self._prefix + text, Logger.COLOR_DEFAULT)
|
||||
def notify(self, text):
|
||||
self.print_colored(self._prefix + text, Logger.COLOR_CYAN)
|
||||
def warn(self, text):
|
||||
self.print_colored(self._prefix + text, Logger.COLOR_YELLOW)
|
||||
def error(self, text):
|
||||
# TODO: write to stderr
|
||||
self.print_colored(self._prefix + text, Logger.COLOR_RED)
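# --- Illustrative usage sketch (not part of the original source) ---
#
#   logger = Logger(verbose=True)
#   logger.info("plain message")
#   logger.success("printed in green")
#   sub = logger.indent()       # new Logger whose messages get an indented prefix
#   sub.warn("printed in yellow, indented")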
|
21
3.Software/CLI-Tool/ref_tool/__init__.py
Normal file
@ -0,0 +1,21 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(
|
||||
os.path.dirname(os.path.realpath(__file__)))),
|
||||
"Firmware", "fibre", "python"))
|
||||
|
||||
# Syntactic sugar to make usage more intuitive.
|
||||
# Try/pass used to break install-time dep issues
|
||||
try:
|
||||
import fibre
|
||||
|
||||
find_any = fibre.find_any
|
||||
find_all = fibre.find_all
|
||||
except:
|
||||
pass
|
||||
|
||||
# Standard convention is to add a __version__ attribute to the package
|
||||
from .version import get_version_str
|
||||
|
||||
__version__ = get_version_str()
del get_version_str
|
69
3.Software/CLI-Tool/ref_tool/code_generator.py
Normal file
@ -0,0 +1,69 @@
|
||||
|
||||
import jinja2
|
||||
import os
|
||||
import json
|
||||
|
||||
def get_flat_endpoint_list(json, prefix, id_offset):
|
||||
flat_list = []
|
||||
for item in json:
|
||||
item = item.copy()
|
||||
if 'id' in item:
|
||||
item['id'] -= id_offset
|
||||
if 'type' in item:
|
||||
if item['type'] in {'int8', 'uint8', 'int16', 'uint16', 'int32', 'uint32', 'int64', 'uint64'}:
|
||||
item['type'] += '_t'
|
||||
is_property = True
|
||||
elif item['type'] in {'bool', 'float'}:
|
||||
is_property = True
|
||||
elif item['type'] in {'function'}:
|
||||
if len(item.get('arguments', [])) == 0 and len(item.get('inputs', [])) == 0 and len(item.get('outputs', [])) == 0:
|
||||
item['type'] = 'void'
|
||||
is_property = True
|
||||
else:
|
||||
is_property = False
|
||||
else:
|
||||
is_property = False
|
||||
if is_property:
|
||||
item['name'] = prefix + item['name']
|
||||
flat_list.append(item)
|
||||
if 'members' in item:
|
||||
flat_list = flat_list + get_flat_endpoint_list(item['members'], prefix + item['name'] + '.', id_offset)
|
||||
return flat_list
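# --- Illustrative example (not part of the original source) ---
# Given a hypothetical endpoint tree such as
#   [{"name": "vbus_voltage", "id": 3, "type": "float"},
#    {"name": "config", "type": "object",
#     "members": [{"name": "brake_resistance", "id": 5, "type": "float"}]}]
# get_flat_endpoint_list(tree, '', 0) returns the flat entries
# "vbus_voltage" and "config.brake_resistance"; a non-zero id_offset is
# subtracted from every id, which is how the axis1 endpoints are mapped
# back onto the axis0 numbering in generate_code() below.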
|
||||
|
||||
def generate_code(odrv, template_file, output_file):
|
||||
json_data = odrv._json_data
|
||||
json_crc = odrv._json_crc
|
||||
|
||||
axis0_json = [item for item in json_data if item['name'].startswith("axis0")][0]
|
||||
axis1_json = [item for item in json_data if item['name'].startswith("axis1")][0]
|
||||
json_data = [item for item in json_data if not item['name'].startswith("axis")]
|
||||
endpoints = get_flat_endpoint_list(json_data, '', 0)
|
||||
per_axis_offset = axis1_json['members'][0]['id'] - axis0_json['members'][0]['id']
|
||||
axis_endpoints = get_flat_endpoint_list(axis0_json['members'], 'axis.', 0)
|
||||
axis_endpoints_copy = get_flat_endpoint_list(axis1_json['members'], 'axis.', per_axis_offset)
|
||||
if axis_endpoints != axis_endpoints_copy:
|
||||
raise Exception("axis0 and axis1 don't look exactly equal")
|
||||
|
||||
env = jinja2.Environment(
|
||||
#loader = jinja2.FileSystemLoader("/Data/Projects/")
|
||||
#trim_blocks=True,
|
||||
#lstrip_blocks=True
|
||||
)
|
||||
|
||||
# Expose helper functions to jinja template code
|
||||
#env.filters["delimit"] = camel_case_to_words
|
||||
|
||||
#import ipdb; ipdb.set_trace()
|
||||
|
||||
# Load and render template
|
||||
template = env.from_string(template_file.read())
|
||||
output = template.render(
|
||||
json_crc=json_crc,
|
||||
endpoints=endpoints,
|
||||
per_axis_offset=per_axis_offset,
|
||||
axis_endpoints=axis_endpoints,
|
||||
output_name=os.path.basename(output_file.name)
|
||||
)
|
||||
|
||||
# Output
|
||||
output_file.write(output)
|
82
3.Software/CLI-Tool/ref_tool/configuration.py
Normal file
@ -0,0 +1,82 @@
|
||||
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
import fibre.remote_object
|
||||
from ref_tool.utils import OperationAbortedException, yes_no_prompt
|
||||
|
||||
def get_dict(obj, is_config_object):
|
||||
result = {}
|
||||
for (k,v) in obj._remote_attributes.items():
|
||||
if isinstance(v, fibre.remote_object.RemoteProperty) and is_config_object:
|
||||
result[k] = v.get_value()
|
||||
elif isinstance(v, fibre.remote_object.RemoteObject):
|
||||
sub_dict = get_dict(v, k == 'config')
|
||||
if sub_dict != {}:
|
||||
result[k] = sub_dict
|
||||
return result
|
||||
|
||||
def set_dict(obj, path, config_dict):
|
||||
errors = []
|
||||
for (k,v) in config_dict.items():
|
||||
name = path + ("." if path != "" else "") + k
|
||||
if k not in obj._remote_attributes:
|
||||
errors.append("Could not restore {}: property not found on device".format(name))
|
||||
continue
|
||||
remote_attribute = obj._remote_attributes[k]
|
||||
if isinstance(remote_attribute, fibre.remote_object.RemoteObject):
|
||||
errors += set_dict(remote_attribute, name, v)
|
||||
else:
|
||||
try:
|
||||
remote_attribute.set_value(v)
|
||||
except Exception as ex:
|
||||
errors.append("Could not restore {}: {}".format(name, str(ex)))
|
||||
return errors
|
||||
|
||||
def get_temp_config_filename(device):
|
||||
serial_number = fibre.utils.get_serial_number_str(device)
|
||||
safe_serial_number = ''.join(filter(str.isalnum, serial_number))
|
||||
return os.path.join(tempfile.gettempdir(), 'ref_tool-config-{}.json'.format(safe_serial_number))
|
||||
|
||||
def backup_config(device, filename, logger):
|
||||
"""
|
||||
Exports the configuration of an ODrive to a JSON file.
|
||||
If no file name is provided, the file is placed into a
|
||||
temporary directory.
|
||||
"""
|
||||
|
||||
if filename is None:
|
||||
filename = get_temp_config_filename(device)
|
||||
|
||||
logger.info("Saving configuration to {}...".format(filename))
|
||||
|
||||
if os.path.exists(filename):
|
||||
if not yes_no_prompt("The file {} already exists. Do you want to override it?".format(filename), True):
|
||||
raise OperationAbortedException()
|
||||
|
||||
data = get_dict(device, False)
|
||||
with open(filename, 'w') as file:
|
||||
json.dump(data, file)
|
||||
logger.info("Configuration saved.")
|
||||
|
||||
def restore_config(device, filename, logger):
|
||||
"""
|
||||
Restores the configuration stored in a file
|
||||
"""
|
||||
|
||||
if filename is None:
|
||||
filename = get_temp_config_filename(device)
|
||||
|
||||
with open(filename) as file:
|
||||
data = json.load(file)
|
||||
|
||||
logger.info("Restoring configuration from {}...".format(filename))
|
||||
errors = set_dict(device, "", data)
|
||||
|
||||
for error in errors:
|
||||
logger.info(error)
|
||||
if errors:
|
||||
logger.warn("Some of the configuration could not be restored.")
|
||||
|
||||
device.save_configuration()
|
||||
logger.info("Configuration restored.")
|
80
3.Software/CLI-Tool/ref_tool/shell.py
Normal file
@ -0,0 +1,80 @@
|
||||
import fibre
|
||||
import ref_tool
|
||||
from ref_tool.utils import start_liveplotter
|
||||
|
||||
def print_banner():
|
||||
print('Please connect your Dummy-Robot.')
|
||||
print('You can also type help() or quit().')
|
||||
|
||||
def print_help(args, have_devices):
|
||||
print('')
|
||||
if have_devices:
|
||||
print('Connect your REF-Unit to {} and power it up.'.format(args.path))
|
||||
print('After that, the following message should appear:')
|
||||
print(' "Connected to REF-Unit [serial number] as odrv0"')
|
||||
print('')
|
||||
print('Once the REF-Unit is connected, type "odrv0." and press <tab>')
|
||||
else:
|
||||
print('Type "odrv0." and press <tab>')
|
||||
print('This will present you with all the properties that you can reference')
|
||||
print('')
|
||||
print('For example: "odrv0.motor0.encoder.pos_estimate"')
|
||||
print('will print the current encoder position on motor 0')
|
||||
print('and "odrv0.motor0.pos_setpoint = 10000"')
|
||||
print('will send motor0 to 10000')
|
||||
print('')
|
||||
|
||||
|
||||
interactive_variables = {}
|
||||
|
||||
discovered_devices = []
|
||||
|
||||
def did_discover_device(odrive, logger, app_shutdown_token):
|
||||
"""
|
||||
Handles the discovery of new devices by displaying a
|
||||
message and making the device available to the interactive
|
||||
console
|
||||
"""
|
||||
serial_number = odrive.serial_number if hasattr(odrive, 'serial_number') else "[unknown serial number]"
|
||||
if serial_number in discovered_devices:
|
||||
verb = "Reconnected"
|
||||
index = discovered_devices.index(serial_number)
|
||||
else:
|
||||
verb = "Connected"
|
||||
discovered_devices.append(serial_number)
|
||||
index = len(discovered_devices) - 1
|
||||
interactive_name = "odrv" + str(index)
|
||||
|
||||
# Publish new ODrive to interactive console
|
||||
interactive_variables[interactive_name] = odrive
|
||||
globals()[interactive_name] = odrive # Add to globals so tab complete works
|
||||
logger.notify("{} to ODrive {:012X} as {}".format(verb, serial_number, interactive_name))
|
||||
|
||||
# Subscribe to disappearance of the device
|
||||
odrive.__channel__._channel_broken.subscribe(lambda: did_lose_device(interactive_name, logger, app_shutdown_token))
|
||||
|
||||
def did_lose_device(interactive_name, logger, app_shutdown_token):
|
||||
"""
|
||||
Handles the disappearance of a device by displaying
|
||||
a message.
|
||||
"""
|
||||
if not app_shutdown_token.is_set():
|
||||
logger.warn("Oh no {} disappeared".format(interactive_name))
|
||||
|
||||
def launch_shell(args, logger, app_shutdown_token):
|
||||
"""
|
||||
Launches an interactive python or IPython command line
|
||||
interface.
|
||||
As ODrives are connected they are made available as
|
||||
"odrv0", "odrv1", ...
|
||||
"""
|
||||
|
||||
interactive_variables = {
|
||||
'start_liveplotter': start_liveplotter,
|
||||
}
|
||||
|
||||
fibre.launch_shell(args,
|
||||
interactive_variables,
|
||||
print_banner, print_help,
|
||||
logger, app_shutdown_token,
|
||||
branding_short="dummy", branding_long="Dummy-Robot")
|
181
3.Software/CLI-Tool/ref_tool/utils.py
Normal file
@ -0,0 +1,181 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import platform
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
from fibre.utils import Event
|
||||
|
||||
try:
|
||||
if platform.system() == 'Windows':
|
||||
import win32console
|
||||
import colorama
|
||||
colorama.init()
|
||||
except ImportError:
|
||||
print("Could not init terminal features.")
|
||||
print("Refer to install instructions at http://docs.odriverobotics.com/#downloading-and-installing-tools")
|
||||
sys.stdout.flush()
|
||||
pass
|
||||
|
||||
_VT100Colors = {
|
||||
'green': '\x1b[92;1m',
|
||||
'cyan': '\x1b[96;1m',
|
||||
'yellow': '\x1b[93;1m',
|
||||
'red': '\x1b[91;1m',
|
||||
'default': '\x1b[0m'
|
||||
}
|
||||
|
||||
class OperationAbortedException(Exception):
|
||||
pass
|
||||
|
||||
data_rate = 10
|
||||
plot_rate = 10
|
||||
num_samples = 1000
|
||||
def start_liveplotter(get_var_callback):
|
||||
"""
|
||||
Starts a liveplotter.
|
||||
The variable that is plotted is retrieved from get_var_callback.
|
||||
This function returns immediately and the liveplotter quits when
|
||||
the user closes it.
|
||||
"""
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
cancellation_token = Event()
|
||||
|
||||
global vals
|
||||
vals = []
|
||||
def fetch_data():
|
||||
global vals
|
||||
while not cancellation_token.is_set():
|
||||
try:
|
||||
data = get_var_callback()
|
||||
except Exception as ex:
|
||||
print(str(ex))
|
||||
time.sleep(1)
|
||||
continue
|
||||
vals.append(data)
|
||||
if len(vals) > num_samples:
|
||||
vals = vals[-num_samples:]
|
||||
time.sleep(1/data_rate)
|
||||
|
||||
# TODO: use animation for better UI performance, see:
|
||||
# https://matplotlib.org/examples/animation/simple_anim.html
|
||||
def plot_data():
|
||||
global vals
|
||||
|
||||
plt.ion()
|
||||
|
||||
# Make sure the script terminates when the user closes the plotter
|
||||
def did_close(evt):
|
||||
cancellation_token.set()
|
||||
fig = plt.figure()
|
||||
fig.canvas.mpl_connect('close_event', did_close)
|
||||
|
||||
while not cancellation_token.is_set():
|
||||
plt.clf()
|
||||
plt.plot(vals)
|
||||
plt.legend(list(range(len(vals))))
|
||||
fig.canvas.draw()
|
||||
fig.canvas.start_event_loop(1/plot_rate)
|
||||
|
||||
fetch_t = threading.Thread(target=fetch_data)
|
||||
fetch_t.daemon = True
|
||||
fetch_t.start()
|
||||
|
||||
plot_t = threading.Thread(target=plot_data)
|
||||
plot_t.daemon = True
|
||||
plot_t.start()
|
||||
|
||||
|
||||
return cancellation_token
|
||||
#plot_data()
|
||||
|
||||
def print_drv_regs(name, motor):
|
||||
"""
|
||||
Dumps the current gate driver registers for the specified motor
|
||||
"""
|
||||
fault = motor.gate_driver.drv_fault
|
||||
status_reg_1 = motor.gate_driver.status_reg_1
|
||||
status_reg_2 = motor.gate_driver.status_reg_2
|
||||
ctrl_reg_1 = motor.gate_driver.ctrl_reg_1
|
||||
ctrl_reg_2 = motor.gate_driver.ctrl_reg_2
|
||||
print(name + ": " + str(fault))
|
||||
print("DRV Fault Code: " + str(fault))
|
||||
print("Status Reg 1: " + str(status_reg_1) + " (" + format(status_reg_1, '#010b') + ")")
|
||||
print("Status Reg 2: " + str(status_reg_2) + " (" + format(status_reg_2, '#010b') + ")")
|
||||
print("Control Reg 1: " + str(ctrl_reg_1) + " (" + format(ctrl_reg_1, '#013b') + ")")
|
||||
print("Control Reg 2: " + str(ctrl_reg_2) + " (" + format(ctrl_reg_2, '#09b') + ")")
|
||||
|
||||
def show_oscilloscope(odrv):
|
||||
size = 18000
|
||||
values = []
|
||||
for i in range(size):
|
||||
values.append(odrv.get_oscilloscope_val(i))
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
plt.plot(values)
|
||||
plt.show()
|
||||
|
||||
def rate_test(device):
|
||||
"""
|
||||
Tests how many integers per second can be transmitted
|
||||
"""
|
||||
|
||||
# import matplotlib.pyplot as plt
|
||||
# plt.ion()
|
||||
|
||||
print("reading 10000 values...")
|
||||
numFrames = 10000
|
||||
vals = []
|
||||
for _ in range(numFrames):
|
||||
vals.append(device.axis0.loop_counter)
|
||||
|
||||
loopsPerFrame = (vals[-1] - vals[0])/numFrames
|
||||
loopsPerSec = (168000000/(2*10192))
|
||||
FramePerSec = loopsPerSec/loopsPerFrame
|
||||
print("Frames per second: " + str(FramePerSec))
|
||||
|
||||
# plt.plot(vals)
|
||||
# plt.show(block=True)
|
||||
|
||||
def usb_burn_in_test(get_var_callback, cancellation_token):
|
||||
"""
|
||||
Starts a background thread that reads values from the USB device in a spin loop
|
||||
"""
|
||||
|
||||
def fetch_data():
|
||||
global vals
|
||||
i = 0
|
||||
while not cancellation_token.is_set():
|
||||
try:
|
||||
get_var_callback()
|
||||
i += 1
|
||||
except Exception as ex:
|
||||
print(str(ex))
|
||||
time.sleep(1)
|
||||
i = 0
|
||||
continue
|
||||
if i % 1000 == 0:
|
||||
print("read {} values".format(i))
|
||||
threading.Thread(target=fetch_data, daemon=True).start()
|
||||
|
||||
def yes_no_prompt(question, default=None):
|
||||
if default is None:
|
||||
question += " [y/n] "
|
||||
elif default == True:
|
||||
question += " [Y/n] "
|
||||
elif default == False:
|
||||
question += " [y/N] "
|
||||
|
||||
while True:
|
||||
print(question, end='')
|
||||
|
||||
choice = input().lower()
|
||||
if choice in {'yes', 'y'}:
|
||||
return True
|
||||
elif choice in {'no', 'n'}:
|
||||
return False
|
||||
elif choice == '' and default is not None:
|
||||
return default
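# --- Illustrative usage sketch (not part of the original source) ---
#
#   if yes_no_prompt("Erase the current configuration?", default=False):
#       ...  # 'y'/'yes' and 'n'/'no' are accepted; an empty answer returns the default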
|
94
3.Software/CLI-Tool/ref_tool/version.py
Normal file
@ -0,0 +1,94 @@
|
||||
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def version_str_to_tuple(version_string):
|
||||
"""
|
||||
Converts a version string to a tuple of the form
|
||||
(major, minor, revision, prerelease)
|
||||
|
||||
Example: "fw-v0.3.6-23" => (0, 3, 6, True)
|
||||
"""
|
||||
regex=r'.*v([0-9a-zA-Z]+)\.([0-9a-zA-Z]+)\.([0-9a-zA-Z]+)(.*)'
|
||||
return (int(re.sub(regex, r"\1", version_string)),
|
||||
int(re.sub(regex, r"\2", version_string)),
|
||||
int(re.sub(regex, r"\3", version_string)),
|
||||
(re.sub(regex, r"\4", version_string) != ""))
|
||||
|
||||
|
||||
def get_version_from_git():
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
try:
|
||||
# Determine the current git commit version
|
||||
git_tag = subprocess.check_output(["git", "describe", "--always", "--tags", "--dirty=*"],
|
||||
cwd=script_dir)
|
||||
git_tag = git_tag.decode(sys.stdout.encoding).rstrip('\n')
|
||||
|
||||
(major, minor, revision, is_prerelease) = version_str_to_tuple(git_tag)
|
||||
|
||||
# if is_prerelease:
|
||||
# revision += 1
|
||||
return git_tag, major, minor, revision, is_prerelease
|
||||
|
||||
except Exception as ex:
|
||||
print(ex)
|
||||
return "[unknown version]", 0, 0, 0, 1
|
||||
|
||||
def get_version_str(git_only=False, is_post_release=False, bump_rev=False, release_override=False):
|
||||
"""
|
||||
Returns the versions of the tools
|
||||
If git_only is true, the version.txt file is ignored even
|
||||
if it is present.
|
||||
"""
|
||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Try to read the version.txt file that is generated during
|
||||
# the packaging step
|
||||
version_file_path = os.path.join(script_dir, 'version.txt')
|
||||
if os.path.exists(version_file_path) and not git_only:
|
||||
with open(version_file_path) as version_file:
|
||||
return version_file.readline().rstrip('\n')
|
||||
|
||||
_, major, minor, revision, unreleased = get_version_from_git()
|
||||
if bump_rev:
|
||||
revision += 1
|
||||
version = '{}.{}.{}'.format(major, minor, revision)
|
||||
if is_post_release:
|
||||
version += ".post"
|
||||
elif not release_override and unreleased:
|
||||
version += ".dev"
|
||||
return version
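# --- Illustrative example (not part of the original source) ---
# With a hypothetical git tag "fw-v0.3.6-23" (a prerelease), get_version_str()
# returns "0.3.6.dev"; with is_post_release=True it would return "0.3.6.post".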
|
||||
|
||||
if __name__ == '__main__':
|
||||
import argparse
|
||||
parser = argparse.ArgumentParser(description='Version Dump\n')
|
||||
parser.add_argument("--output", type=argparse.FileType('w'), default='-',
|
||||
help="C header output file")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
git_name, major, minor, revision, unreleased = get_version_from_git()
|
||||
print('Firmware version {}.{}.{}{} ({})'.format(
|
||||
major, minor, revision, '-dev' if unreleased else '',
|
||||
git_name))
|
||||
args.output.write('#define FW_VERSION "{}"\n'.format(git_name))
|
||||
args.output.write('#define FW_VERSION_MAJOR {}\n'.format(major))
|
||||
args.output.write('#define FW_VERSION_MINOR {}\n'.format(minor))
|
||||
args.output.write('#define FW_VERSION_REVISION {}\n'.format(revision))
|
||||
args.output.write('#define FW_VERSION_UNRELEASED {}\n'.format(1 if unreleased else 0))
|
||||
|
||||
def setup_udev_rules(logger):
|
||||
if platform.system() != 'Linux':
|
||||
logger.error("This command only makes sense on Linux")
|
||||
return
|
||||
if os.getuid() != 0:
|
||||
logger.warn("you should run this as root, otherwise it will probably not work")
|
||||
with open('/etc/udev/rules.d/91-ref_tool.rules', 'w') as file:
|
||||
file.write('SUBSYSTEM=="usb", ATTR{idVendor}=="1209", ATTR{idProduct}=="0d3[0-9]", MODE="0666", ENV{ID_MM_DEVICE_IGNORE}="1"\n')
|
||||
subprocess.check_call(["udevadm", "control", "--reload-rules"])
|
||||
subprocess.check_call(["udevadm", "trigger"])
|
||||
logger.info('udev rules configured successfully')
|
9
3.Software/CLI-Tool/run.bat
Normal file
@ -0,0 +1,9 @@
|
||||
@echo off
|
||||
if not defined TAG (
|
||||
set TAG=1
|
||||
start wt -p "cmd" %0
|
||||
exit
|
||||
)
|
||||
|
||||
|
||||
ipython "%~dp0\run_shell.py" -- %*
|
163
3.Software/CLI-Tool/run_shell.py
Normal file
@ -0,0 +1,163 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(
|
||||
os.path.realpath(__file__))),
|
||||
"Firmware", "fibre", "python"))
|
||||
from fibre import Logger, Event
|
||||
import ref_tool
|
||||
from ref_tool.configuration import *
|
||||
|
||||
old_print = print
|
||||
|
||||
|
||||
def print(*args, **kwargs):
|
||||
kwargs.pop('flush', False)
|
||||
old_print(*args, **kwargs)
|
||||
file = kwargs.get('file', sys.stdout)
|
||||
file.flush() if file is not None else sys.stdout.flush()
|
||||
|
||||
|
||||
script_path = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
## Parse arguments ##
|
||||
parser = argparse.ArgumentParser(description='Robot-Embedded-Framework command line utility\n',
|
||||
formatter_class=argparse.RawTextHelpFormatter)
|
||||
subparsers = parser.add_subparsers(help='sub-command help', dest='command')
|
||||
shell_parser = subparsers.add_parser('shell',
|
||||
help='Drop into an interactive python shell that lets you interact with the ODrive(s)')
|
||||
shell_parser.add_argument("--no-ipython", action="store_true",
|
||||
help="Use the regular Python shell "
|
||||
"instead of the IPython shell, "
|
||||
"even if IPython is installed.")
|
||||
subparsers.add_parser('liveplotter', help="For plotting of REF parameters (e.g. position) in real time")
|
||||
|
||||
# General arguments
|
||||
parser.add_argument("-p", "--path", metavar="PATH", action="store",
|
||||
help="The path(s) where REF-board(s) should be discovered.\n"
|
||||
"By default the script will connect to any REF-board on USB.\n\n"
|
||||
"To select a specific USB device:\n"
|
||||
" --path usb:BUS:DEVICE\n"
|
||||
"where BUS and DEVICE are the bus and device numbers as shown in `lsusb`.\n\n"
|
||||
"To select a specific serial port:\n"
|
||||
" --path serial:PATH\n"
|
||||
"where PATH is the path of the serial port. For example \"/dev/ttyUSB0\".\n"
|
||||
"You can use `ls /dev/tty*` to find the correct port.\n\n"
|
||||
"You can combine USB and serial specs by separating them with a comma (no space!)\n"
|
||||
"Example:\n"
|
||||
" --path usb,serial:/dev/ttyUSB0\n"
|
||||
"means \"discover any USB device or a serial device on /dev/ttyUSB0\"")
|
||||
parser.add_argument("-s", "--serial-number", action="store",
|
||||
help="The 12-digit serial number of the device. "
|
||||
"This is a string consisting of 12 upper case hexadecimal "
|
||||
"digits as displayed in lsusb. \n"
|
||||
" example: 385F324D3037\n"
|
||||
"You can list all devices connected to USB by running\n"
|
||||
"(lsusb -d 1209:0d32 -v; lsusb -d 0483:df11 -v) | grep iSerial\n"
|
||||
"If omitted, any device is accepted.")
|
||||
parser.add_argument("-v", "--verbose", action="store_true",
|
||||
help="print debug information")
|
||||
parser.add_argument("--version", action="store_true",
|
||||
help="print version information and exit")
|
||||
|
||||
parser.set_defaults(path="usb")
|
||||
args = parser.parse_args()
|
||||
|
||||
# Default command
|
||||
if args.command is None:
|
||||
args.command = 'shell'
|
||||
args.no_ipython = False
|
||||
logger = Logger(verbose=args.verbose)
|
||||
|
||||
app_shutdown_token = Event()
|
||||
|
||||
try:
|
||||
if args.command == 'shell':
|
||||
# if ".dev" in ref_tool.__version__:
|
||||
# print("")
|
||||
# logger.warn("Developer Preview")
|
||||
# print("")
|
||||
import ref_tool.shell
|
||||
|
||||
ref_tool.shell.launch_shell(args, logger, app_shutdown_token)
|
||||
|
||||
elif args.command == 'liveplotter':
|
||||
from ref_tool.utils import start_liveplotter
|
||||
|
||||
print("Waiting for ODrive...")
|
||||
ref_unit = ref_tool.find_any(path=args.path, serial_number=args.serial_number,
|
||||
search_cancellation_token=app_shutdown_token,
|
||||
channel_termination_token=app_shutdown_token)
|
||||
|
||||
# If you want to plot different values, change them here.
|
||||
# You can plot any number of values concurrently.
|
||||
cancellation_token = start_liveplotter(lambda: [
|
||||
ref_unit.axis0.encoder.pos_estimate,
|
||||
ref_unit.axis1.encoder.pos_estimate,
|
||||
])
|
||||
|
||||
print("Showing plot. Press Ctrl+C to exit.")
|
||||
while not cancellation_token.is_set():
|
||||
time.sleep(1)
|
||||
|
||||
elif args.command == 'drv-status':
|
||||
from ref_tool.utils import print_drv_regs
|
||||
|
||||
print("Waiting for ODrive...")
|
||||
ref_unit = ref_tool.find_any(path=args.path, serial_number=args.serial_number,
|
||||
search_cancellation_token=app_shutdown_token,
|
||||
channel_termination_token=app_shutdown_token)
|
||||
print_drv_regs("Motor 0", ref_unit.axis0.motor)
|
||||
print_drv_regs("Motor 1", ref_unit.axis1.motor)
|
||||
|
||||
elif args.command == 'rate-test':
|
||||
from ref_tool.utils import rate_test
|
||||
|
||||
print("Waiting for ODrive...")
|
||||
ref_unit = ref_tool.find_any(path=args.path, serial_number=args.serial_number,
|
||||
search_cancellation_token=app_shutdown_token,
|
||||
channel_termination_token=app_shutdown_token)
|
||||
rate_test(ref_unit)
|
||||
|
||||
elif args.command == 'udev-setup':
|
||||
from ref_tool.version import setup_udev_rules
|
||||
|
||||
setup_udev_rules(logger)
|
||||
|
||||
elif args.command == 'generate-code':
|
||||
from ref_tool.code_generator import generate_code
|
||||
|
||||
ref_unit = ref_tool.find_any(path=args.path, serial_number=args.serial_number,
|
||||
channel_termination_token=app_shutdown_token)
|
||||
generate_code(ref_unit, args.template, args.output)
|
||||
|
||||
elif args.command == 'backup-config':
|
||||
from ref_tool.configuration import backup_config
|
||||
|
||||
print("Waiting for ODrive...")
|
||||
ref_unit = ref_tool.find_any(path=args.path, serial_number=args.serial_number,
|
||||
search_cancellation_token=app_shutdown_token,
|
||||
channel_termination_token=app_shutdown_token)
|
||||
backup_config(ref_unit, args.file, logger)
|
||||
|
||||
elif args.command == 'restore-config':
|
||||
from ref_tool.configuration import restore_config
|
||||
|
||||
print("Waiting for ODrive...")
|
||||
ref_unit = ref_tool.find_any(path=args.path, serial_number=args.serial_number,
|
||||
search_cancellation_token=app_shutdown_token,
|
||||
channel_termination_token=app_shutdown_token)
|
||||
restore_config(ref_unit, args.file, logger)
|
||||
|
||||
else:
|
||||
raise Exception("unknown command: " + args.command)
|
||||
|
||||
except OperationAbortedException:
|
||||
logger.info("Operation aborted.")
|
||||
finally:
|
||||
app_shutdown_token.set()
|