From 7f2d374c4f6c14e1915216629b8307e80282caa0 Mon Sep 17 00:00:00 2001 From: Fabien-B Date: Thu, 6 Jun 2024 11:31:06 +0200 Subject: [PATCH] Update natnet SDK. (#3249) --- .../python/natnet3.x/DataDescriptions.py | 781 +++++++ .../python/natnet3.x/MoCapData.py | 998 +++++++++ .../python/natnet3.x/NatNetClient.py | 1927 ++++++++++++++--- .../python/natnet3.x/display_objects.py | 136 +- .../python/natnet3.x/natnet2ivy.py | 51 +- 5 files changed, 3494 insertions(+), 399 deletions(-) create mode 100644 sw/ground_segment/python/natnet3.x/DataDescriptions.py create mode 100644 sw/ground_segment/python/natnet3.x/MoCapData.py diff --git a/sw/ground_segment/python/natnet3.x/DataDescriptions.py b/sw/ground_segment/python/natnet3.x/DataDescriptions.py new file mode 100644 index 0000000000..e952fbdadb --- /dev/null +++ b/sw/ground_segment/python/natnet3.x/DataDescriptions.py @@ -0,0 +1,781 @@ +#Copyright © 2021 Naturalpoint +# +#Licensed under the Apache License, Version 2.0 (the "License") +#you may not use this file except in compliance with the License. +#You may obtain a copy of the License at +# +#http://www.apache.org/licenses/LICENSE-2.0 +# +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, +#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#See the License for the specific language governing permissions and +#limitations under the License. + + +# OptiTrack NatNet direct depacketization sample for Python 3.x +# + + +# Uses the Python NatNetClient.py library to establish a connection (by creating a NatNetClient), +# and receive data via a NatNet connection and decode it using the NatNetClient library. 
+ + + +import copy +import hashlib +import random + +K_SKIP = [0,0,1] +K_FAIL = [0,1,0] +K_PASS = [1,0,0] + + +def get_tab_str(tab_str, level): + out_tab_str="" + loop_range = range(0,level) + for _ in loop_range: + out_tab_str+=tab_str + return out_tab_str + +def add_lists(totals, totals_tmp): + totals[0]+=totals_tmp[0] + totals[1]+=totals_tmp[1] + totals[2]+=totals_tmp[2] + return totals + + + +def test_hash(test_name, test_hash_str, test_object): + out_str = test_object.get_as_string() + out_hash_str=hashlib.sha1(out_str.encode()).hexdigest() + ret_value=True + if test_hash_str == out_hash_str : + print("[PASS]:%s"%test_name) + else: + print("[FAIL]:%s test_hash_str != out_hash_str"%test_name) + print("test_hash_str=%s"%test_hash_str) + print("out_hash_str=%s"%out_hash_str) + print("out_str =\n%s"%out_str) + ret_value=False + return ret_value + +def test_hash2(test_name, test_hash_str, test_object, run_test=True): + ret_value = K_FAIL + out_str = "FAIL" + out_str2="" + indent_string=" " + if not run_test: + ret_value = K_SKIP + out_str = "SKIP" + elif test_object == None: + out_str = "FAIL" + ret_value = K_FAIL + out_str2 = "%sERROR: test_object was None"%indent_string + else: + obj_out_hash_str = "" + if str(type(test_object)) != 'NoneType': + obj_out_str = test_object.get_as_string() + obj_out_hash_str=hashlib.sha1(obj_out_str.encode()).hexdigest() + + if test_hash_str == obj_out_hash_str: + out_str = "PASS" + ret_value = K_PASS + else: + out_str2+="%s%s test_hash_str != out_hash_str\n"%(indent_string,test_name) + out_str2+="%stest_hash_str=%s\n"%(indent_string,test_hash_str) + out_str2+="%sobj_out_hash_str=%s\n"%(indent_string,obj_out_hash_str) + out_str2+="%sobj_out_str =\n%s"%(indent_string,obj_out_str) + ret_value = K_FAIL + print("[%s]:%s"%(out_str,test_name)) + + if len(out_str2): + print("%s"%out_str2) + return ret_value + + + +def get_as_string(input_str): + if type(input_str) == str: + return input_str + else: + return input_str.decode('utf-8') + + 
+def get_data_sub_packet_type(new_data): + out_string="" + data_type = type(new_data) + if data_type == MarkerSetDescription: + out_string="Type: 0 Markerset\n" + elif data_type == RigidBodyDescription: + out_string="Type: 1 Rigid Body\n" + elif data_type == SkeletonDescription: + out_string="Type: 2 Skeleton\n" + elif data_type == ForcePlateDescription: + out_string="Type: 3 Force Plate\n" + elif data_type == DeviceDescription: + out_string="Type: 4 Device\n" + elif data_type == CameraDescription: + out_string="Type: 5 Camera\n" + elif data_type == AssetDescription: + out_string="Type: 6 Asset\n" + elif data_type == None: + out_string="Type: None\n" + else: + out_string="Type: Unknown %s\n"%str(data_type) + return out_string + +# cMarkerSetDescription +class MarkerSetDescription: + def __init__(self): + self.marker_set_name="Not Set" + self.marker_names_list=[] + + def set_name(self,new_name): + self.marker_set_name=new_name + + def get_num_markers(self): + return len(self.marker_names_list) + + def add_marker_name(self,marker_name): + self.marker_names_list.append(copy.copy(marker_name)) + return self.get_num_markers() + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = get_tab_str(tab_str, level+1) + out_tab_str3 = get_tab_str(tab_str, level+2) + out_string="" + out_string+="%sMarker Set Name: %s\n"%(out_tab_str,get_as_string(self.marker_set_name)) + num_markers = len(self.marker_names_list) + out_string+="%sMarker Count : %d\n"%(out_tab_str2, num_markers) + for i in range(num_markers): + out_string+="%s%3.1d Marker Name: %s\n"%(out_tab_str3,i, get_as_string(self.marker_names_list[i])) + return out_string + +class RBMarker: + def __init__(self, marker_name="", active_label=0, pos=[0.0,0.0,0.0]): + self.marker_name = marker_name + self.active_label = active_label + self.pos=pos + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_string="" + out_string += 
"%sMarker Label: %s Position: [%f %f %f] %s\n" % \ + (out_tab_str, self.active_label, self.pos[0],self.pos[1],self.pos[2],self.marker_name ) + return out_string + + +class RigidBodyDescription: + def __init__(self,sz_name="", new_id=0, parent_id=0,pos=[0.0,0.0,0.0]): + self.sz_name=sz_name + self.id_num = new_id + self.parent_id = parent_id + self.pos=pos + self.rb_marker_list=[] + + + def set_name(self,new_name): + self.sz_name=new_name + + def set_id(self, new_id): + self.id_num = new_id + + def set_parent_id(self, parent_id): + self.parent_id = parent_id + + def set_pos(self,p_x,p_y,p_z): + self.pos=[p_x,p_y,p_z] + + def get_num_markers(self): + return len(self.rb_marker_list) + + def add_rb_marker(self,new_rb_maker): + self.rb_marker_list.append(copy.deepcopy(new_rb_maker)) + return self.get_num_markers() + + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = get_tab_str(tab_str, level+1) + out_string="" + out_string += "%sRigid Body Name : %s\n"%(out_tab_str, get_as_string(self.sz_name)) + out_string += "%sID : %d\n"%(out_tab_str, self.id_num) + out_string += "%sParent ID : %d\n"%(out_tab_str, self.parent_id) + out_string += "%sPosition : [%3.2f, %3.2f, %3.2f]\n"%(out_tab_str, self.pos[0],self.pos[1],self.pos[2]) + num_markers= len(self.rb_marker_list) + out_string += "%sNumber of Markers : %d\n"%(out_tab_str, num_markers ) + # loop over markers + for i in range(num_markers): + out_string += "%s%i %s"%(out_tab_str2,i,self.rb_marker_list[i].get_as_string(tab_str,0)) + return out_string + + + +class SkeletonDescription: + def __init__(self, name="", new_id=0): + self.name = name + self.id_num = new_id + self.rigid_body_description_list=[] + + def set_name(self,new_name): + self.name=new_name + + def set_id(self, new_id): + self.id_num = new_id + + def add_rigid_body_description(self,rigid_body_description): + self.rigid_body_description_list.append(copy.deepcopy(rigid_body_description)) + return 
len(self.rigid_body_description_list) + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = get_tab_str(tab_str, level+1) + out_string = "" + out_string += "%sName : %s\n"%(out_tab_str,get_as_string(self.name)) + out_string += "%sID : %d\n"% (out_tab_str, self.id_num) + num_bones = len(self.rigid_body_description_list) + out_string += "%sRigid Body (Bone) Count : %d\n" % (out_tab_str, num_bones) + for i in range(num_bones): + out_string += "%sRigid Body (Bone) %d\n"%(out_tab_str2, i) + out_string += self.rigid_body_description_list[i].get_as_string(tab_str,level+2) + return out_string + + +class ForcePlateDescription: + def __init__(self, new_id=0, serial_number=""): + self.id_num = new_id + self.serial_number = serial_number + self.width = 0 + self.length = 0 + self.position=[0.0,0.0,0.0] + self.cal_matrix= [[0.0 for col in range(12)] for row in range(12)] + self.corners = [[0.0 for col in range(3)] for row in range(4)] + self.plate_type = 0 + self.channel_data_type = 0 + self.channel_list=[] + + def set_id(self, new_id): + self.id_num = new_id + + def set_serial_number(self,serial_number): + self.serial_number=serial_number + + def set_dimensions(self, width, length): + self.width = width + self.length = length + + def set_origin(self, p_x, p_y, p_z): + self.position=[p_x,p_y,p_z] + + def set_cal_matrix(self, cal_matrix): + self.cal_matrix = cal_matrix + + def set_corners(self, corners): + self.corners = corners + + def set_plate_type(self, plate_type): + self.plate_type=plate_type + + def set_channel_data_type(self, channel_data_type): + self.channel_data_type = channel_data_type + + def add_channel_name(self,channel_name): + self.channel_list.append(copy.deepcopy(channel_name)) + return len(self.channel_list) + + def get_cal_matrix_as_string(self, tab_str="", level=0): + """Get force plate calibration matrix as string""" + out_tab_str=get_tab_str(tab_str,level) + out_tab_str2=get_tab_str(tab_str,level+1) 
+ out_string="" + out_string+="%sCal Matrix:\n"%out_tab_str + for i in range(0,12): + out_string+="%s%2.1d %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e\n" % \ + (out_tab_str2,i, + self.cal_matrix[i][0], self.cal_matrix[i][1], + self.cal_matrix[i][2], self.cal_matrix[i][3], + self.cal_matrix[i][4], self.cal_matrix[i][5], + self.cal_matrix[i][6], self.cal_matrix[i][7], + self.cal_matrix[i][8], self.cal_matrix[i][9], + self.cal_matrix[i][10], self.cal_matrix[i][11]) + return out_string + + def get_corners_as_string(self, tab_str="", level=0): + """Get force plate corner positions as a string""" + # Corners 4x3 floats + out_tab_str=get_tab_str(tab_str,level) + out_tab_str2=get_tab_str(tab_str,level+1) + out_string="" + out_string+="%sCorners:\n"%out_tab_str + for i in range(0,4): + out_string+="%s%2.1d %3.3e %3.3e %3.3e\n" % \ + (out_tab_str2,i, + self.corners[i][0], self.corners[i][1], self.corners[i][2]) + return out_string + + + def get_as_string(self, tab_str=" ", level=0): + """Get force plate description as a class""" + out_tab_str = get_tab_str(tab_str, level) + out_string = "" + out_string += "%sID : %d\n"% (out_tab_str, self.id_num) + out_string += "%sSerial Number : %s\n"% (out_tab_str,\ + get_as_string(self.serial_number)) + out_string += "%sWidth : %3.2f\n"%(out_tab_str, self.width) + out_string += "%sLength : %3.2f\n"%(out_tab_str, self.length) + out_string += "%sOrigin : %3.2f, %3.2f, %3.2f\n"%(out_tab_str, + self.position[0], + self.position[1], + self.position[2]) + out_string += self.get_cal_matrix_as_string(tab_str, level) + out_string += self.get_corners_as_string(tab_str, level) + + out_string+="%sPlate Type : %d\n"%(out_tab_str, self.plate_type) + out_string+="%sChannel Data Type : %d\n"%(out_tab_str, self.channel_data_type) + num_channels = len(self.channel_list) + out_string+="%sNumber of Channels : %d\n"%(out_tab_str, num_channels) + # Channel Names list of NoC strings + out_tab_str2=get_tab_str(tab_str, level+1) + for 
channel_num in range(num_channels): + out_string += "%sChannel Name %d: %s\n"%(out_tab_str2, channel_num,\ + get_as_string(self.channel_list[channel_num]) ) + + return out_string + +class DeviceDescription: + """Device Description class""" + def __init__(self,new_id,name, serial_number,device_type,channel_data_type): + self.id_num=new_id + self.name=name + self.serial_number=serial_number + self.device_type=device_type + self.channel_data_type=channel_data_type + self.channel_list=[] + + def set_id(self, new_id): + """Set the device id""" + self.id_num=new_id + + def set_name(self, name): + """Set the Device name""" + self.name=name + + def add_channel_name(self, channel_name): + """Add channel name to channel_list""" + self.channel_list.append(channel_name) + return len(self.channel_list) + + def get_as_string(self, tab_str=" ", level=0): + """Get Device Description as string""" + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = get_tab_str(tab_str, level+1) + out_string = "" + out_string +="%sID : %5.1d\n"%(out_tab_str, self.id_num) + out_string +="%sName : %s\n"%(out_tab_str,get_as_string(self.name)) + out_string +="%sSerial Number : %s\n"%(out_tab_str,get_as_string(self.serial_number)) + out_string +="%sDevice Type : %d\n"%(out_tab_str,self.device_type) + out_string +="%sChannel Data Type : %d\n"%(out_tab_str, self.channel_data_type) + num_channels = len(self.channel_list) + out_string +="%sNumber of Channels : %d\n"%(out_tab_str, num_channels) + for i in range(num_channels): + out_string+="%sChannel %2.1d Name : %s\n"%(out_tab_str2, i,\ + get_as_string(self.channel_list[i])) + return out_string + +class CameraDescription: + """Camera Description class""" + def __init__(self, name, position_vec3, orientation_quat): + self.name=name + self.position=position_vec3 + self.orientation=orientation_quat + + def get_as_string(self, tab_str="..", level=0): + """Get Camera Description as a string""" + out_tab_str = get_tab_str(tab_str, level) + out_string = "" 
+ out_string += "%sName : %s\n"%(out_tab_str,get_as_string(self.name)) + out_string += "%sPosition : [%3.2f, %3.2f, %3.2f]\n"% \ + (out_tab_str,self.position[0], self.position[1], self.position[2] ) + out_string += "%sOrientation : [%3.2f, %3.2f, %3.2f, %3.2f]\n"% \ + (out_tab_str,\ + self.orientation[0], self.orientation[1],\ + self.orientation[2], self.orientation[3] ) + return out_string + +class MarkerDescription: + """Marker Description class""" + def __init__(self, name, marker_id, position, marker_size, marker_params): + self.name=name + self.marker_id=marker_id + self.position=position + self.marker_size=marker_size + self.marker_params=marker_params + + def get_as_string(self, tab_str="..", level=0): + """Get Marker Description as a string""" + out_tab_str = get_tab_str(tab_str, level) + out_string = "" + out_string += "%sName : %s\n"%(out_tab_str,get_as_string(self.name)) + out_string += "%sPosition : [%3.2f, %3.2f, %3.2f]\n"% \ + (out_tab_str,self.position[0], self.position[1], self.position[2] ) + out_string += "Size : %d\n"%(self.marker_size) + out_string += "Params : %d\n"%(self.marker_params) + + return out_string + +class AssetDescription: + """Asset Description class""" + def __init__(self, name, assetType, assetID, rigidbodyArray, markerArray): + self.name=name + self.assetType=assetType + self.assetID=assetID + self.rigidbodyArray=rigidbodyArray + self.markerArray=markerArray + + def get_as_string(self, tab_str="..", level=0): + """Get Asset Description as a string""" + out_tab_str = get_tab_str(tab_str, level) + out_string = "" + out_string += "Asset Description\n" + out_string += "%sName : %s\n"%(out_tab_str,get_as_string(self.name)) + out_string += "assetType : %d\n"%(self.assetType) + out_string += "assetID : %d\n"%(self.assetID) + out_string += "numRBs : %d\n"%(self.rigidbodyArray.size()) + out_string += "numMarkers : %d\n"%(self.markerArray.size()) + + return out_string + + + +# cDataDescriptions +# Full data descriptions +class 
DataDescriptions(): + """Data Descriptions class""" + order_num = 0 + def __init__(self): + self.data_order_dict={} + self.marker_set_list=[] + self.rigid_body_list=[] + self.skeleton_list=[] + self.force_plate_list=[] + self.device_list=[] + self.camera_list=[] + + def generate_order_name(self): + """Generate the name for the order list based on the current length of the list""" + # should be a one up counter instead of based on length of data_order_dict + order_name="data_%3.3d"%self.order_num + self.order_num += 1 + return order_name + + # Add Marker Set + def add_marker_set(self, new_marker_set): + """Add a marker set""" + order_name = self.generate_order_name() + + # generate order entry + pos = len(self.marker_set_list) + self.data_order_dict[order_name]=("marker_set_list", pos) + self.marker_set_list.append(copy.deepcopy(new_marker_set)) + + # Add Rigid Body + def add_rigid_body(self, new_rigid_body): + """Add a rigid body""" + order_name = self.generate_order_name() + + # generate order entry + pos = len(self.rigid_body_list) + self.data_order_dict[order_name]=("rigid_body_list", pos) + self.rigid_body_list.append(copy.deepcopy(new_rigid_body)) + + + # Add a skeleton + def add_skeleton(self, new_skeleton): + """Add a skeleton""" + order_name = self.generate_order_name() + + # generate order entry + pos = len(self.skeleton_list) + self.data_order_dict[order_name]=("skeleton_list", pos) + self.skeleton_list.append(copy.deepcopy(new_skeleton)) + + + # Add a force plate + def add_force_plate(self, new_force_plate): + """Add a force plate""" + order_name = self.generate_order_name() + + # generate order entry + pos = len(self.force_plate_list) + self.data_order_dict[order_name]=("force_plate_list", pos) + self.force_plate_list.append(copy.deepcopy(new_force_plate)) + + + def add_device(self, newdevice): + """ add_device - Add a device""" + order_name = self.generate_order_name() + + # generate order entry + pos = len(self.device_list) + 
self.data_order_dict[order_name]=("device_list", pos) + self.device_list.append(copy.deepcopy(newdevice)) + + + def add_camera(self, newcamera): + """ Add a new camera """ + order_name = self.generate_order_name() + + # generate order entry + pos = len(self.camera_list) + self.data_order_dict[order_name]=("camera_list", pos) + self.camera_list.append(copy.deepcopy(newcamera)) + + def add_data(self, new_data): + """Add data based on data type""" + data_type = type(new_data) + if data_type == MarkerSetDescription: + self.add_marker_set(new_data) + elif data_type == RigidBodyDescription: + self.add_rigid_body(new_data) + elif data_type == SkeletonDescription: + self.add_skeleton(new_data) + elif data_type == ForcePlateDescription: + self.add_force_plate(new_data) + elif data_type == DeviceDescription: + self.add_device(new_data) + elif data_type == CameraDescription: + self.add_camera(new_data) + elif data_type is None: + data_type = None + else: + print("ERROR: Type %s unknown"%str(data_type)) + + def get_object_from_list(self, list_name, pos_num): + """Determine list name and position of the object""" + ret_value = None + if (list_name =="marker_set_list") and \ + (pos_num < len(self.marker_set_list)): + ret_value = self.marker_set_list[pos_num] + + elif (list_name =="rigid_body_list") and \ + (pos_num < len(self.rigid_body_list)): + ret_value = self.rigid_body_list[pos_num] + + elif (list_name =="skeleton_list") and \ + (pos_num < len(self.skeleton_list)): + ret_value = self.skeleton_list[pos_num] + + elif (list_name =="force_plate_list") and \ + (pos_num < len(self.force_plate_list)): + ret_value = self.force_plate_list[pos_num] + + elif (list_name =="device_list") and \ + (pos_num < len(self.device_list)): + ret_value = self.device_list[pos_num] + + elif (list_name =="camera_list") and \ + (pos_num < len(self.camera_list)): + ret_value = self.camera_list[pos_num] + + else: + ret_value = None + + return ret_value + + def get_as_string(self, tab_str=" ", level = 
0): + """Ensure data comes back as a string""" + out_tab_str = get_tab_str(tab_str,level) + out_tab_str2 = get_tab_str(tab_str,level+1) + out_tab_str3 = get_tab_str(tab_str,level+2) + out_string="" + num_data_sets=len(self.data_order_dict) + out_string+="%sNumber of Data Sets: %d\n"%(out_tab_str, num_data_sets) + i=0 + for tmp_key,tmp_value in self.data_order_dict.items(): + #tmp_name,tmp_num=self.data_order_dict[data_set] + tmp_name=tmp_value[0] + tmp_num =tmp_value[1] + tmp_object = self.get_object_from_list(tmp_name, tmp_num) + out_string += "%sDataset %3.1d\n"%(out_tab_str2, i) + tmp_string = get_data_sub_packet_type(tmp_object) + if tmp_string != "": + out_string += "%s%s"%(out_tab_str2, tmp_string) + #out_string += "%s%s %s %d\n"%(out_tab_str2, data_set, tmp_name,tmp_num) + out_string += "%s%s %s %s\n"%(out_tab_str2,tmp_key, tmp_name,tmp_num) + if tmp_object is not None: + out_string += tmp_object.get_as_string(tab_str,level+2) + else: + out_string += "%s%s %s %s not found\n"%(out_tab_str3,tmp_key, tmp_name,tmp_num) + out_string += "\n" + i+=1 + + return out_string + +# cDataDescriptions END + +def generate_marker_set_description(set_num=0): + """generate_marker_set_description - Testing functions""" + marker_set_description = MarkerSetDescription() + marker_set_description.set_name("MarkerSetName%3.3d"%set_num) + marker_set_description.add_marker_name("MarkerName%3.3d_0"% set_num) + marker_set_description.add_marker_name("MarkerName%3.3d_1"% set_num) + marker_set_description.add_marker_name("MarkerName%3.3d_2"% set_num) + marker_set_description.add_marker_name("MarkerName%3.3d_3"% set_num) + return marker_set_description + +def generate_rb_marker(marker_num=0): + """generate_rb_marker - Generate rigid body marker based on marker number""" + marker_num_mod = marker_num % 4 + marker_name="RBMarker_%3.3d"%marker_num + marker_active_label = marker_num+10000 + marker_pos=[1.0,4.0,9.0] + if marker_num_mod == 1: + marker_pos=[1.0, 8.0, 27.0] + elif marker_num_mod 
== 2: + marker_pos=[3.1, 4.1, 5.9] + elif marker_num_mod == 3: + marker_pos=[1.0, 3.0, 6.0] + + return RBMarker(marker_name, marker_active_label, marker_pos) + +def generate_rigid_body_description(rbd_num=0): + """generate_rigid_body_description - Generate Rigid Body Description Data""" + rbd=RigidBodyDescription() + rbd.set_name("rigidBodyDescription_%3.3d"%rbd_num) + rbd.set_id(3141) + rbd.set_parent_id(314) + rbd.set_pos(1,4,9) + rbd.add_rb_marker(generate_rb_marker(0)) + rbd.add_rb_marker(generate_rb_marker(1)) + rbd.add_rb_marker(generate_rb_marker(2)) + + return rbd + +def generate_skeleton_description(skeleton_num=0): + """generate_skeleton_description -Generate Test SkeletonDescription Data""" + skel_desc=SkeletonDescription("SkeletonDescription_%3.3d"%skeleton_num,skeleton_num) + #generate some rigid bodies to add + skel_desc.add_rigid_body_description(generate_rigid_body_description(0)) + skel_desc.add_rigid_body_description(generate_rigid_body_description(1)) + skel_desc.add_rigid_body_description(generate_rigid_body_description(2)) + skel_desc.add_rigid_body_description(generate_rigid_body_description(3)) + skel_desc.add_rigid_body_description(generate_rigid_body_description(5)) + skel_desc.add_rigid_body_description(generate_rigid_body_description(7)) + return skel_desc + +def generate_force_plate_description(force_plate_num=0): + """generate_force_plate_description - Generate Test ForcePlateDescription Data""" + fp_id=force_plate_num + random.seed(force_plate_num) + + serial_number="S/N_%5.5d"%random.randint(0,99999) + width=random.random()*10 + length=random.random()*10 + origin=[(random.random()*100),(random.random()*100),(random.random()*100)] + corners = [[0.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [1.0, 1.0, 0.0], + [1.0, 0.0, 0.0]] + + fp_desc=ForcePlateDescription(fp_id, serial_number) + fp_desc.set_dimensions(width, length) + fp_desc.set_origin(origin[0],origin[1],origin[2]) + #fp_desc.set_cal_matrix(cal_matrix) + fp_desc.set_corners(corners) + for 
i in range(3): + fp_desc.add_channel_name("channel_%3.3d"%i) + return fp_desc + + +def generate_device_description(dev_num=0): + """generate_device_description- Generate Test DeviceDescription Data""" + new_id=0 + name="Device%3.3d"%dev_num + serial_number="SerialNumber%3.3d"%dev_num + device_type=dev_num%4 + channel_data_type=dev_num%5 + dev_desc = DeviceDescription(new_id,name, serial_number,device_type,channel_data_type) + for i in range(channel_data_type+3): + dev_desc.add_channel_name("channel_name_%2.2d"%i) + return dev_desc + + +def generate_camera_description(cam_num=0): + """generate_camera_description - Generate Test CameraDescription data""" + pos_vec3=[1,2,3] + orientation_quat=[1,2,3,4] + return CameraDescription("Camera_%3.3d"%cam_num, pos_vec3, orientation_quat) + + +#generate_data_descriptions - Generate Test DataDescriptions +def generate_data_descriptions(data_desc_num=0): + """Generate data descriptions""" + data_descs = DataDescriptions() + + data_descs.add_data(generate_marker_set_description(data_desc_num+0)) + data_descs.add_data(generate_marker_set_description(data_desc_num+1)) + + data_descs.add_data(generate_rigid_body_description(data_desc_num+0)) + data_descs.add_data(generate_rigid_body_description(data_desc_num+1)) + + data_descs.add_skeleton(generate_skeleton_description(data_desc_num+3)) + data_descs.add_skeleton(generate_skeleton_description(data_desc_num+9)) + data_descs.add_skeleton(generate_skeleton_description(data_desc_num+27)) + + data_descs.add_force_plate(generate_force_plate_description(data_desc_num+123)) + data_descs.add_force_plate(generate_force_plate_description(data_desc_num+87)) + data_descs.add_force_plate(generate_force_plate_description(data_desc_num+21)) + + data_descs.add_device(generate_device_description(data_desc_num+0)) + data_descs.add_device(generate_device_description(data_desc_num+2)) + data_descs.add_device(generate_device_description(data_desc_num+4)) + + 
data_descs.add_camera(generate_camera_description(data_desc_num+0)) + data_descs.add_camera(generate_camera_description(data_desc_num+10)) + data_descs.add_camera(generate_camera_description(data_desc_num+3)) + data_descs.add_camera(generate_camera_description(data_desc_num+7)) + return data_descs + + +# test_all - Test all the major classes +def test_all(run_test=True): + """Test all the Data Description classes""" + totals=[0,0,0] + if run_test is True: + test_cases=[["Test Marker Set Description 0", "754fe535286ca84bd054d9aca5e9906ab9384d92", + "generate_marker_set_description(0)",True], + ["Test RB Marker 0", "0f2612abf2ce70e479d7b9912f646f12910b3310", + "generate_rb_marker(0)",True], + ["Test Rigid Body Description 0", "7a4e93dcda442c1d9c5dcc5c01a247e4a6c01b66", + "generate_rigid_body_description(0)",True], + ["Test Skeleton Description 0", "b4d1a031dd7c323e3d316b5312329881a6a552ca", + "generate_skeleton_description(0)",True], + ["Test Force Plate Description 0", "b385dd1096bdd9f521eb48bb9cbfb3414ea075bd", + "generate_force_plate_description(0)",True], + ["Test Device Description 0", "39b4fdda402bc73c0b1cd5c7f61599476aa9a926", + "generate_device_description(0)",True], + ["Test Camera Description 0", "614602c5d290bda3b288138d5e25516dd1e1e85a", + "generate_camera_description(0)",True], + ["Test Data Description 0", "e5f448d10087ac818a65934710a85fc7ebfdf89e", + "generate_data_descriptions(0)",True], + ] + num_tests = len(test_cases) + for i in range(num_tests): + data = eval(test_cases[i][2]) + totals_tmp = test_hash2(test_cases[i][0],test_cases[i][1],data,test_cases[i][3]) + totals=add_lists(totals, totals_tmp) + + print("--------------------") + print("[PASS] Count = %3.1d"%totals[0]) + print("[FAIL] Count = %3.1d"%totals[1]) + print("[SKIP] Count = %3.1d"%totals[2]) + + return totals + + + +# +if __name__ == "__main__": + test_all(True) diff --git a/sw/ground_segment/python/natnet3.x/MoCapData.py b/sw/ground_segment/python/natnet3.x/MoCapData.py new file mode 
100644 index 0000000000..47b54f590b --- /dev/null +++ b/sw/ground_segment/python/natnet3.x/MoCapData.py @@ -0,0 +1,998 @@ +#Copyright © 2021 Naturalpoint +# +#Licensed under the Apache License, Version 2.0 (the "License") +#you may not use this file except in compliance with the License. +#You may obtain a copy of the License at +# +#http://www.apache.org/licenses/LICENSE-2.0 +# +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, +#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#See the License for the specific language governing permissions and +#limitations under the License. + + +# OptiTrack NatNet direct depacketization sample for Python 3.x +# + + +# Uses the Python NatNetClient.py library to establish a connection (by creating a NatNetClient), +# and receive data via a NatNet connection and decode it using the NatNetClient library. + +#Utility functions + +import copy +import hashlib +import random + +K_SKIP = [0,0,1] +K_FAIL = [0,1,0] +K_PASS = [1,0,0] + +# get_tab_str +# generate a string that takes the nesting level into account +def get_tab_str(tab_str, level): + out_tab_str="" + loop_range = range(0,level) + for _ in loop_range: + out_tab_str+=tab_str + return out_tab_str + +def add_lists(totals, totals_tmp): + totals[0]+=totals_tmp[0] + totals[1]+=totals_tmp[1] + totals[2]+=totals_tmp[2] + return totals + +def test_hash(test_name, test_hash_str, test_object): + out_str = test_object.get_as_string() + out_hash_str=hashlib.sha1(out_str.encode()).hexdigest() + ret_value=True + if test_hash_str == out_hash_str: + print("[PASS]:%s"%test_name) + else: + print("[FAIL]:%s test_hash_str != out_hash_str"%test_name) + print("test_hash_str=%s"%test_hash_str) + print("out_hash_str=%s"%out_hash_str) + print("out_str =\n%s"%out_str) + ret_value=False + return ret_value + + +def test_hash2(test_name, test_hash_str, test_object, run_test=True): + ret_value = 
K_FAIL + out_str = "FAIL" + out_str2="" + indent_string=" " + if not run_test: + ret_value = K_SKIP + out_str = "SKIP" + elif test_object == None: + out_str = "FAIL" + ret_value = K_FAIL + out_str2 = "%sERROR: test_object was None"%indent_string + else: + + if str(type(test_object)) != 'NoneType': + obj_out_str = test_object.get_as_string() + obj_out_hash_str=hashlib.sha1(obj_out_str.encode()).hexdigest() + + if test_hash_str == obj_out_hash_str: + out_str = "PASS" + ret_value = K_PASS + else: + out_str2+="%s%s test_hash_str != out_hash_str\n"%(indent_string,test_name) + out_str2+="%stest_hash_str=%s\n"%(indent_string,test_hash_str) + out_str2+="%sobj_out_hash_str=%s\n"%(indent_string,obj_out_hash_str) + out_str2+="%sobj_out_str =\n%s"%(indent_string,obj_out_str) + ret_value = K_FAIL + print("[%s]:%s"%(out_str,test_name)) + + if len(out_str2): + print("%s"%out_str2) + return ret_value + + +def get_as_string(input_str): + type_input_str=str(type(input_str)) + if type_input_str == "": + return input_str + elif type_input_str == "": + return "" + elif type_input_str == "": + return input_str.decode('utf-8') + else: + print("type_input_str = %s NOT HANDLED"%type_input_str) + return input_str + + +#MoCap Frame Classes +class FramePrefixData: + def __init__(self, frame_number): + self.frame_number=frame_number + + def get_as_string(self,tab_str=" ", level = 0): + out_tab_str = get_tab_str(tab_str, level) + out_str = "%sFrame #: %3.1d\n"%(out_tab_str,self.frame_number) + return out_str + +class MarkerData: + def __init__(self): + self.model_name="" + self.marker_pos_list=[] + + def set_model_name(self, model_name): + self.model_name = model_name + + def add_pos(self, pos): + self.marker_pos_list.append(copy.deepcopy(pos)) + return len(self.marker_pos_list) + + + def get_num_points(self): + return len(self.marker_pos_list) + + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = get_tab_str(tab_str, level+1) + 
out_str="" + out_str+="%sMarkerData:\n"%out_tab_str + if self.model_name != "": + out_str+="%sModel Name : %s\n"%(out_tab_str, get_as_string(self.model_name)) + marker_count = len(self.marker_pos_list) + out_str+="%sMarker Count :%3.1d\n"%(out_tab_str, marker_count) + for i in range(marker_count): + pos = self.marker_pos_list[i] + out_str+="%sMarker %3.1d pos : [x=%3.2f,y=%3.2f,z=%3.2f]\n"%(out_tab_str2,i,pos[0], pos[1], pos[2]) + return out_str + +class MarkerSetData: + def __init__(self): + self.marker_data_list=[] + self.unlabeled_markers=MarkerData() + self.unlabeled_markers.set_model_name("") + + def add_marker_data(self, marker_data): + self.marker_data_list.append(copy.deepcopy(marker_data)) + return len(self.marker_data_list) + + def add_unlabeled_marker(self, pos): + self.unlabeled_markers.add_pos(pos) + + def get_marker_set_count(self): + return len(self.marker_data_list) + + def get_unlabeled_marker_count(self): + return self.unlabeled_markers.get_num_points() + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + + + out_str="" + + # Labeled markers count + marker_data_count = len(self.marker_data_list) + out_str+= "%sMarker Set Count:%3.1d\n"% (out_tab_str,marker_data_count) + for marker_data in self.marker_data_list: + out_str += marker_data.get_as_string(tab_str,level+1) + + # Unlabeled markers count (4 bytes) + unlabeled_markers_count = self.unlabeled_markers.get_num_points() + out_str += "%sUnlabeled Markers Count:%3.1d\n"%(out_tab_str, unlabeled_markers_count ) + out_str += self.unlabeled_markers.get_as_string(tab_str,level+1) + return out_str + +class LegacyMarkerData: + def __init__(self): + self.marker_pos_list=[] + + def add_pos(self, pos): + self.marker_pos_list.append(copy.deepcopy(pos)) + return len(self.marker_pos_list) + + def get_marker_count(self): + return len(self.marker_pos_list) + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = 
get_tab_str(tab_str, level+1) + out_str="" + marker_count = len(self.marker_pos_list) + out_str+="%sLegacy Marker Count :%3.1d\n"%(out_tab_str, marker_count) + for i in range(marker_count): + pos = self.marker_pos_list[i] + out_str+="%sMarker %3.1d pos : [x=%3.2f,y=%3.2f,z=%3.2f]\n"%(out_tab_str2,i,pos[0], pos[1], pos[2]) + return out_str + +class RigidBodyMarker: + def __init__(self): + self.pos = [0.0,0.0,0.0] + self.id_num = 0 + self.size = 0 + self.error = 0 + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_str = "" + out_str+="RBMarker:\n" + out_str += "%sPosition: [%3.2f %3.2f %3.2f]\n"%( out_tab_str, self.pos[0], self.pos[1], self.pos[2] ) + out_str += "%sID : %3.1d\n"%(out_tab_str, self.id_num) + out_str += "%sSize : %3.1d\n"%(out_tab_str, self.size) + return out_str + + +class RigidBody: + def __init__(self, new_id, pos, rot): + self.id_num = new_id + self.pos=pos + self.rot=rot + self.rb_marker_list=[] + self.tracking_valid = False + self.error = 0.0 + + def add_rigid_body_marker(self, rigid_body_marker): + self.rb_marker_list.append(copy.deepcopy(rigid_body_marker)) + return len(self.rb_marker_list) + + + def get_as_string(self, tab_str=0, level=0): + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = get_tab_str(tab_str, level+1) + + out_str="" + + # header + out_str += "%sID : %3.1d\n"% (out_tab_str, self.id_num) + # Position and orientation + out_str += "%sPosition : [%3.2f, %3.2f, %3.2f]\n"% (out_tab_str, self.pos[0], self.pos[1], self.pos[2] ) + out_str += "%sOrientation : [%3.2f, %3.2f, %3.2f, %3.2f]\n"% (out_tab_str, self.rot[0], self.rot[1], self.rot[2], self.rot[3] ) + + marker_count = len(self.rb_marker_list) + marker_count_range = range( 0, marker_count ) + + # Marker Data + if marker_count > 0: + out_str += "%sMarker Count: %3.1d\n"%(out_tab_str, marker_count ) + for i in marker_count_range: + out_str += "%sMarker %3.1d\n"%(out_tab_str2, i) + rbmarker = self.rb_marker_list[i] + 
out_str += rbmarker.get_as_string(tab_str, level+2) + + out_str += "%sMarker Error : %3.2f\n"% (out_tab_str, self.error) + + # Valid Tracking + tf_string = 'False' + if self.tracking_valid: + tf_string = 'True' + out_str += "%sTracking Valid: %s\n"%(out_tab_str, tf_string) + + return out_str + + +class RigidBodyData: + def __init__(self): + self.rigid_body_list=[] + + + def add_rigid_body(self, rigid_body): + self.rigid_body_list.append(copy.deepcopy(rigid_body)) + return len(self.rigid_body_list) + + + def get_rigid_body_count(self): + return len(self.rigid_body_list) + + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_str="" + rigid_body_count=len(self.rigid_body_list) + out_str += "%sRigid Body Count: %3.1d\n"%(out_tab_str, rigid_body_count) + for rigid_body in self.rigid_body_list: + out_str += rigid_body.get_as_string(tab_str, level+1) + return out_str + +class Skeleton: + def __init__(self, new_id=0): + self.id_num=new_id + self.rigid_body_list=[] + + + def add_rigid_body(self, rigid_body): + self.rigid_body_list.append(copy.deepcopy(rigid_body)) + return len(self.rigid_body_list) + + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = get_tab_str(tab_str, level+1) + out_str="" + out_str+="Skeleton:\n" + out_str+="%sID: %3.1d\n"%(out_tab_str, self.id_num) + rigid_body_count=len(self.rigid_body_list) + out_str += "%sRigid Body Count: %3.1d\n"%(out_tab_str, rigid_body_count) + for rb_num in range(rigid_body_count): + out_str += "%sRigid Body %3.1d\n"%(out_tab_str2, rb_num) + out_str += self.rigid_body_list[rb_num].get_as_string(tab_str, level+2) + return out_str + + +class SkeletonData: + def __init__(self): + self.skeleton_list=[] + + + def add_skeleton(self, new_skeleton): + self.skeleton_list.append(copy.deepcopy(new_skeleton)) + + + def get_skeleton_count(self): + return len(self.skeleton_list) + + + def get_as_string(self, tab_str = " ", level=0): + 
out_tab_str = get_tab_str(tab_str, level)
+        out_tab_str2 = get_tab_str(tab_str, level+1)
+
+        out_str = ""
+        skeleton_count = len(self.skeleton_list)
+        out_str += "%sSkeleton Count: %3.1d\n"%(out_tab_str, skeleton_count)
+        for skeleton_num in range(skeleton_count):
+            out_str += "%sSkeleton %3.1d\n"%(out_tab_str2, skeleton_num)
+            out_str += self.skeleton_list[skeleton_num].get_as_string(tab_str, level+2)
+        return out_str
+
+class AssetMarkerData:
+    def __init__(self, marker_id, pos, marker_size=0.0, marker_params=0, residual=0.0):
+        self.marker_id=marker_id
+        self.pos=pos
+        self.marker_size=marker_size
+        self.marker_params=marker_params
+        self.residual=residual
+
+    def get_as_string(self, tab_str=" ", level=0):
+        out_tab_str = get_tab_str(tab_str, level)
+        out_tab_str2 = get_tab_str(tab_str, level+1)
+        out_str=""
+        out_str+="%sID : %s\n"%(out_tab_str, get_as_string(self.marker_id))
+        out_str+="%sPos : %3.2f %3.2f %3.2f\n"%(out_tab_str, self.pos[0], self.pos[1], self.pos[2])
+        out_str+="%sSize : %3.2f\n"%(out_tab_str, self.marker_size)
+        out_str+="%sParams : %s\n"%(out_tab_str, self.marker_params)
+        out_str+="%sResidual : %3.2f\n"%(out_tab_str, self.residual)
+
+        return out_str
+
+class AssetRigidBodyData:
+    def __init__(self, new_id, pos, rot, mean_error=0.0, param=0):
+        self.id_num=new_id
+        self.pos = pos
+        self.rot = rot
+        self.mean_error = mean_error
+        self.param = param
+
+    def get_as_string(self, tab_str=" ", level=0):
+        out_tab_str = get_tab_str(tab_str, level)
+        out_str=""
+        out_str += "%sID : %s\n"%(out_tab_str, get_as_string(self.id_num))
+        out_str += "%sPosition : %3.2f %3.2f %3.2f\n"%(out_tab_str, self.pos[0], self.pos[1], self.pos[2])
+        out_str += "%sOrientation : %3.2f %3.2f %3.2f %3.2f\n"%(out_tab_str, self.rot[0], self.rot[1], self.rot[2], self.rot[3])
+        out_str += "%sMean Error : %3.2f\n"%(out_tab_str, self.mean_error)
+        out_str += "%sParam : %s\n"%(out_tab_str, self.param)
+
+        return out_str
+
+class AssetData:
+    def __init__(self):
+        
self.asset_id=0
+        self.asset_rigid_body_list=[]
+        self.asset_marker_list=[]
+
+    def set_id(self, new_id):
+        self.asset_id=new_id
+
+    def add_rigid_body(self, rigid_body):
+        self.asset_rigid_body_list.append(copy.deepcopy(rigid_body))
+        return len(self.asset_rigid_body_list)
+
+    def add_marker(self, marker):
+        self.asset_marker_list.append(copy.deepcopy(marker))
+        return len(self.asset_marker_list)
+
+    def get_rigid_body_count(self):
+        return len(self.asset_rigid_body_list)
+
+    def get_marker_count(self):
+        return len(self.asset_marker_list)
+
+    def get_as_string(self, tab_str = " ", level = 0):
+        out_tab_str = get_tab_str(tab_str, level)
+
+        out_str = ""
+        out_str+="%sAssetData:\n"%out_tab_str
+        out_str += "%sID :%s"%(out_tab_str,get_as_string(self.asset_id))
+        rigid_body_count = len(self.asset_rigid_body_list)
+        out_str += "%srigid_body Count: %3.1d\n"%(out_tab_str, rigid_body_count)
+        for i in range(rigid_body_count):
+            out_str += self.asset_rigid_body_list[i].get_as_string(tab_str, level+1, i)
+
+        marker_count = len(self.asset_marker_list)
+        out_str += "%smarker Count: %3.1d\n"%(out_tab_str, marker_count)
+        for i in range(marker_count):
+            out_str += self.asset_marker_list[i].get_as_string(tab_str, level+1, i)
+
+        return out_str
+
+
+class LabeledMarker:
+    def __init__(self, new_id, pos, size=0.0, param = 0, residual=0.0):
+        self.id_num=new_id
+        self.pos = pos
+        self.size = size
+        self.param = param
+        self.residual = residual
+        if str(type(size)) == "<class 'tuple'>":
+            self.size=size[0]
+
+    def __decode_marker_id(self):
+        model_id = self.id_num >> 16
+        marker_id = self.id_num & 0x0000ffff
+        return model_id, marker_id
+
+    def __decode_param(self):
+        occluded = ( self.param & 0x01 ) != 0
+        point_cloud_solved = ( self.param & 0x02 ) != 0
+        model_solved = ( self.param & 0x04 ) != 0
+        return occluded,point_cloud_solved, model_solved
+
+    def get_as_string(self, tab_str, level):
+        out_tab_str = get_tab_str(tab_str, level)
+        model_id, marker_id = self.__decode_marker_id()
+        out_str = ""
+        out_str+="%sLabeledMarker:\n"%out_tab_str
+        
out_str += "%sID : [MarkerID: %3.1d] [ModelID: %3.1d]\n"%(out_tab_str, marker_id,model_id) + out_str += "%spos : [%3.2f, %3.2f, %3.2f]\n"%(out_tab_str, self.pos[0],self.pos[1],self.pos[2]) + out_str += "%ssize : [%3.2f]\n"%(out_tab_str, self.size) + + occluded, point_cloud_solved, model_solved = self.__decode_param() + out_str += "%soccluded : [%3.1d]\n"%(out_tab_str, occluded) + out_str += "%spoint_cloud_solved : [%3.1d]\n"%(out_tab_str, point_cloud_solved) + out_str += "%smodel_solved : [%3.1d]\n"%(out_tab_str, model_solved) + out_str += "%serr : [%3.2f]\n"%(out_tab_str, self.residual) + + return out_str + + +class LabeledMarkerData: + def __init__(self): + self.labeled_marker_list=[] + + def add_labeled_marker(self, labeled_marker): + self.labeled_marker_list.append(copy.deepcopy(labeled_marker)) + return len(self.labeled_marker_list) + + def get_labeled_marker_count(self): + return len(self.labeled_marker_list) + + def get_as_string(self, tab_str = " ", level = 0): + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = get_tab_str(tab_str, level+1) + out_str = "" + + labeled_marker_count = len(self.labeled_marker_list) + out_str += "%sLabeled Marker Count:%3.1d\n"%(out_tab_str, labeled_marker_count ) + for i in range( 0, labeled_marker_count ): + out_str += "%sLabeled Marker %3.1d\n"%(out_tab_str2,i) + labeled_marker = self.labeled_marker_list[i] + out_str += labeled_marker.get_as_string(tab_str, level+2) + return out_str + +class ForcePlateChannelData: + def __init__(self): + # list of floats + self.frame_list=[] + + + def add_frame_entry(self, frame_entry): + self.frame_list.append(copy.deepcopy(frame_entry)) + return len(self.frame_list) + + + def get_as_string(self, tab_str, level, channel_num = -1): + fc_max = 4 + out_tab_str = get_tab_str(tab_str, level) + + out_str = "" + frame_count = len(self.frame_list) + fc_show = min(frame_count, fc_max) + out_str += "%s"%(out_tab_str) + if channel_num >= 0 : + out_str += "Channel %3.1d: "%channel_num + 
out_str += "%3.1d Frames - Frame Data: "%(frame_count) + for i in range(fc_show): + out_str += "%3.2f "%(self.frame_list[i]) + if fc_show < frame_count : + out_str += " - Showing %3.1d of %3.1d frames"%(fc_show, frame_count) + out_str += "\n" + return out_str + +class ForcePlate: + def __init__(self, new_id=0): + self.id_num = new_id + self.channel_data_list=[] + + def add_channel_data(self, channel_data): + self.channel_data_list.append(copy.deepcopy(channel_data)) + return len(self.channel_data_list) + + def get_as_string(self, tab_str, level): + out_tab_str = get_tab_str(tab_str, level) + out_str = "" + + out_str += "%sID : %3.1d"%(out_tab_str, self.id_num) + num_channels = len(self.channel_data_list) + out_str += "%sChannel Count: %3.1d\n"%(out_tab_str, num_channels) + for i in range(num_channels): + out_str += self.channel_data_list[i].get_as_string(tab_str, level+1,i) + return out_str + +class ForcePlateData: + def __init__(self): + self.force_plate_list=[] + + def add_force_plate(self, force_plate): + self.force_plate_list.append(copy.deepcopy(force_plate)) + return len(self.force_plate_list) + + + def get_force_plate_count(self): + return len(self.force_plate_list) + + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + out_tab_str2 = get_tab_str(tab_str, level+1) + out_str="" + + force_plate_count = len(self.force_plate_list) + out_str += "%sForce Plate Count: %3.1d\n"%(out_tab_str, force_plate_count) + for i in range(force_plate_count): + out_str += "%sForce Plate %3.1d\n"%(out_tab_str2, i) + out_str += self.force_plate_list[i].get_as_string(tab_str, level+2) + + return out_str + +class DeviceChannelData: + def __init__(self): + # list of floats + self.frame_list=[] + + + def add_frame_entry(self, frame_entry): + self.frame_list.append(copy.deepcopy(frame_entry)) + return len(self.frame_list) + + + def get_as_string(self, tab_str, level, channel_num = -1): + fc_max = 4 + out_tab_str = get_tab_str(tab_str, level) 
+ + out_str = "" + frame_count = len(self.frame_list) + fc_show = min(frame_count, fc_max) + out_str += "%s"%(out_tab_str) + if channel_num >= 0: + out_str += "Channel %3.1d: "%channel_num + out_str += "%3.1d Frames - Frame Data: "%(frame_count) + for i in range(fc_show): + out_str += "%3.2f "%(self.frame_list[i]) + if fc_show < frame_count: + out_str += " - Showing %3.1d of %3.1d frames"%(fc_show, frame_count) + out_str += "\n" + return out_str + + +class Device: + def __init__(self, new_id): + self.id_num=new_id + self.channel_data_list = [] + + def add_channel_data(self, channel_data): + self.channel_data_list.append(copy.deepcopy(channel_data)) + return len(self.channel_data_list) + + def get_as_string(self, tab_str, level, device_num): + out_tab_str = get_tab_str(tab_str, level) + + out_str = "" + + num_channels = len(self.channel_data_list) + out_str+= "%sDevice %3.1d ID: %3.1d Num Channels: %3.1d\n"% (out_tab_str, device_num, self.id_num, num_channels ) + for i in range(num_channels): + out_str += self.channel_data_list[i].get_as_string(tab_str, level+1, i) + + return out_str + + +class DeviceData: + def __init__(self): + self.device_list=[] + + def add_device(self, device): + self.device_list.append(copy.deepcopy(device)) + return len(self.device_list) + + + def get_device_count(self): + return len(self.device_list) + + + def get_as_string(self, tab_str = " ", level = 0): + out_tab_str = get_tab_str(tab_str, level) + + out_str = "" + + device_count = len(self.device_list) + out_str += "%sDevice Count: %3.1d\n"%(out_tab_str, device_count) + for i in range(device_count): + out_str += self.device_list[i].get_as_string(tab_str, level+1, i) + return out_str + +class FrameSuffixData: + def __init__(self): + self.timecode=-1 + self.timecode_sub=-1 + self.timestamp = -1 + self.stamp_camera_mid_exposure = -1 + self.stamp_data_received = -1 + self.stamp_transmit = -1 + self.prec_timestamp_secs = -1 + self.prec_timestamp_frac_secs = -1 + self.param = 0 + 
self.is_recording = False + self.tracked_models_changed = True + + + def get_as_string(self, tab_str=" ", level=0): + out_tab_str = get_tab_str(tab_str, level) + + out_str = "" + if not self.timestamp == -1: + out_str += "%sTimestamp : %3.2f\n"%(out_tab_str, self.timestamp) + if not self.stamp_camera_mid_exposure == -1: + out_str += "%sMid-exposure timestamp : %3.1d\n"%(out_tab_str, self.stamp_camera_mid_exposure) + if not self.stamp_data_received == -1: + out_str += "%sCamera data received timestamp : %3.1d\n"%(out_tab_str, self.stamp_data_received) + if not self.stamp_transmit == -1: + out_str += "%sTransmit timestamp : %3.1d\n"%(out_tab_str, self.stamp_transmit) + if not self.prec_timestamp_secs == -1: + hours = int(self.prec_timestamp_secs/3600) + minutes=int(self.prec_timestamp_secs/60)%60 + seconds=self.prec_timestamp_secs%60 + hms_string="%sPrecision timestamp (hh:mm:ss) - %2.1d:%2.2d:%2.2d\n"%(out_tab_str,hours, minutes, seconds) + out_str += hms_string + out_str += "%sPrecision timestamp (seconds) : %3.1d\n"%(out_tab_str, self.prec_timestamp_secs) + if not self.prec_timestamp_frac_secs == -1: + out_str += "%sPrecision timestamp (fractional seconds) : %3.1d\n"%(out_tab_str, self.prec_timestamp_frac_secs) + + return out_str + +class MoCapData: + def __init__(self): + #Packet Parts + self.prefix_data = None + self.marker_set_data = None + self.legacy_other_markers = None + self.rigid_body_data = None + self.asset_data = None + self.skeleton_data = None + self.labeled_marker_data = None + self.force_plate_data = None + self.device_data = None + self.suffix_data = None + + def set_prefix_data(self, new_prefix_data): + self.prefix_data = new_prefix_data + + def set_marker_set_data(self, new_marker_set_data): + self.marker_set_data = new_marker_set_data + + def set_legacy_other_markers(self, new_marker_set_data): + self.legacy_other_markers = new_marker_set_data + + def set_rigid_body_data(self, new_rigid_body_data): + self.rigid_body_data = new_rigid_body_data + 
+ def set_skeleton_data(self, new_skeleton_data): + self.skeleton_data = new_skeleton_data + + def set_asset_data(self, new_asset_data): + self.asset_data=new_asset_data + + def set_labeled_marker_data(self, new_labeled_marker_data): + self.labeled_marker_data = new_labeled_marker_data + + def set_force_plate_data(self, new_force_plate_data): + self.force_plate_data = new_force_plate_data + + def set_device_data(self, new_device_data): + self.device_data = new_device_data + + def set_suffix_data(self, new_suffix_data): + self.suffix_data = new_suffix_data + + def get_as_string(self, tab_str = " ", level = 0): + out_tab_str = get_tab_str(tab_str, level) + + out_str="" + out_str+= "%sMoCap Frame Begin\n%s-----------------\n"%(out_tab_str,out_tab_str) + if not self.prefix_data == None: + out_str+=self.prefix_data.get_as_string() + else: + out_str+="%sNo Prefix Data Set\n"%(out_tab_str) + + if not self.marker_set_data == None: + out_str+=self.marker_set_data.get_as_string(tab_str, level+1) + else: + out_str+="%sNo Marker Set Data Set\n"%(out_tab_str) + + if not self.rigid_body_data == None: + out_str+=self.rigid_body_data.get_as_string(tab_str, level+1) + else: + out_str+="%sNo Rigid Body Data Set\n"%(out_tab_str) + + if not self.skeleton_data == None: + out_str+=self.skeleton_data.get_as_string(tab_str, level+1) + else: + out_str+="%sNo Skeleton Data Set\n"%(out_tab_str) + + if not self.labeled_marker_data == None: + out_str+=self.labeled_marker_data.get_as_string(tab_str, level+1) + else: + out_str+="%sNo Labeled Marker Data Set\n"%(out_tab_str) + + if not self.force_plate_data == None: + out_str+=self.force_plate_data.get_as_string(tab_str, level+1) + else: + out_str+="%sNo Force Plate Data Set\n"%(out_tab_str) + + if not self.device_data == None: + out_str+=self.device_data.get_as_string(tab_str, level+1) + else: + out_str+="%sNo Device Data Set\n"%(out_tab_str) + + if not self.suffix_data == None: + out_str+=self.suffix_data.get_as_string(tab_str, level+1) + else: 
+ out_str+="%sNo Suffix Data Set\n"%(out_tab_str) + + out_str+= "%sMoCap Frame End\n%s-----------------\n"%(out_tab_str,out_tab_str) + + return out_str + + + +# test program + +def generate_prefix_data(frame_num = 0): + frame_prefix_data = FramePrefixData(frame_num) + return frame_prefix_data + +def generate_label(label_base="label", label_num=0): + out_label= "%s_%3.3d"%(label_base, label_num) + return out_label + +def generate_position_srand(pos_num=0, frame_num=0): + random.seed(pos_num + (frame_num*1000)) + position=[(random.random()*100),(random.random()*100),(random.random()*100)] + return position + +def generate_marker_data(label_base, label_num, num_points=1): + label=generate_label(label_base, label_num) + if((label_base == None) or (label_base == "")): + label="" + marker_data=MarkerData() + marker_data.set_model_name(label) + start_num=label_num * 10000 + end_num = start_num+num_points + for point_num in range(start_num, end_num): + position=generate_position_srand(point_num) + marker_data.add_pos(position) + + return marker_data + + +def generate_marker_set_data(frame_num = 0, marker_set_num=0): + marker_set_data=MarkerSetData() + #add labeled markers + marker_set_data.add_marker_data(generate_marker_data("marker",0,3)) + marker_set_data.add_marker_data(generate_marker_data("marker",1,6)) + marker_set_data.add_marker_data(generate_marker_data("marker",2,5)) + #add unlabeled markers + num_points=5 + start_num=(frame_num * 100000) + (10000 + marker_set_num) + end_num = start_num+num_points + for point_num in range(start_num, end_num): + position=generate_position_srand(point_num) + marker_set_data.add_unlabeled_marker(position) + return marker_set_data + +def generate_rigid_body_marker_srand(marker_num=0, frame_num = 0): + rigid_body_marker=RigidBodyMarker() + rbm_num=11000+marker_num + random.seed(rbm_num) + rigid_body_marker.pos=generate_position_srand(rbm_num, frame_num) + rigid_body_marker.id_num=marker_num + rigid_body_marker.size=1 + 
rigid_body_marker.error=random.random() + + return rigid_body_marker +def generate_rigid_body(body_num=0, frame_num = 0): + pos=generate_position_srand(10000+body_num, frame_num) + rot = [1,0,0,0] + rigid_body = RigidBody(body_num,pos,rot) + rigid_body.add_rigid_body_marker(generate_rigid_body_marker_srand(0, frame_num)) + rigid_body.add_rigid_body_marker(generate_rigid_body_marker_srand(1, frame_num)) + rigid_body.add_rigid_body_marker(generate_rigid_body_marker_srand(2)) + return rigid_body + +def generate_rigid_body_data(frame_num = 0): + rigid_body_data=RigidBodyData() + # add rigid bodies + rigid_body_data.add_rigid_body(generate_rigid_body(0, frame_num)) + rigid_body_data.add_rigid_body(generate_rigid_body(1, frame_num)) + rigid_body_data.add_rigid_body(generate_rigid_body(2, frame_num)) + return rigid_body_data + +def generate_skeleton(frame_num=0, skeleton_num=0,num_rbs=1): + skeleton = Skeleton(skeleton_num) + # add rigid bodies + rb_seed_start=skeleton_num *165 + rb_seed_end=rb_seed_start + num_rbs + for rb_num in range(rb_seed_start, rb_seed_end): + skeleton.add_rigid_body(generate_rigid_body(rb_num, frame_num)) + return skeleton + +def generate_skeleton_data(frame_num = 0): + skeleton_data = SkeletonData() + skeleton_data.add_skeleton(generate_skeleton(frame_num, 0, 2)) + skeleton_data.add_skeleton(generate_skeleton(frame_num, 1, 6)) + skeleton_data.add_skeleton(generate_skeleton(frame_num, 2, 3)) + return skeleton_data + +def generate_labeled_marker(frame_num=0, marker_num=0): + point_num = (frame_num *2000) + marker_num + pos = generate_position_srand(point_num) + size = 1 + param = 0 + #occluded 0x01 + param += 0x01 * 0 + #point_cloud_solved 0x02 + param += 0x02 * 0 + #model_solved 0x04 + param += 0x04 * 1 + residual = 0.01 + return LabeledMarker(marker_num, pos, size, param,residual) + + +def generate_labeled_marker_data(frame_num = 0): + labeled_marker_data = LabeledMarkerData() + #add labeled marker + 
labeled_marker_data.add_labeled_marker(generate_labeled_marker(frame_num,0)) + labeled_marker_data.add_labeled_marker(generate_labeled_marker(frame_num,1)) + labeled_marker_data.add_labeled_marker(generate_labeled_marker(frame_num,2)) + + return labeled_marker_data + +def generate_fp_channel_data(frame_num=0,fp_num=0, channel_num=0, num_frames =1): + rseed=(frame_num*100000)+(fp_num*10000)+(channel_num *1000) + random.seed(rseed) + fp_channel_data = ForcePlateChannelData() + for _ in range(num_frames): + fp_channel_data.add_frame_entry(100.0*random.random()) + return fp_channel_data + + +def generate_force_plate(frame_num=0, fp_num = 0, num_channels=1): + force_plate = ForcePlate(fp_num) + #add channel_data + for i in range(num_channels): + force_plate.add_channel_data(generate_fp_channel_data(frame_num,fp_num, i, 10)) + return force_plate + + +def generate_force_plate_data(frame_num = 0): + force_plate_data = ForcePlateData() + # add force plates + force_plate_data.add_force_plate(generate_force_plate(frame_num, 0,3)) + force_plate_data.add_force_plate(generate_force_plate(frame_num, 1,4)) + force_plate_data.add_force_plate(generate_force_plate(frame_num, 2,2)) + return force_plate_data + +def generate_device_channel_data(frame_num=0,device_num=0, channel_num=0, num_frames =1): + rseed=(frame_num*100000)+(device_num*10000)+(channel_num *1000) + random.seed(rseed) + device_channel_data = DeviceChannelData() + for _ in range(num_frames): + device_channel_data.add_frame_entry(100.0*random.random()) + return device_channel_data + + +def generate_device(frame_num=0, device_num=0): + device = Device(device_num) + device.add_channel_data(generate_device_channel_data(frame_num, device_num,1,4)) + device.add_channel_data(generate_device_channel_data(frame_num, device_num,3,2)) + device.add_channel_data(generate_device_channel_data(frame_num, device_num,7,6)) + return device + +def generate_device_data(frame_num = 0): + device_data=DeviceData() + 
device_data.add_device(generate_device(frame_num, 0)) + device_data.add_device(generate_device(frame_num, 2)) + return device_data + +def generate_suffix_data(frame_num = 0): + frame_suffix_data = FrameSuffixData() + frame_suffix_data.stamp_camera_mid_exposure = 5844402979291+frame_num + frame_suffix_data.stamp_data_received = 0 + frame_suffix_data.stamp_transmit = 5844403268753+ frame_num + frame_suffix_data.prec_timestamp_secs = 0 + frame_suffix_data.prec_timestamp_frac_secs = 0 + frame_suffix_data.timecode = 0 + frame_suffix_data.timecode_sub = 0 + frame_suffix_data.timestamp = 762.63 + return frame_suffix_data + + +def generate_mocap_data(frame_num=0): + mocap_data=MoCapData() + + mocap_data.set_prefix_data(generate_prefix_data(frame_num)) + mocap_data.set_marker_set_data(generate_marker_set_data(frame_num)) + mocap_data.set_rigid_body_data(generate_rigid_body_data(frame_num)) + mocap_data.set_skeleton_data(generate_skeleton_data(frame_num)) + mocap_data.set_labeled_marker_data(generate_labeled_marker_data(frame_num)) + mocap_data.set_force_plate_data(generate_force_plate_data(frame_num)) + mocap_data.set_device_data(generate_device_data(frame_num)) + mocap_data.set_suffix_data(generate_suffix_data(frame_num)) + + return mocap_data + +def test_all(run_test=True): + totals=[0,0,0] + if run_test is True: + test_cases=[["Test Prefix Data 0", "bffba016d02cf2167780df31aee697e1ec746b4c", + "generate_prefix_data(0)",True], + ["Test Marker Set Data 0", "d2550194fed1b1fc525f4f4d06bf584f291f41c7", + "generate_marker_set_data(0)",True], + ["Test Rigid Body Data 0", "abd1a48a476eaa9b5c4fae6e705e03aa75f85624", + "generate_rigid_body_data(0)",True], + ["Test Skeleton Data 0", "1e36e3334e291cebfaa530d7aab2122d6983ecab", + "generate_skeleton_data(0)",True], + ["Test Labeled Marker Data 0", "25f3ee026c3c8fc716fbb05c34138ef5afd95d75", + "generate_labeled_marker_data(0)",True], + ["Test Force Plate Data 0", "b83d04a1b89169bdcefee3bc3951c3bdcb6b792e", + 
"generate_force_plate_data(0)",True], + ["Test Device Data 0", "be10f0b93a7ba3858dce976b7868c1f79fd719c3", + "generate_device_data(0)",True], + ["Test Suffix Data 0", "6aa02c434bdb53a418ae1b1f73317dc80a5f887d", + "generate_suffix_data(0)",True], + ["Test MoCap Data 0", "09930ecf665d9eb3ca61616f9bcc55890373f414", + "generate_mocap_data(0)",True] + ] + num_tests = len(test_cases) + for i in range(num_tests): + data = eval(test_cases[i][2]) + totals_tmp = test_hash2(test_cases[i][0],test_cases[i][1],data,test_cases[i][3]) + totals=add_lists(totals, totals_tmp) + + print("--------------------") + print("[PASS] Count = %3.1d"%totals[0]) + print("[FAIL] Count = %3.1d"%totals[1]) + print("[SKIP] Count = %3.1d"%totals[2]) + + return totals + +if __name__ == "__main__": + test_all(True) diff --git a/sw/ground_segment/python/natnet3.x/NatNetClient.py b/sw/ground_segment/python/natnet3.x/NatNetClient.py index 0acfaebc1f..beb7e1190b 100644 --- a/sw/ground_segment/python/natnet3.x/NatNetClient.py +++ b/sw/ground_segment/python/natnet3.x/NatNetClient.py @@ -1,56 +1,118 @@ -# -# Modified version of the NatNet 3.0 Python Client example from NatNetSDK +#Copyright © 2018 Naturalpoint # +#Licensed under the Apache License, Version 2.0 (the "License") +#you may not use this file except in compliance with the License. +#You may obtain a copy of the License at +# +#http://www.apache.org/licenses/LICENSE-2.0 +# +#Unless required by applicable law or agreed to in writing, software +#distributed under the License is distributed on an "AS IS" BASIS, +#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#See the License for the specific language governing permissions and +#limitations under the License. 
+ +# OptiTrack NatNet direct depacketization library for Python 3.x import socket import struct from threading import Thread +import copy +import time +import DataDescriptions +import MoCapData + +def trace( *args ): + # uncomment the one you want to use + #print( "".join(map(str,args)) ) + pass + +#Used for Data Description functions +def trace_dd( *args ): + # uncomment the one you want to use + #print( "".join(map(str,args)) ) + pass + +#Used for MoCap Frame Data functions +def trace_mf( *args ): + # uncomment the one you want to use + #print( "".join(map(str,args)) ) + pass + +def get_message_id(data): + message_id = int.from_bytes( data[0:2], byteorder='little' ) + return message_id + # Create structs for reading various object types to speed up parsing. +Vector2 = struct.Struct( '1 on / print every nth mocap frame + print_level = 20 + + def __init__( self ): + # Change this value to the IP address of the NatNet server. + self.server_ip_address = "127.0.0.1" + + # Change this value to the IP address of your local network interface + self.local_ip_address = "127.0.0.1" # This should match the multicast address listed in Motive's streaming settings. - self.multicastAddress = multicast + self.multicast_address = "239.255.42.99" # NatNet Command channel - self.commandPort = commandPort - + self.command_port = 1510 + # NatNet Data channel - self.dataPort = dataPort + self.data_port = 1511 + + self.use_multicast = True # Set this to a callback method of your choice to receive per-rigid-body data at each frame. - self.rigidBodyListener = rigidBodyListener + self.rigid_body_listener = None + self.new_frame_listener = None + self.rigid_body_list_listener = None + self.marker_set_listener = None - # Set this to a callback method of your choice to receive data at each frame. - self.newFrameListener = newFrameListener - - # Set this to a callback method of your choice to receive rigid-body data list and timestamp at each frame. 
- self.rigidBodyListListener = rigidBodyListListener - self.rigidBodyList = [] + # Set Application Name + self.__application_name = "Not Set" - # Set this to a callback method of your choice to receive markerset data at each frame - self.markerSetListener = markerSetListener + # NatNet stream version server is capable of. This will be updated during initialization only. + self.__nat_net_stream_version_server = [0,0,0,0] - # NatNet stream version. This will be updated to the actual version the server is using during initialization. - self.__natNetStreamVersion = version + # NatNet stream version. This will be updated to the actual version the server is using during runtime. + self.__nat_net_requested_version = [0,0,0,0] - # Trace verbose level - self.verbose = verbose + # server stream version. This will be updated to the actual version the server is using during initialization. + self.__server_version = [0,0,0,0] + + # Lock values once run is called + self.__is_locked = False + + # Server has the ability to change bitstream version + self.__can_change_bitstream_version = False + + self.command_thread = None + self.data_thread = None + self.command_socket = None + self.data_socket = None + + self.stop_threads=False - # flag to stop threads - self.running = False # Client/server message ids - NAT_PING = 0 - NAT_PINGRESPONSE = 1 + NAT_CONNECT = 0 + NAT_SERVERINFO = 1 NAT_REQUEST = 2 NAT_RESPONSE = 3 NAT_REQUEST_MODELDEF = 4 @@ -58,475 +120,1712 @@ class NatNetClient: NAT_REQUEST_FRAMEOFDATA = 6 NAT_FRAMEOFDATA = 7 NAT_MESSAGESTRING = 8 - NAT_DISCONNECT = 9 + NAT_DISCONNECT = 9 + NAT_KEEPALIVE = 10 NAT_UNRECOGNIZED_REQUEST = 100 + NAT_UNDEFINED = 999999.9999 - def __trace( self, *args ): - if self.verbose: - print( "".join(map(str,args)) ) - # Create a data socket to attach to the NatNet stream - def __createDataSocket( self, port ): - result = socket.socket( socket.AF_INET, # Internet - socket.SOCK_DGRAM, - socket.IPPROTO_UDP) # UDP - 
result.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - result.bind( ('', port) ) + def set_client_address(self, local_ip_address): + if not self.__is_locked: + self.local_ip_address = local_ip_address + + def get_client_address(self): + return self.local_ip_address + + def set_server_address(self,server_ip_address): + if not self.__is_locked: + self.server_ip_address = server_ip_address + + def get_server_address(self): + return self.server_ip_address + + + def set_use_multicast(self, use_multicast): + if not self.__is_locked: + self.use_multicast = use_multicast + + def can_change_bitstream_version(self): + return self.__can_change_bitstream_version + + def set_nat_net_version(self, major, minor): + """checks to see if stream version can change, then changes it with position reset""" + return_code = -1 + if self.__can_change_bitstream_version and \ + ((major != self.__nat_net_requested_version[0]) or\ + (minor != self.__nat_net_requested_version[1])): + sz_command = "Bitstream,%1.1d.%1.1d"%(major, minor) + return_code = self.send_command(sz_command) + if return_code >=0: + self.__nat_net_requested_version[0] = major + self.__nat_net_requested_version[1] = minor + self.__nat_net_requested_version[2] = 0 + self.__nat_net_requested_version[3] = 0 + print("changing bitstream MAIN") + # get original output state + #print_results = self.get_print_results() + #turn off output + #self.set_print_results(False) + # force frame send and play reset + self.send_command("TimelinePlay") + time.sleep(0.1) + tmpCommands=["TimelinePlay", + "TimelineStop", + "SetPlaybackCurrentFrame,0", + "TimelineStop"] + self.send_commands(tmpCommands,False) + time.sleep(2) + #reset to original output state + #self.set_print_results(print_results) + else: + print("Bitstream change request failed") + return return_code + + + def get_major(self): + return self.__nat_net_requested_version[0] + + def get_minor(self): + return self.__nat_net_requested_version[1] + + def set_print_level(self, 
print_level=0): + if(print_level >=0): + self.print_level = print_level + return self.print_level + + def get_print_level(self): + return self.print_level + + + def connected(self): + ret_value = True + # check sockets + if self.command_socket == None: + ret_value = False + elif self.data_socket ==None: + ret_value = False + # check versions + elif self.get_application_name() == "Not Set": + ret_value = False + elif (self.__server_version[0] == 0) and\ + (self.__server_version[1] == 0) and\ + (self.__server_version[2] == 0) and\ + (self.__server_version[3] == 0): + ret_value = False + return ret_value - mreq = struct.pack("4sl", socket.inet_aton(self.multicastAddress), socket.INADDR_ANY) - result.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq) - return result # Create a command socket to attach to the NatNet stream - def __createCommandSocket( self ): - result = socket.socket( socket.AF_INET, socket.SOCK_DGRAM ) - result.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - result.bind( ('', 0) ) - result.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + def __create_command_socket( self ): + result = None + if self.use_multicast : + # Multicast case + result = socket.socket( socket.AF_INET, socket.SOCK_DGRAM, 0 ) + # allow multiple clients on same machine to use multicast group address/port + result.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + try: + result.bind( ('', 0) ) + except socket.error as msg: + print("ERROR: command socket error occurred:\n%s" %msg) + print("Check Motive/Server mode requested mode agreement. You requested Multicast ") + result = None + except socket.herror: + print("ERROR: command socket herror occurred") + result = None + except socket.gaierror: + print("ERROR: command socket gaierror occurred") + result = None + except socket.timeout: + print("ERROR: command socket timeout occurred. 
Server not responding") + result = None + # set to broadcast mode + result.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + # set timeout to allow for keep alive messages + result.settimeout(2.0) + else: + # Unicast case + result = socket.socket( socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) + try: + result.bind( (self.local_ip_address, 0) ) + except socket.error as msg: + print("ERROR: command socket error occurred:\n%s" %msg) + print("Check Motive/Server mode requested mode agreement. You requested Unicast ") + result = None + except socket.herror: + print("ERROR: command socket herror occurred") + result = None + except socket.gaierror: + print("ERROR: command socket gaierror occurred") + result = None + except socket.timeout: + print("ERROR: command socket timeout occurred. Server not responding") + result = None + + # set timeout to allow for keep alive messages + result.settimeout(2.0) + result.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + return result + + # Create a data socket to attach to the NatNet stream + def __create_data_socket( self, port ): + result = None + + if self.use_multicast: + # Multicast case + result = socket.socket( socket.AF_INET, # Internet + socket.SOCK_DGRAM, + 0) # UDP + result.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + result.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(self.multicast_address) + socket.inet_aton(self.local_ip_address)) + try: + result.bind( (self.local_ip_address, port) ) + except socket.error as msg: + print("ERROR: data socket error occurred:\n%s" %msg) + print(" Check Motive/Server mode requested mode agreement. You requested Multicast ") + result = None + except socket.herror: + print("ERROR: data socket herror occurred") + result = None + except socket.gaierror: + print("ERROR: data socket gaierror occurred") + result = None + except socket.timeout: + print("ERROR: data socket timeout occurred. 
Server not responding") + result = None + else: + # Unicast case + result = socket.socket( socket.AF_INET, # Internet + socket.SOCK_DGRAM, + socket.IPPROTO_UDP) + result.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + #result.bind( (self.local_ip_address, port) ) + try: + result.bind( ('', 0) ) + except socket.error as msg: + print("ERROR: data socket error occurred:\n%s" %msg) + print("Check Motive/Server mode requested mode agreement. You requested Unicast ") + result = None + except socket.herror: + print("ERROR: data socket herror occurred") + result = None + except socket.gaierror: + print("ERROR: data socket gaierror occurred") + result = None + except socket.timeout: + print("ERROR: data socket timeout occurred. Server not responding") + result = None + + if(self.multicast_address != "255.255.255.255"): + result.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(self.multicast_address) + socket.inet_aton(self.local_ip_address)) return result # Unpack a rigid body object from a data packet - def __unpackRigidBody( self, data ): + def __unpack_rigid_body( self, data, major, minor, rb_num): offset = 0 # ID (4 bytes) - id = int.from_bytes( data[offset:offset+4], byteorder='little' ) + new_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "ID:", id ) + + trace_mf( "RB: %3.1d ID: %3.1d"% (rb_num, new_id)) # Position and orientation pos = Vector3.unpack( data[offset:offset+12] ) offset += 12 - self.__trace( "\tPosition:", pos[0],",", pos[1],",", pos[2] ) + trace_mf( "\tPosition : [%3.2f, %3.2f, %3.2f]"% (pos[0], pos[1], pos[2] )) + rot = Quaternion.unpack( data[offset:offset+16] ) offset += 16 - self.__trace( "\tOrientation:", rot[0],",", rot[1],",", rot[2],",", rot[3] ) + trace_mf( "\tOrientation : [%3.2f, %3.2f, %3.2f, %3.2f]"% (rot[0], rot[1], rot[2], rot[3] )) + + rigid_body = MoCapData.RigidBody(new_id, pos, rot) # Send information to any listener. 
- if self.rigidBodyListener is not None: - self.rigidBodyListener( id, pos, rot ) + if self.rigid_body_listener is not None: + self.rigid_body_listener( new_id, pos, rot ) # RB Marker Data ( Before version 3.0. After Version 3.0 Marker data is in description ) - if( self.__natNetStreamVersion[0] < 3 ) : + if( major < 3 and major != 0) : # Marker count (4 bytes) - markerCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + marker_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - markerCountRange = range( 0, markerCount ) - self.__trace( "\tMarker Count:", markerCount ) + marker_count_range = range( 0, marker_count ) + trace_mf( "\tMarker Count:", marker_count ) + + rb_marker_list=[] + for i in marker_count_range: + rb_marker_list.append(MoCapData.RigidBodyMarker()) # Marker positions - for i in markerCountRange: - mpos = Vector3.unpack( data[offset:offset+12] ) + for i in marker_count_range: + pos = Vector3.unpack( data[offset:offset+12] ) offset += 12 - self.__trace( "\tMarker", i, ":", mpos[0],",", mpos[1],",", mpos[2] ) + trace_mf( "\tMarker", i, ":", pos[0],",", pos[1],",", pos[2] ) + rb_marker_list[i].pos=pos - if( self.__natNetStreamVersion[0] >= 2 ): + if major >= 2: # Marker ID's - for i in markerCountRange: - mid = int.from_bytes( data[offset:offset+4], byteorder='little' ) + for i in marker_count_range: + new_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "\tMarker ID", i, ":", mid ) + trace_mf( "\tMarker ID", i, ":", new_id ) + rb_marker_list[i].id=new_id # Marker sizes - for i in markerCountRange: + for i in marker_count_range: size = FloatValue.unpack( data[offset:offset+4] ) offset += 4 - self.__trace( "\tMarker Size", i, ":", size[0] ) + trace_mf( "\tMarker Size", i, ":", size[0] ) + rb_marker_list[i].size=size - if( self.__natNetStreamVersion[0] >= 2 ): - markerError, = FloatValue.unpack( data[offset:offset+4] ) + for i in marker_count_range: + 
rigid_body.add_rigid_body_marker(rb_marker_list[i]) + if major >= 2 : + marker_error, = FloatValue.unpack( data[offset:offset+4] ) offset += 4 - self.__trace( "\tMarker Error:", markerError ) + trace_mf( "\tMean Marker Error: %3.2f"% marker_error ) + rigid_body.error = marker_error # Version 2.6 and later - trackingValid = True # set valid by default - if( ( ( self.__natNetStreamVersion[0] == 2 ) and ( self.__natNetStreamVersion[1] >= 6 ) ) or self.__natNetStreamVersion[0] > 2 or self.__natNetStreamVersion[0] == 0 ): + if ( ( major == 2 ) and ( minor >= 6 ) ) or major > 2 : param, = struct.unpack( 'h', data[offset:offset+2] ) - trackingValid = ( param & 0x01 ) != 0 + tracking_valid = ( param & 0x01 ) != 0 offset += 2 - self.__trace( "\tTracking Valid:", 'True' if trackingValid else 'False' ) + is_valid_str='False' + if tracking_valid: + is_valid_str = 'True' + trace_mf( "\tTracking Valid: %s"%is_valid_str) + if tracking_valid: + rigid_body.tracking_valid = True + else: + rigid_body.tracking_valid = False - # Store data - self.rigidBodyList.append((id, pos, rot, trackingValid)) - - return offset + return offset, rigid_body # Unpack a skeleton object from a data packet - def __unpackSkeleton( self, data ): + def __unpack_skeleton( self, data, major, minor, skeleton_num=0): + offset = 0 - - id = int.from_bytes( data[offset:offset+4], byteorder='little' ) + new_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "ID:", id ) - - rigidBodyCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + trace_mf( "Skeleton %3.1d ID: %3.1d"% (skeleton_num, new_id )) + skeleton = MoCapData.Skeleton(new_id) + + rigid_body_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "Rigid Body Count:", rigidBodyCount ) - for j in range( 0, rigidBodyCount ): - offset += self.__unpackRigidBody( data[offset:] ) + trace_mf( "Rigid Body Count : %3.1d"% rigid_body_count ) + if(rigid_body_count > 0): + 
for rb_num in range( 0, rigid_body_count ): + offset_tmp, rigid_body = self.__unpack_rigid_body( data[offset:], major, minor, rb_num ) + skeleton.add_rigid_body(rigid_body) + offset+=offset_tmp - return offset + return offset, skeleton - # Unpack data from a motion capture frame message - def __unpackMocapData( self, data ): - self.__trace( "Begin MoCap Frame\n-----------------\n" ) - - data = memoryview( data ) +#Unpack Mocap Data Functions + def __unpack_frame_prefix_data( self, data): offset = 0 - self.rigidBodyList = [] - # Frame number (4 bytes) - frameNumber = int.from_bytes( data[offset:offset+4], byteorder='little' ) + frame_number = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "Frame #:", frameNumber ) + trace_mf( "Frame #: %3.1d"% frame_number ) + frame_prefix_data=MoCapData.FramePrefixData(frame_number) + return offset, frame_prefix_data + + def __unpack_data_size(self, data, major, minor): + sizeInBytes=0 + offset=0 + + if( ( (major == 4) and (minor>0) ) or (major > 4)): + sizeInBytes = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + trace_mf( "Byte Count: %3.1d"% sizeInBytes ) + + return offset, sizeInBytes + + def __unpack_legacy_other_markers( self, data, packet_size, major, minor): + offset = 0 # Marker set count (4 bytes) - markerSetCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + other_marker_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "Marker Set Count:", markerSetCount ) + trace_mf( "Other Marker Count:", other_marker_count ) - for i in range( 0, markerSetCount ): - # Model name - modelName, separator, remainder = bytes(data[offset:]).partition( b'\0' ) - offset += len( modelName ) + 1 - self.__trace( "Model Name:", modelName.decode( 'utf-8' ) ) + # get data size (4 bytes) + offset_tmp, unpackedDataSize = self.__unpack_data_size(data[offset:],major, minor) + offset += offset_tmp - # Marker count (4 
bytes) - markerCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) - offset += 4 - self.__trace( "Marker Count:", markerCount ) - - posList = [] - for j in range( 0, markerCount ): + other_marker_data = MoCapData.LegacyMarkerData() + if(other_marker_count > 0): + # get legacy_marker positions + ### legacy_marker_data + for j in range( 0, other_marker_count ): pos = Vector3.unpack( data[offset:offset+12] ) - posList.append(pos) offset += 12 - self.__trace( "\tMarker", j, ":", pos[0],",", pos[1],",", pos[2] ) + trace_mf( "\tMarker %3.1d : [x=%3.2f,y=%3.2f,z=%3.2f]"%( j, pos[0], pos[1], pos[2] )) + other_marker_data.add_pos(pos) + + return offset, other_marker_data + + def __unpack_marker_set_data( self, data, packet_size, major, minor): + marker_set_data=MoCapData.MarkerSetData() + offset = 0 + # Marker set count (4 bytes) + marker_set_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + trace_mf( "Marker Set Count:", marker_set_count ) + + # get data size (4 bytes) + offset_tmp, unpackedDataSize = self.__unpack_data_size(data[offset:],major, minor) + offset += offset_tmp + + for i in range( 0, marker_set_count ): + marker_data = MoCapData.MarkerData() + # Model name + model_name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + offset += len( model_name ) + 1 + trace_mf( "Model Name : ", model_name.decode( 'utf-8' ) ) + marker_data.set_model_name(model_name) + # Marker count (4 bytes) + marker_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + if(marker_count < 0): + print("WARNING: Early return. Invalid marker count") + offset = len(data) + return offset, marker_set_data + elif(marker_count > 10000): + print("WARNING: Early return. Marker count too high") + offset = len(data) + return offset, marker_set_data + + trace_mf( "Marker Count : ", marker_count ) + for j in range( 0, marker_count ): + if(len(data)<(offset+12)): + print("WARNING: Early return. 
Out of data at marker ",j," of ", marker_count) + offset = len(data) + return offset, marker_set_data + break + pos = Vector3.unpack( data[offset:offset+12] ) + offset += 12 + trace_mf( "\tMarker %3.1d : [x=%3.2f,y=%3.2f,z=%3.2f]"%( j, pos[0], pos[1], pos[2] )) + marker_data.add_pos(pos) + marker_set_data.add_marker_data(marker_data) - if self.markerSetListener is not None: - self.markerSetListener(modelName, posList) - # Unlabeled markers count (4 bytes) - unlabeledMarkersCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) - offset += 4 - self.__trace( "Unlabeled Markers Count:", unlabeledMarkersCount ) + #unlabeled_markers_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) + #offset += 4 + #trace_mf( "Unlabeled Markers Count:", unlabeled_markers_count ) - for i in range( 0, unlabeledMarkersCount ): - pos = Vector3.unpack( data[offset:offset+12] ) - offset += 12 - self.__trace( "\tMarker", i, ":", pos[0],",", pos[1],",", pos[2] ) + #for i in range( 0, unlabeled_markers_count ): + # pos = Vector3.unpack( data[offset:offset+12] ) + # offset += 12 + # trace_mf( "\tMarker %3.1d : [%3.2f,%3.2f,%3.2f]"%( i, pos[0], pos[1], pos[2] )) + # marker_set_data.add_unlabeled_marker(pos) + return offset, marker_set_data + def __unpack_rigid_body_data( self, data, packet_size, major, minor): + rigid_body_data = MoCapData.RigidBodyData() + offset = 0 # Rigid body count (4 bytes) - rigidBodyCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + rigid_body_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "Rigid Body Count:", rigidBodyCount ) + trace_mf( "Rigid Body Count:", rigid_body_count ) - for i in range( 0, rigidBodyCount ): - offset += self.__unpackRigidBody( data[offset:] ) + # get data size (4 bytes) + offset_tmp, unpackedDataSize = self.__unpack_data_size(data[offset:],major, minor) + offset += offset_tmp + for i in range( 0, rigid_body_count ): + offset_tmp, rigid_body = 
self.__unpack_rigid_body( data[offset:], major, minor, i ) + offset += offset_tmp + rigid_body_data.add_rigid_body(rigid_body) + + return offset, rigid_body_data + + + def __unpack_skeleton_data( self, data, packet_size, major, minor): + skeleton_data = MoCapData.SkeletonData() + + offset = 0 # Version 2.1 and later - skeletonCount = 0 - if( ( self.__natNetStreamVersion[0] == 2 and self.__natNetStreamVersion[1] > 0 ) or self.__natNetStreamVersion[0] > 2 ): - skeletonCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + skeleton_count = 0 + if( ( major == 2 and minor > 0 ) or major > 2 ): + skeleton_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "Skeleton Count:", skeletonCount ) - for i in range( 0, skeletonCount ): - offset += self.__unpackSkeleton( data[offset:] ) + trace_mf( "Skeleton Count:", skeleton_count ) + + # get data size (4 bytes) + offset_tmp, unpackedDataSize = self.__unpack_data_size(data[offset:],major, minor) + offset += offset_tmp + if(skeleton_count >0): + for skeleton_num in range( 0, skeleton_count ): + rel_offset, skeleton = self.__unpack_skeleton( data[offset:], major, minor, skeleton_num ) + offset += rel_offset + skeleton_data.add_skeleton(skeleton) + return offset, skeleton_data + + def __decode_marker_id(self, new_id): + model_id = 0 + marker_id = 0 + model_id = new_id >> 16 + marker_id = new_id & 0x0000ffff + return model_id, marker_id + + def __unpack_labeled_marker_data( self, data, packet_size, major, minor): + labeled_marker_data = MoCapData.LabeledMarkerData() + offset = 0 # Labeled markers (Version 2.3 and later) - labeledMarkerCount = 0 - if( ( self.__natNetStreamVersion[0] == 2 and self.__natNetStreamVersion[1] > 3 ) or self.__natNetStreamVersion[0] > 2 ): - labeledMarkerCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + labeled_marker_count = 0 + if( ( major == 2 and minor > 3 ) or major > 2 ): + labeled_marker_count = int.from_bytes( 
data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "Labeled Marker Count:", labeledMarkerCount ) - for i in range( 0, labeledMarkerCount ): - id = int.from_bytes( data[offset:offset+4], byteorder='little' ) + trace_mf( "Labeled Marker Count:", labeled_marker_count ) + + # get data size (4 bytes) + offset_tmp, unpackedDataSize = self.__unpack_data_size(data[offset:],major, minor) + offset += offset_tmp + + for lm_num in range( 0, labeled_marker_count ): + model_id = 0 + marker_id = 0 + tmp_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 + model_id, marker_id = self.__decode_marker_id(tmp_id) pos = Vector3.unpack( data[offset:offset+12] ) offset += 12 size = FloatValue.unpack( data[offset:offset+4] ) offset += 4 + trace_mf("%3.1d ID : [MarkerID: %3.1d] [ModelID: %3.1d]"%(lm_num, marker_id,model_id)) + trace_mf(" pos : [%3.2f, %3.2f, %3.2f]"%(pos[0],pos[1],pos[2])) + trace_mf(" size : [%3.2f]"%size) + # Version 2.6 and later - if( ( self.__natNetStreamVersion[0] == 2 and self.__natNetStreamVersion[1] >= 6 ) or self.__natNetStreamVersion[0] > 2 or self.__natNetStreamVersion[0] == 0 ): + param = 0 + if( ( major == 2 and minor >= 6 ) or major > 2): param, = struct.unpack( 'h', data[offset:offset+2] ) offset += 2 - occluded = ( param & 0x01 ) != 0 - pointCloudSolved = ( param & 0x02 ) != 0 - modelSolved = ( param & 0x04 ) != 0 + #occluded = ( param & 0x01 ) != 0 + #point_cloud_solved = ( param & 0x02 ) != 0 + #model_solved = ( param & 0x04 ) != 0 # Version 3.0 and later - if( self.__natNetStreamVersion[0] >= 3 or self.__natNetStreamVersion[0] == 0 ): + residual = 0.0 + if major >= 3 : residual, = FloatValue.unpack( data[offset:offset+4] ) offset += 4 - self.__trace( "Residual:", residual ) + residual = residual * 1000.0 + trace_mf( " err : [%3.2f]"% residual ) + labeled_marker = MoCapData.LabeledMarker(tmp_id,pos,size,param, residual) + labeled_marker_data.add_labeled_marker(labeled_marker) + + return offset, 
labeled_marker_data + + def __unpack_force_plate_data( self, data, packet_size, major, minor): + force_plate_data = MoCapData.ForcePlateData() + n_frames_show_max = 4 + offset = 0 # Force Plate data (version 2.9 and later) - if( ( self.__natNetStreamVersion[0] == 2 and self.__natNetStreamVersion[1] >= 9 ) or self.__natNetStreamVersion[0] > 2 ): - forcePlateCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + force_plate_count = 0 + if( ( major == 2 and minor >= 9 ) or major > 2 ): + force_plate_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "Force Plate Count:", forcePlateCount ) - for i in range( 0, forcePlateCount ): + trace_mf( "Force Plate Count:", force_plate_count ) + + # get data size (4 bytes) + offset_tmp, unpackedDataSize = self.__unpack_data_size(data[offset:],major, minor) + offset += offset_tmp + + for i in range( 0, force_plate_count ): # ID - forcePlateID = int.from_bytes( data[offset:offset+4], byteorder='little' ) + force_plate_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "Force Plate", i, ":", forcePlateID ) + force_plate = MoCapData.ForcePlate(force_plate_id) # Channel Count - forcePlateChannelCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + force_plate_channel_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - # Channel Data - for j in range( 0, forcePlateChannelCount ): - self.__trace( "\tChannel", j, ":", forcePlateID ) - forcePlateChannelFrameCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) - offset += 4 - for k in range( 0, forcePlateChannelFrameCount ): - forcePlateChannelVal = int.from_bytes( data[offset:offset+4], byteorder='little' ) - offset += 4 - self.__trace( "\t\t", forcePlateChannelVal ) + trace_mf( "\tForce Plate %3.1d ID: %3.1d Num Channels: %3.1d"% (i, force_plate_id, force_plate_channel_count )) + # Channel Data + for j in range( 
force_plate_channel_count ): + fp_channel_data = MoCapData.ForcePlateChannelData() + force_plate_channel_frame_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + out_string="\tChannel %3.1d: "%( j ) + out_string+=" %3.1d Frames - Frame Data: "%(force_plate_channel_frame_count) + + # Force plate frames + n_frames_show = min(force_plate_channel_frame_count, n_frames_show_max) + for k in range( force_plate_channel_frame_count ): + force_plate_channel_val = FloatValue.unpack( data[offset:offset+4] ) + offset += 4 + fp_channel_data.add_frame_entry(force_plate_channel_val) + + if k < n_frames_show: + out_string += "%3.2f "%(force_plate_channel_val) + if n_frames_show < force_plate_channel_frame_count: + out_string += " showing %3.1d of %3.1d frames"%(n_frames_show, force_plate_channel_frame_count) + trace_mf( "%s"% out_string ) + force_plate.add_channel_data(fp_channel_data) + force_plate_data.add_force_plate(force_plate) + return offset, force_plate_data + + def __unpack_device_data( self, data, packet_size, major, minor): + device_data = MoCapData.DeviceData() + n_frames_show_max = 4 + offset = 0 # Device data (version 2.11 and later) - if( ( self.__natNetStreamVersion[0] == 2 and self.__natNetStreamVersion[1] >= 11 ) or self.__natNetStreamVersion[0] > 2 ): - deviceCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + device_count = 0 + if ( major == 2 and minor >= 11 ) or (major > 2) : + device_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - self.__trace( "Device Count:", deviceCount ) - for i in range( 0, deviceCount ): - # ID - deviceID = int.from_bytes( data[offset:offset+4], byteorder='little' ) - offset += 4 - self.__trace( "Device", i, ":", deviceID ) + trace_mf( "Device Count:", device_count ) - # Channel Count - deviceChannelCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + # get data size (4 bytes) + offset_tmp, unpackedDataSize = 
self.__unpack_data_size(data[offset:],major, minor) + offset += offset_tmp + + for i in range( 0, device_count ): + + # ID + device_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 + device = MoCapData.Device(device_id) + # Channel Count + device_channel_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + + trace_mf( "\tDevice %3.1d ID: %3.1d Num Channels: %3.1d"% (i, device_id, device_channel_count )) # Channel Data - for j in range( 0, deviceChannelCount ): - self.__trace( "\tChannel", j, ":", deviceID ) - deviceChannelFrameCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + for j in range( 0, device_channel_count ): + device_channel_data = MoCapData.DeviceChannelData() + device_channel_frame_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - for k in range( 0, deviceChannelFrameCount ): - deviceChannelVal = int.from_bytes( data[offset:offset+4], byteorder='little' ) - offset += 4 - self.__trace( "\t\t", deviceChannelVal ) - - # software latency (removed in version 3.0) - if self.__natNetStreamVersion[0] < 3: - latency = FloatValue.unpack( data[offset:offset+4] ) - offset += 4 + out_string="\tChannel %3.1d "% (j) + out_string+=" %3.1d Frames - Frame Data: "%(device_channel_frame_count) - # Timecode + # Device Frame Data + n_frames_show = min(device_channel_frame_count, n_frames_show_max) + for k in range( 0, device_channel_frame_count ): + device_channel_val = int.from_bytes( data[offset:offset+4], byteorder='little' ) + device_channel_val = FloatValue.unpack( data[offset:offset+4] ) + offset += 4 + if k < n_frames_show: + out_string += "%3.2f "%(device_channel_val) + + device_channel_data.add_frame_entry(device_channel_val) + if n_frames_show < device_channel_frame_count: + out_string += " showing %3.1d of %3.1d frames"%(n_frames_show, device_channel_frame_count) + trace_mf( "%s"% out_string ) + device.add_channel_data(device_channel_data) + 
device_data.add_device(device) + return offset, device_data + + def __unpack_frame_suffix_data( self, data, packet_size, major, minor): + frame_suffix_data = MoCapData.FrameSuffixData() + offset = 0 + + # Timecode timecode = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - timecodeSub = int.from_bytes( data[offset:offset+4], byteorder='little' ) + frame_suffix_data.timecode = timecode + + timecode_sub = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 + frame_suffix_data.timecode_sub = timecode_sub - # Timestamp (increased to double precision in 2.7 and later) - if( ( self.__natNetStreamVersion[0] == 2 and self.__natNetStreamVersion[1] >= 7 ) or self.__natNetStreamVersion[0] > 2 ): - timestamp, = DoubleValue.unpack( data[offset:offset+8] ) - offset += 8 + param=0 + #check to see if there is enough data + if((packet_size-offset) <= 0): + print("ERROR: Early End of Data Frame Suffix Data") + print("\tNo time stamp info available") else: - timestamp, = FloatValue.unpack( data[offset:offset+4] ) - offset += 4 + # Timestamp (increased to double precision in 2.7 and later) + if ( major == 2 and minor >= 7 ) or (major > 2 ): + timestamp, = DoubleValue.unpack( data[offset:offset+8] ) + offset += 8 + else: + timestamp, = FloatValue.unpack( data[offset:offset+4] ) + offset += 4 + trace_mf("Timestamp : %3.2f"%timestamp) + frame_suffix_data.timestamp = timestamp - # Hires Timestamp (Version 3.0 and later) - if( self.__natNetStreamVersion[0] >= 3 or self.__natNetStreamVersion[0] == 0 ): - stampCameraExposure = int.from_bytes( data[offset:offset+8], byteorder='little' ) - offset += 8 - stampDataReceived = int.from_bytes( data[offset:offset+8], byteorder='little' ) - offset += 8 - stampTransmit = int.from_bytes( data[offset:offset+8], byteorder='little' ) - offset += 8 + # Hires Timestamp (Version 3.0 and later) + if major >= 3 : + stamp_camera_mid_exposure = int.from_bytes( data[offset:offset+8], byteorder='little' ) + 
trace_mf("Mid-exposure timestamp : %3.1d"%stamp_camera_mid_exposure) + offset += 8 + frame_suffix_data.stamp_camera_mid_exposure = stamp_camera_mid_exposure - # Frame parameters - param, = struct.unpack( 'h', data[offset:offset+2] ) - isRecording = ( param & 0x01 ) != 0 - trackedModelsChanged = ( param & 0x02 ) != 0 - offset += 2 + stamp_data_received = int.from_bytes( data[offset:offset+8], byteorder='little' ) + offset += 8 + frame_suffix_data.stamp_data_received = stamp_data_received + trace_mf("Camera data received timestamp : %3.1d"%stamp_data_received) + + stamp_transmit = int.from_bytes( data[offset:offset+8], byteorder='little' ) + offset += 8 + trace_mf("Transmit timestamp : %3.1d"%stamp_transmit) + frame_suffix_data.stamp_transmit = stamp_transmit + + # Precision Timestamp (Version 4.1 and later) (defaults as 0 if N/A) + if major >= 4: + prec_timestamp_secs = int.from_bytes( data[offset:offset+4], byteorder='little' ) + hours = int(prec_timestamp_secs/3600) + minutes=int(prec_timestamp_secs/60)%60 + seconds=prec_timestamp_secs%60 + out_string="Precision timestamp (h:m:s) - %4.1d:%2.2d:%2.2d"%(hours, minutes, seconds) + trace_mf("%s"%out_string) + trace_mf("Precision timestamp (sec) : %3.1d"%prec_timestamp_secs) + offset += 4 + frame_suffix_data.prec_timestamp_secs = prec_timestamp_secs + + prec_timestamp_frac_secs = int.from_bytes( data[offset:offset+4], byteorder='little' ) + trace_mf("Precision timestamp (frac sec) : %3.1d"%prec_timestamp_frac_secs) + offset += 4 + frame_suffix_data.prec_timestamp_frac_secs = prec_timestamp_frac_secs + + # Frame parameters + param, = struct.unpack( 'h', data[offset:offset+2] ) + offset += 2 + is_recording = ( param & 0x01 ) != 0 + tracked_models_changed = ( param & 0x02 ) != 0 + frame_suffix_data.param = param + frame_suffix_data.is_recording = is_recording + frame_suffix_data.tracked_models_changed = tracked_models_changed + + return offset, frame_suffix_data + + + # Unpack data from a motion capture frame message + 
def __unpack_mocap_data( self, data : bytes, packet_size, major, minor): + mocap_data = MoCapData.MoCapData() + data = memoryview( data ) + offset = 0 + rel_offset = 0 + + #Frame Prefix Data + rel_offset, frame_prefix_data = self.__unpack_frame_prefix_data(data[offset:]) + offset += rel_offset + mocap_data.set_prefix_data(frame_prefix_data) + frame_number = frame_prefix_data.frame_number + + #Marker Set Data + rel_offset, marker_set_data =self.__unpack_marker_set_data(data[offset:], (packet_size - offset),major, minor) + offset += rel_offset + mocap_data.set_marker_set_data(marker_set_data) + marker_set_count = marker_set_data.get_marker_set_count() + unlabeled_markers_count = marker_set_data.get_unlabeled_marker_count() + + # Legacy Other Markers + rel_offset, legacy_other_markers =self.__unpack_legacy_other_markers(data[offset:], (packet_size - offset),major, minor) + offset += rel_offset + mocap_data.set_legacy_other_markers(legacy_other_markers) + marker_set_count = legacy_other_markers.get_marker_count() + legacy_other_markers_count = marker_set_data.get_unlabeled_marker_count() + + # Rigid Body Data + rel_offset, rigid_body_data = self.__unpack_rigid_body_data(data[offset:], (packet_size - offset),major, minor) + offset += rel_offset + mocap_data.set_rigid_body_data(rigid_body_data) + rigid_body_count = rigid_body_data.get_rigid_body_count() + + # Skeleton Data + rel_offset, skeleton_data = self.__unpack_skeleton_data(data[offset:], (packet_size - offset),major, minor) + offset += rel_offset + mocap_data.set_skeleton_data(skeleton_data) + skeleton_count = skeleton_data.get_skeleton_count() + + # Assets ( Motive 3.1/NatNet 4.1 and greater) + if (((major == 4) and (minor > 0)) or (major > 4)): + rel_offset, asset_data = self.__unpack_asset_data(data[offset:], (packet_size - offset),major, minor) + offset += rel_offset + mocap_data.set_asset_data(asset_data) + asset_rigid_body_count = asset_data.get_rigid_body_count() + asset_marker_count = 
asset_data.get_marker_count() + + # Labeled Marker Data + rel_offset, labeled_marker_data = self.__unpack_labeled_marker_data(data[offset:], (packet_size - offset),major, minor) + offset += rel_offset + mocap_data.set_labeled_marker_data(labeled_marker_data) + labeled_marker_count = labeled_marker_data.get_labeled_marker_count() + + # Force Plate Data + rel_offset, force_plate_data = self.__unpack_force_plate_data(data[offset:], (packet_size - offset),major, minor) + offset += rel_offset + mocap_data.set_force_plate_data(force_plate_data) + + # Device Data + rel_offset,device_data = self.__unpack_device_data(data[offset:], (packet_size - offset),major, minor) + offset += rel_offset + mocap_data.set_device_data(device_data) + + # Frame Suffix Data + #rel_offset, timecode, timecode_sub, timestamp, is_recording, tracked_models_changed = \ + rel_offset, frame_suffix_data = self.__unpack_frame_suffix_data(data[offset:], (packet_size - offset),major, minor) + offset += rel_offset + mocap_data.set_suffix_data(frame_suffix_data) + + + timecode = frame_suffix_data.timecode + timecode_sub= frame_suffix_data.timecode_sub + timestamp = frame_suffix_data.timestamp + is_recording = frame_suffix_data.is_recording + tracked_models_changed = frame_suffix_data.tracked_models_changed # Send information to any listener. 
- if self.newFrameListener is not None: - self.newFrameListener( frameNumber, markerSetCount, unlabeledMarkersCount, rigidBodyCount, skeletonCount, - labeledMarkerCount, timecode, timecodeSub, timestamp, isRecording, trackedModelsChanged ) + if self.new_frame_listener is not None: + data_dict={} + data_dict["frame_number"]=frame_number + data_dict[ "marker_set_count"] = marker_set_count + data_dict[ "unlabeled_markers_count"] = unlabeled_markers_count + data_dict[ "rigid_body_count"] = rigid_body_count + data_dict[ "skeleton_count"] =skeleton_count + data_dict[ "labeled_marker_count"] = labeled_marker_count + data_dict[ "timecode"] = timecode + data_dict[ "timecode_sub"] = timecode_sub + data_dict[ "timestamp"] = timestamp + data_dict[ "is_recording"] = is_recording + data_dict[ "tracked_models_changed"] = tracked_models_changed + + self.new_frame_listener( data_dict ) + + if self.rigid_body_list_listener is not None: + self.rigid_body_list_listener(rigid_body_data, timestamp) + + if self.marker_set_listener is not None: + self.marker_set_listener(marker_set_data) + + return offset, mocap_data - # Send rigid body list and timestamp - if self.rigidBodyListListener is not None: - self.rigidBodyListListener( self.rigidBodyList, timestamp ) # Unpack a marker set description packet - def __unpackMarkerSetDescription( self, data ): + def __unpack_marker_set_description( self, data, major, minor): + ms_desc = DataDescriptions.MarkerSetDescription() + offset = 0 name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) offset += len( name ) + 1 - self.__trace( "Markerset Name:", name.decode( 'utf-8' ) ) - - markerCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) - offset += 4 + trace_dd( "Marker Set Name: %s" % (name.decode( 'utf-8' )) ) + ms_desc.set_name(name) - for i in range( 0, markerCount ): - name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) - offset += len( name ) + 1 - self.__trace( "\tMarker Name:", name.decode( 
'utf-8' ) ) - - return offset + marker_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + trace_dd( "Marker Count : %3.1d" % marker_count) + if(marker_count > 0): + for i in range( 0, marker_count ): + name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + offset += len( name ) + 1 + trace_dd( "\t%2.1d Marker Name: %s"%(i, name.decode( 'utf-8' ) )) + ms_desc.add_marker_name(name) + + return offset, ms_desc # Unpack a rigid body description packet - def __unpackRigidBodyDescription( self, data ): + def __unpack_rigid_body_description( self, data, major, minor): + rb_desc=DataDescriptions.RigidBodyDescription() offset = 0 # Version 2.0 or higher - if( self.__natNetStreamVersion[0] >= 2 ): + if (major >= 2) or (major == 0): name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) offset += len( name ) + 1 - self.__trace( "\tMarker Name:", name.decode( 'utf-8' ) ) + rb_desc.set_name(name) + trace_dd( "\tRigid Body Name : ", name.decode( 'utf-8' ) ) - id = int.from_bytes( data[offset:offset+4], byteorder='little' ) + # ID + new_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 + rb_desc.set_id(new_id) + trace_dd( "\tID : ", str(new_id)) - parentID = int.from_bytes( data[offset:offset+4], byteorder='little' ) + #Parent ID + parent_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 + rb_desc.set_parent_id(parent_id) + trace_dd( "\tParent ID : ", parent_id) - timestamp = Vector3.unpack( data[offset:offset+12] ) + # Position Offsets + pos = Vector3.unpack( data[offset:offset+12] ) offset += 12 + rb_desc.set_pos(pos[0],pos[1],pos[2]) + + trace_dd( "\tPosition : [%3.2f, %3.2f, %3.2f]"% (pos[0], pos[1], pos[2] )) + + # Version 3.0 and higher, rigid body marker information contained in description + if (major >= 3) or (major == 0) : + # Marker Count + marker_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + trace_dd( "\tNumber of 
Markers : ", marker_count ) + + marker_count_range = range( 0, marker_count ) + offset1 = offset + offset2 = offset1 + (12*marker_count) + offset3 = offset2 + (4*marker_count) + # Marker Offsets X,Y,Z + marker_name="" + for marker in marker_count_range: + # Offset + marker_offset = Vector3.unpack(data[offset1:offset1+12]) + offset1 +=12 + + # Active Label + active_label = int.from_bytes(data[offset2:offset2+4],byteorder = 'little') + offset2 += 4 + + #Marker Name + if (major >= 4) or (major == 0): + # markername + marker_name, separator, remainder = bytes(data[offset3:]).partition( b'\0' ) + marker_name = marker_name.decode( 'utf-8' ) + offset3 += len( marker_name ) + 1 + + rb_marker=DataDescriptions.RBMarker(marker_name,active_label,marker_offset) + rb_desc.add_rb_marker(rb_marker) + trace_dd( "\t%3.1d Marker Label: %s Position: [x=%3.2f,y=%3.2f,z=%3.2f] %s" % (marker,active_label,\ + marker_offset[0], marker_offset[1], marker_offset[2],marker_name )) + + offset = offset3 - return offset + trace_dd("\tunpack_rigid_body_description processed bytes: ", offset) + return offset, rb_desc # Unpack a skeleton description packet - def __unpackSkeletonDescription( self, data ): + def __unpack_skeleton_description( self, data, major, minor): + skeleton_desc = DataDescriptions.SkeletonDescription() offset = 0 + #Name name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) offset += len( name ) + 1 - self.__trace( "\tMarker Name:", name.decode( 'utf-8' ) ) + skeleton_desc.set_name(name) + trace_dd( "Name : %s"% name.decode( 'utf-8' ) ) + + #ID + new_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + skeleton_desc.set_id(new_id) + trace_dd( "ID : %3.1d"% new_id ) + + # # of RigidBodies + rigid_body_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + trace_dd( "Rigid Body (Bone) Count : %3.1d" % rigid_body_count) + + # Loop over all Rigid Bodies + for i in range( 0, rigid_body_count ): + trace_dd("Rigid 
Body (Bone) ", i) + offset_tmp, rb_desc_tmp = self.__unpack_rigid_body_description( data[offset:], major, minor ) + offset+= offset_tmp + skeleton_desc.add_rigid_body_description(rb_desc_tmp) + return offset, skeleton_desc + + def __unpack_force_plate_description(self, data, major, minor): + fp_desc = None + offset = 0 + if major >= 3: + fp_desc = DataDescriptions.ForcePlateDescription() + # ID + new_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + fp_desc.set_id(new_id) + trace_dd("\tID : ", str(new_id)) + + # Serial Number + serial_number, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + offset += len( serial_number ) + 1 + fp_desc.set_serial_number(serial_number) + trace_dd( "\tSerial Number : ", serial_number.decode( 'utf-8' ) ) + + # Dimensions + f_width = FloatValue.unpack( data[offset:offset+4]) + offset += 4 + trace_dd( "\tWidth : %3.2f"% f_width) + f_length = FloatValue.unpack( data[offset:offset+4]) + offset += 4 + fp_desc.set_dimensions(f_width[0], f_length[0]) + trace_dd( "\tLength : %3.2f"% f_length) + + # Origin + origin = Vector3.unpack( data[offset:offset+12] ) + offset += 12 + fp_desc.set_origin(origin[0],origin[1],origin[2]) + trace_dd( "\tOrigin : %3.2f, %3.2f, %3.2f"%( origin[0], origin[1], origin[2] )) + + # Calibration Matrix 12x12 floats + trace_dd("Cal Matrix:") + cal_matrix_tmp= [[0.0 for col in range(12)] for row in range(12)] + + for i in range(0,12): + cal_matrix_row=FPCalMatrixRow.unpack(data[offset:offset+(12*4)]) + trace_dd("\t%3.1d %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e %3.3e" % (i + , cal_matrix_row[0], cal_matrix_row[1], cal_matrix_row[2], cal_matrix_row[3] + , cal_matrix_row[4], cal_matrix_row[5], cal_matrix_row[6], cal_matrix_row[7] + , cal_matrix_row[8], cal_matrix_row[9], cal_matrix_row[10], cal_matrix_row[11])) + cal_matrix_tmp[i] = copy.deepcopy(cal_matrix_row) + offset += (12*4) + fp_desc.set_cal_matrix(cal_matrix_tmp) + # Corners 4x3 floats + corners 
= FPCorners.unpack(data[offset:offset + (12*4)]) + offset += (12*4) + o_2=0 + trace_dd("Corners:") + corners_tmp = [[0.0 for col in range(3)] for row in range(4)] + for i in range(0,4): + trace_dd("\t%3.1d %3.3e %3.3e %3.3e"%(i, corners[o_2], corners[o_2+1], corners[o_2+2])) + corners_tmp[i][0]=corners[o_2] + corners_tmp[i][1]=corners[o_2+1] + corners_tmp[i][2]=corners[o_2+2] + o_2+=3 + fp_desc.set_corners(corners_tmp) + + # Plate Type int + plate_type = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset+=4 + fp_desc.set_plate_type(plate_type) + trace_dd ("Plate Type : ", plate_type) + + # Channel Data Type int + channel_data_type = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset+=4 + fp_desc.set_channel_data_type(channel_data_type) + trace_dd("Channel Data Type : ", channel_data_type) + + # Number of Channels int + num_channels = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset+=4 + trace_dd("Number of Channels : ", num_channels) + + # Channel Names list of NoC strings + for i in range(0, num_channels): + channel_name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + offset += len( channel_name ) + 1 + trace_dd( "\tChannel Name %3.1d: %s"%(i, channel_name.decode( 'utf-8' ) )) + fp_desc.add_channel_name(channel_name) + + trace_dd("unpackForcePlate processed ", offset, " bytes") + return offset, fp_desc + + def __unpack_device_description(self, data, major, minor): + device_desc=None + offset = 0 + if major >= 3: + # new_id + new_id = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + trace_dd("\tID : ", str(new_id)) + + # Name + name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + offset += len( name ) + 1 + trace_dd( "\tName : ", name.decode( 'utf-8' ) ) + + # Serial Number + serial_number, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + offset += len( serial_number ) + 1 + trace_dd( "\tSerial Number : ", serial_number.decode( 
'utf-8' ) ) + + + # Device Type int + device_type = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset+=4 + trace_dd ("Device Type : ", device_type) + + # Channel Data Type int + channel_data_type = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset+=4 + trace_dd("Channel Data Type : ", channel_data_type) + + device_desc = DataDescriptions.DeviceDescription(new_id,name,serial_number,device_type,channel_data_type) + + # Number of Channels int + num_channels = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset+=4 + trace_dd("Number of Channels ", num_channels) + + # Channel Names list of NoC strings + for i in range(0, num_channels): + channel_name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + offset += len( channel_name ) + 1 + device_desc.add_channel_name(channel_name) + trace_dd( "\tChannel ",i," Name : ", channel_name.decode( 'utf-8' ) ) + + trace_dd("unpack_device_description processed ", offset, " bytes") + return offset, device_desc + + def __unpack_camera_description(self, data, major, minor): + offset = 0 + # Name + name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + offset += len( name ) + 1 + trace_dd( "\tName : %s"% name.decode( 'utf-8' ) ) + # Position + position = Vector3.unpack( data[offset:offset+12] ) + offset += 12 + trace_dd( "\tPosition : [%3.2f, %3.2f, %3.2f]"% (position[0], position[1], position[2] )) + + # Orientation + orientation = Quaternion.unpack( data[offset:offset+16] ) + offset += 16 + trace_dd( "\tOrientation: [%3.2f, %3.2f, %3.2f, %3.2f]"% (orientation[0], orientation[1], orientation[2], orientation[3] )) + trace_dd("unpack_camera_description processed %3.1d bytes"% offset) + + camera_desc=DataDescriptions.CameraDescription(name, position, orientation) + return offset, camera_desc + + + def __unpack_marker_description( self, data, major, minor ): + offset = 0 + + # Name + name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + 
offset += len( name ) + 1 + trace_dd( "\tName : %s"% name.decode( 'utf-8' ) ) + + # ID + marker_id = data[offset:offset+4] + offset += 4 + trace_dd( "\tID : %d"% (marker_id )) + + # Initial Position + initialPosition = Vector3.unpack( data[offset:offset+12] ) + offset += 12 + trace_dd( "\tPosition : [%3.2f, %3.2f, %3.2f]"% (initialPosition[0], initialPosition[1], initialPosition[2] )) + + # Size + marker_size = FloatValue.unpack( data[offset:offset+4] ) + offset += 4 + trace_mf( "\tMarker Size:", marker_size ) + + # Params + marker_params, = struct.unpack( 'h', data[offset:offset+2] ) + offset += 2 + trace_mf( "\tParams :", marker_params ) + + trace_dd("unpack_marker_description processed %3.1d bytes"% offset) + + # Package for return object + marker_desc=DataDescriptions.MarkerDescription(name, marker_id, initialPosition, marker_size, marker_params) + return offset, marker_desc + + def __unpack_asset_rigid_body_data( self, data, major, minor ): + offset = 0 + # ID + rbID = data[offset:offset+4] + offset += 4 + trace_dd( "\tID : %d"% (rbID )) + + # Position: x,y,z + pos = Vector3.unpack( data[offset:offset+12] ) + offset += 12 + trace_mf( "\tPosition : [%3.2f, %3.2f, %3.2f]"% (pos[0], pos[1], pos[2] )) + + # Orientation: qx, qy, qz, qw + rot = Quaternion.unpack( data[offset:offset+16] ) + offset += 16 + trace_mf( "\tOrientation : [%3.2f, %3.2f, %3.2f, %3.2f]"% (rot[0], rot[1], rot[2], rot[3] )) + + # Mean error + mean_error, = FloatValue.unpack( data[offset:offset+4] ) + offset += 4 + trace_mf( "\tMean Error : %3.2f"% mean_error ) + + # Params + marker_params, = struct.unpack( 'h', data[offset:offset+2] ) + offset += 2 + trace_mf( "\tParams :", marker_params ) + + trace_dd("unpack_marker_description processed %3.1d bytes"% offset) + + # Package for return object + rigid_body_data=MoCapData.AssetRigidBodyData(rbID, pos, rot, mean_error, marker_params) + + return offset, rigid_body_data + + def __unpack_asset_marker_data( self, data, major, minor ): + # ID + 
marker_id = data[offset:offset+4] + offset += 4 + trace_dd( "\tID : %d"% (marker_id )) + + # Position: x,y,z + pos = Vector3.unpack( data[offset:offset+12] ) + offset += 12 + trace_mf( "\tPosition : [%3.2f, %3.2f, %3.2f]"% (pos[0], pos[1], pos[2] )) + + # Size + marker_size, = FloatValue.unpack( data[offset:offset+4] ) + offset += 4 + trace_mf( "\tMarker Size : %3.2f"% marker_size ) + + # Params + marker_params, = struct.unpack( 'h', data[offset:offset+2] ) + offset += 2 + trace_mf( "\tParams :", marker_params ) + + # Residual + residual, = FloatValue.unpack( data[offset:offset+4] ) + offset += 4 + trace_mf( "\tResidual : %3.2f"% residual ) + + marker_data = DataDescriptions.AssetMarkerData(marker_id, pos, marker_size, marker_params, residual) + return offset, marker_data + + def __unpack_asset_data( self, data, packet_size, major, minor): + asset_data = MoCapData.AssetData() + + offset = 0 + + # Asset Count + asset_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset += 4 + trace_mf( "Asset Count:", asset_count ) + + # Get data size (4 bytes) + offset_tmp, unpackedDataSize = self.__unpack_data_size(data[offset:],major, minor) + offset += offset_tmp + + # Unpack assets + for _ in range( 0, asset_count ): + # Asset ID 4 bytes + assetID = data[offset:offset+4] + offset += 4 + trace_dd( "\tAsset ID : %d"% (assetID )) + asset_data.set_id(assetID) + + # # of RigidBodies + numRBs = data[offset:offset+4] + offset += 4 + trace_dd( "\tRigid Bodies : %d" % (numRBs)) - id = int.from_bytes( data[offset:offset+4], byteorder='little' ) + offset1=0 + for rbNum in range(numRBs): + # # of RigidBodies + offset1,rigid_body = self.__unpack_asset_rigid_body_data(data+offset, major, minor) + offset += offset1 + asset_data.add_rigid_body(rigid_body) + + + # # of Markers + numMarkers = data[offset:offset+4] + offset += 4 + trace_dd( "\tMarkers : %d" % (numMarkers)) + + for markerNum in range(numMarkers): + # # of Markers + offset1,marker = 
self.__unpack_asset_marker_data( data+offset, major, minor) + offset += offset1 + asset_data.add_marker(marker) + + return offset, asset_data + + def __unpack_asset_description(self, data, major, minor): + offset = 0 + + # Name + name, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + offset += len( name ) + 1 + trace_dd( "\tName : %s"% name.decode( 'utf-8' ) ) + + # Asset Type 4 bytes + assetType = data[offset:offset+4] offset += 4 + trace_dd( "\tType : %d"% (assetType )) - rigidBodyCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + # ID 4 bytes + assetID = data[offset:offset+4] offset += 4 + trace_dd( "\tID : %d"% (assetID )) - for i in range( 0, rigidBodyCount ): - offset += self.__unpackRigidBodyDescription( data[offset:] ) + # # of RigidBodies + numRBs = data[offset:offset+4] + offset += 4 + trace_dd( "\tNumber of RBs: %d" % (numRBs)) + + rigidbodyArray=[] + offset1=0 + for rbNum in range(numRBs): + # # of RigidBodies + offset1,rigidbody = self.__unpack_rigid_body_description(data+offset, major, minor) + offset += offset1 + rigidbodyArray.append(rigidbody) + + + # # of Markers + numMarkers = data[offset:offset+4] + offset += 4 + trace_dd( "\tnumber of Markers: %d" % (numMarkers)) + + markerArray=[] + for markerNum in range(numMarkers): + # # of Markers + offset1,marker = self.__unpack_marker_description( data+offset, major, minor) + offset += offset1 + markerArray.append(marker) + + trace_dd("unpack_asset_description processed %3.1d bytes"% offset) + + # package for output + asset_desc=DataDescriptions.AssetDescription(name, assetType, assetID, rigidbodyArray, markerArray) + return offset, asset_desc - return offset # Unpack a data description packet - def __unpackDataDescriptions( self, data ): + def __unpack_data_descriptions( self, data : bytes, packet_size, major, minor): + data_descs = DataDescriptions.DataDescriptions() offset = 0 - datasetCount = int.from_bytes( data[offset:offset+4], byteorder='little' ) + # # of data 
sets to process + dataset_count = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - - for i in range( 0, datasetCount ): - type = int.from_bytes( data[offset:offset+4], byteorder='little' ) + trace_dd("Dataset Count : ", str(dataset_count)) + for i in range( 0, dataset_count ): + trace_dd("Dataset ", str(i)) + data_type = int.from_bytes( data[offset:offset+4], byteorder='little' ) offset += 4 - if( type == 0 ): - offset += self.__unpackMarkerSetDescription( data[offset:] ) - elif( type == 1 ): - offset += self.__unpackRigidBodyDescription( data[offset:] ) - elif( type == 2 ): - offset += self.__unpackSkeletonDescription( data[offset:] ) - - def __dataThreadFunction( self, sock ): - sock.settimeout(0.01) - while self.running: + data_tmp=None + if data_type == 0 : + trace_dd("Type: 0 Markerset") + offset_tmp, data_tmp = self.__unpack_marker_set_description( data[offset:], major, minor ) + elif data_type == 1 : + trace_dd("Type: 1 Rigid Body") + offset_tmp, data_tmp = self.__unpack_rigid_body_description( data[offset:], major, minor ) + elif data_type == 2 : + trace_dd("Type: 2 Skeleton") + offset_tmp, data_tmp = self.__unpack_skeleton_description( data[offset:], major, minor ) + elif data_type == 3 : + trace_dd("Type: 3 Force Plate") + offset_tmp, data_tmp = self.__unpack_force_plate_description(data[offset:], major, minor) + elif data_type == 4 : + trace_dd("Type: 4 Device") + offset_tmp, data_tmp = self.__unpack_device_description(data[offset:], major, minor) + elif data_type == 5 : + trace_dd("Type: 5 Camera") + offset_tmp, data_tmp = self.__unpack_camera_description(data[offset:], major, minor) + elif data_type == 6 : + trace_dd("Type: 6 Asset") + offset_tmp, data_tmp = self.__unpack_asset_description(data[offset:], major, minor) + else: + print("Type: Unknown " + str(data_type)) + print("ERROR: Type decode failure" ) + print("\t"+ str(i + 1) +" datasets processed of " + str(dataset_count)) + print("\t "+ str(offset) +" bytes processed of 
" + str(packet_size) ) + print("\tPACKET DECODE STOPPED") + return offset + offset += offset_tmp + data_descs.add_data(data_tmp) + trace_dd("\t"+ str(i) +" datasets processed of " + str(dataset_count)) + trace_dd("\t "+ str(offset) +" bytes processed of " + str(packet_size) ) + + return offset, data_descs + + # __unpack_server_info is for local use of the client + # and will update the values for the versions/ NatNet capabilities + # of the server. + def __unpack_server_info(self, data, packet_size, major, minor): + offset = 0 + # Server name + #szName = data[offset: offset+256] + self.__application_name, separator, remainder = bytes(data[offset: offset+256]).partition( b'\0' ) + self.__application_name=str(self.__application_name, "utf-8") + offset += 256 + # Server Version info + server_version = struct.unpack( 'BBBB', data[offset:offset+4] ) + offset += 4 + self.__server_version[0] = server_version[0] + self.__server_version[1] = server_version[1] + self.__server_version[2] = server_version[2] + self.__server_version[3] = server_version[3] + + # NatNet Version info + nnsvs = struct.unpack( 'BBBB', data[offset:offset+4] ) + offset += 4 + self.__nat_net_stream_version_server[0]=nnsvs[0] + self.__nat_net_stream_version_server[1]=nnsvs[1] + self.__nat_net_stream_version_server[2]=nnsvs[2] + self.__nat_net_stream_version_server[3]=nnsvs[3] + if (self.__nat_net_requested_version[0] == 0) and\ + (self.__nat_net_requested_version[1] == 0): + trace_mf("resetting requested version to %d %d %d %d from %d %d %d %d"%( + self.__nat_net_stream_version_server[0], + self.__nat_net_stream_version_server[1], + self.__nat_net_stream_version_server[2], + self.__nat_net_stream_version_server[3], + self.__nat_net_requested_version[0], + self.__nat_net_requested_version[1], + self.__nat_net_requested_version[2], + self.__nat_net_requested_version[3])) + + self.__nat_net_requested_version[0] = self.__nat_net_stream_version_server[0] + self.__nat_net_requested_version[1] = 
self.__nat_net_stream_version_server[1] + self.__nat_net_requested_version[2] = self.__nat_net_stream_version_server[2] + self.__nat_net_requested_version[3] = self.__nat_net_stream_version_server[3] + # Determine if the bitstream version can be changed + if (self.__nat_net_stream_version_server[0] >= 4) and (self.use_multicast == False): + self.__can_change_bitstream_version = True + + + + trace_mf("Sending Application Name: ", self.__application_name) + trace_mf("NatNetVersion " , str(self.__nat_net_stream_version_server[0]), " " + , str(self.__nat_net_stream_version_server[1]), " " + , str(self.__nat_net_stream_version_server[2]), " " + , str(self.__nat_net_stream_version_server[3])) + + trace_mf("ServerVersion " , str(self.__server_version[0]), " " + , str(self.__server_version[1]), " " + , str(self.__server_version[2]), " " + , str(self.__server_version[3]) ) + return offset + + # __unpack_bitstream_info is for local use of the client + # and will update the values for the current bitstream + # of the server. 
+ + def __unpack_bitstream_info(self, data, packet_size, major, minor): + nn_version=[] + inString = data.decode('utf-8') + messageList = inString.split(',') + if( len(messageList) > 1 ): + if( messageList[0] == 'Bitstream'): + nn_version=messageList[1].split('.') + return nn_version + + def __command_thread_function( self, in_socket, stop, gprint_level): + message_id_dict={} + if not self.use_multicast: + in_socket.settimeout(2.0) + data=bytearray(0) + # 64k buffer size + recv_buffer_size=64*1024 + while not stop(): # Block for input try: - data, addr = sock.recvfrom( 32768 ) # 32k byte buffer size - if( len( data ) >= 4): - self.__processMessage( data ) - except socket.timeout: - pass + data, addr = in_socket.recvfrom( recv_buffer_size ) + except socket.error as msg: + if stop(): + #print("ERROR: command socket access error occurred:\n %s" %msg) + #return 1 + print("shutting down") + except socket.herror: + print("ERROR: command socket access herror occurred") + return 2 + except socket.gaierror: + print("ERROR: command socket access gaierror occurred") + return 3 + except socket.timeout: + if(self.use_multicast): + print("ERROR: command socket access timeout occurred. 
Server not responding") + #return 4 - def __processMessage( self, data ): - self.__trace( "Begin Packet\n------------\n" ) + if len( data ) > 0 : + #peek ahead at message_id + message_id = get_message_id(data) + tmp_str="mi_%1.1d"%message_id + if tmp_str not in message_id_dict: + message_id_dict[tmp_str]=0 + message_id_dict[tmp_str] += 1 + + print_level = gprint_level() + if message_id == self.NAT_FRAMEOFDATA: + if print_level > 0: + if (message_id_dict[tmp_str] % print_level) == 0: + print_level = 1 + else: + print_level = 0 + message_id = self.__process_message( data , print_level) - messageID = int.from_bytes( data[0:2], byteorder='little' ) - self.__trace( "Message ID:", messageID ) - - packetSize = int.from_bytes( data[2:4], byteorder='little' ) - self.__trace( "Packet Size:", packetSize ) - - if not len( data ) - 4 >= packetSize: - # Not enough data - return + data=bytearray(0) + if not self.use_multicast: + if not stop(): + self.send_keep_alive(in_socket, self.server_ip_address, self.command_port) + return 0 + + def __data_thread_function( self, in_socket, stop, gprint_level): + message_id_dict={} + data=bytearray(0) + # 64k buffer size + recv_buffer_size=64*1024 + + while not stop(): + # Block for input + try: + data, addr = in_socket.recvfrom( recv_buffer_size ) + except socket.error as msg: + if not stop(): + print("ERROR: data socket access error occurred:\n %s" %msg) + return 1 + except socket.herror: + print("ERROR: data socket access herror occurred") + #return 2 + except socket.gaierror: + print("ERROR: data socket access gaierror occurred") + #return 3 + except socket.timeout: + #if self.use_multicast: + print("ERROR: data socket access timeout occurred. 
Server not responding") + #return 4 + if len( data ) > 0 : + #peek ahead at message_id + message_id = get_message_id(data) + tmp_str="mi_%1.1d"%message_id + if tmp_str not in message_id_dict: + message_id_dict[tmp_str]=0 + message_id_dict[tmp_str] += 1 + + print_level = gprint_level() + if message_id == self.NAT_FRAMEOFDATA: + if print_level > 0: + if (message_id_dict[tmp_str] % print_level) == 0: + print_level = 1 + else: + print_level = 0 + message_id = self.__process_message( data , print_level) + + data=bytearray(0) + return 0 + + def __process_message( self, data : bytes, print_level=0): + #return message ID + major = self.get_major() + minor = self.get_minor() + + trace( "Begin Packet\n-----------------" ) + show_nat_net_version = False + if show_nat_net_version: + trace("NatNetVersion " , str(self.__nat_net_requested_version[0]), " "\ + , str(self.__nat_net_requested_version[1]), " "\ + , str(self.__nat_net_requested_version[2]), " "\ + , str(self.__nat_net_requested_version[3])) + + message_id = get_message_id(data) + + packet_size = int.from_bytes( data[2:4], byteorder='little' ) + + #skip the 4 bytes for message ID and packet_size offset = 4 - if( messageID == self.NAT_FRAMEOFDATA ): - self.__unpackMocapData( data[offset:] ) - elif( messageID == self.NAT_MODELDEF ): - self.__unpackDataDescriptions( data[offset:] ) - elif( messageID == self.NAT_PINGRESPONSE ): - offset += 256 # Skip the sending app's Name field - offset += 4 # Skip the sending app's Version info - self.__natNetStreamVersion = struct.unpack( 'BBBB', data[offset:offset+4] ) - offset += 4 - elif( messageID == self.NAT_RESPONSE ): - if( packetSize == 4 ): - commandResponse = int.from_bytes( data[offset:offset+4], byteorder='little' ) + if message_id == self.NAT_FRAMEOFDATA : + trace( "Message ID : %3.1d NAT_FRAMEOFDATA"% message_id ) + trace( "Packet Size : ", packet_size ) + + offset_tmp, mocap_data = self.__unpack_mocap_data( data[offset:], packet_size, major, minor ) + offset += offset_tmp 
+ trace("MoCap Frame: %d\n"%(mocap_data.prefix_data.frame_number)) + # get a string version of the data for output + mocap_data_str=mocap_data.get_as_string() + if print_level >= 1: + print("%s\n"%mocap_data_str) + + elif message_id == self.NAT_MODELDEF : + trace( "Message ID : %3.1d NAT_MODELDEF"% message_id ) + trace( "Packet Size : %d"% packet_size ) + offset_tmp, data_descs = self.__unpack_data_descriptions( data[offset:], packet_size, major, minor) + offset += offset_tmp + print("Data Descriptions:\n") + # get a string version of the data for output + data_descs_str=data_descs.get_as_string() + if print_level>0: + print("%s\n"%(data_descs_str)) + + elif message_id == self.NAT_SERVERINFO : + trace( "Message ID : %3.1d NAT_SERVERINFO"% message_id ) + trace( "Packet Size : ", packet_size ) + offset += self.__unpack_server_info( data[offset:], packet_size, major, minor) + + elif message_id == self.NAT_RESPONSE : + trace( "Message ID : %3.1d NAT_RESPONSE"% message_id ) + trace( "Packet Size : ", packet_size ) + if packet_size == 4 : + command_response = int.from_bytes( data[offset:offset+4], byteorder='little' ) + trace( "Command response: %d - %d %d %d %d"% (command_response, + data[offset], + data[offset+1], + data[offset+2], + data[offset+3])) offset += 4 else: + show_remainder = False message, separator, remainder = bytes(data[offset:]).partition( b'\0' ) + if(len(message) < 30): + tmpString = message.decode('utf-8') + # Decode bitstream version + if( tmpString.startswith('Bitstream') ): + nn_version = self.__unpack_bitstream_info(data[offset:],packet_size, major, minor) + # This is the current server version + if(len(nn_version)>1): + for i in range( len(nn_version) ): + self.__nat_net_stream_version_server[i] = int(nn_version[i]) + for i in range( len(nn_version),4 ): + self.__nat_net_stream_version_server[i] = 0 + offset += len( message ) + 1 - self.__trace( "Command response:", message.decode( 'utf-8' ) ) - elif( messageID == self.NAT_UNRECOGNIZED_REQUEST 
): - self.__trace( "Received 'Unrecognized request' from server" ) - elif( messageID == self.NAT_MESSAGESTRING ): + + if(show_remainder): + trace( "Command response:", message.decode( 'utf-8' ),\ + " separator:", separator, " remainder:",remainder ) + else: + trace( "Command response:", message.decode( 'utf-8' )) + elif message_id == self.NAT_UNRECOGNIZED_REQUEST : + trace( "Message ID : %3.1d NAT_UNRECOGNIZED_REQUEST: "% message_id ) + trace( "Packet Size : ", packet_size ) + trace( "Received 'Unrecognized request' from server" ) + elif message_id == self.NAT_MESSAGESTRING : + trace( "Message ID : %3.1d NAT_MESSAGESTRING"% message_id) + trace( "Packet Size : ", packet_size ) message, separator, remainder = bytes(data[offset:]).partition( b'\0' ) offset += len( message ) + 1 - self.__trace( "Received message from server:", message.decode( 'utf-8' ) ) + trace( "Received message from server:", message.decode( 'utf-8' ) ) else: - self.__trace( "ERROR: Unrecognized packet type" ) - - self.__trace( "End Packet\n----------\n" ) + trace( "Message ID : %3.1d UNKNOWN"% message_id ) + trace( "Packet Size : ", packet_size ) + trace( "ERROR: Unrecognized packet type" ) - def sendCommand( self, command, commandStr, socket, address ): + trace( "End Packet\n-----------------" ) + return message_id + + def send_request( self, in_socket, command, command_str, address ): # Compose the message in our known message format - if( command == self.NAT_REQUEST_MODELDEF or command == self.NAT_REQUEST_FRAMEOFDATA ): - packetSize = 0 - commandStr = "" - elif( command == self.NAT_REQUEST ): - packetSize = len( commandStr ) + 1 - elif( command == self.NAT_PING ): - commandStr = "Ping" - packetSize = len( commandStr ) + 1 + packet_size = 0 + if command == self.NAT_REQUEST_MODELDEF or command == self.NAT_REQUEST_FRAMEOFDATA : + packet_size = 0 + command_str = "" + elif command == self.NAT_REQUEST : + packet_size = len( command_str ) + 1 + elif command == self.NAT_CONNECT : + tmp_version=[4,1,0,0] 
+ print("NAT_CONNECT to Motive with %d %d %d %d"%( + tmp_version[0], + tmp_version[1], + tmp_version[2], + tmp_version[3] + )) + #allocate a byte array for 270 bytes + # to connect with a specific version + # The first 4 bytes spell out "Ping" + command_str = [] + command_str = [0 for i in range(270)] + command_str[0] =80 + command_str[1] =105 + command_str[2] =110 + command_str[3] =103 + command_str[264] =0 + command_str[265] =tmp_version[0] + command_str[266] =tmp_version[1] + command_str[267] =tmp_version[2] + command_str[268] =tmp_version[3] + packet_size = len( command_str ) + 1 + elif command == self.NAT_KEEPALIVE: + packet_size = 0 + command_str = "" data = command.to_bytes( 2, byteorder='little' ) - data += packetSize.to_bytes( 2, byteorder='little' ) - - data += commandStr.encode( 'utf-8' ) + data += packet_size.to_bytes( 2, byteorder='little' ) + + if command == self.NAT_CONNECT : + data+=bytearray(command_str) + else: + data += command_str.encode( 'utf-8' ) data += b'\0' - socket.sendto( data, address ) - - def run( self ): - # Set running flag to True - self.running = True + return in_socket.sendto( data, address ) + def send_command( self, command_str): + #print("Send command %s"%command_str) + nTries = 3 + ret_val = -1 + while nTries: + nTries -= 1 + ret_val = self.send_request( self.command_socket, self.NAT_REQUEST, command_str, (self.server_ip_address, self.command_port) ) + if (ret_val != -1): + break; + return ret_val + + #return self.send_request(self.data_socket, self.NAT_REQUEST, command_str, (self.server_ip_address, self.command_port) ) + + def send_commands(self,tmpCommands, print_results: bool =True): + for sz_command in tmpCommands: + return_code = self.send_command(sz_command) + if(print_results): + print("Command: %s - return_code: %d"% (sz_command, return_code) ) + + def send_keep_alive(self,in_socket, server_ip_address, server_port): + return self.send_request(in_socket, self.NAT_KEEPALIVE, "", (server_ip_address, server_port)) + + def 
get_command_port(self): + return self.command_port + + def refresh_configuration(self): + #query for application configuration + #print("Request current configuration") + sz_command = "Bitstream" + return_code = self.send_command(sz_command) + time.sleep(0.5) + + def get_application_name(self): + return self.__application_name + + def get_nat_net_requested_version(self): + return self.__nat_net_requested_version + + def get_nat_net_version_server(self): + return self.__nat_net_stream_version_server + + def get_server_version(self): + return self.__server_version + + + + def run( self ): # Create the data socket - self.dataSocket = self.__createDataSocket( self.dataPort ) - if( self.dataSocket is None ): + self.data_socket = self.__create_data_socket( self.data_port ) + if self.data_socket is None : print( "Could not open data channel" ) - exit + return False # Create the command socket - self.commandSocket = self.__createCommandSocket() - if( self.commandSocket is None ): + self.command_socket = self.__create_command_socket() + if self.command_socket is None : print( "Could not open command channel" ) - exit + return False + self.__is_locked = True + self.stop_threads = False # Create a separate thread for receiving data packets - dataThread = Thread( target = self.__dataThreadFunction, args = (self.dataSocket, )) - dataThread.start() + self.data_thread = Thread( target = self.__data_thread_function, args = (self.data_socket, lambda : self.stop_threads, lambda : self.print_level, )) + self.data_thread.start() # Create a separate thread for receiving command packets - commandThread = Thread( target = self.__dataThreadFunction, args = (self.commandSocket, )) - commandThread.start() + self.command_thread = Thread( target = self.__command_thread_function, args = (self.command_socket, lambda : self.stop_threads, lambda : self.print_level,)) + self.command_thread.start() - self.sendCommand( self.NAT_REQUEST_MODELDEF, "", self.commandSocket, (self.serverIPAddress, 
self.commandPort) ) + # Required for setup + # Get NatNet and server versions + self.send_request(self.command_socket, self.NAT_CONNECT, "", (self.server_ip_address, self.command_port) ) - def stop( self ): - self.running = False + + ##Example Commands + ## Get NatNet and server versions + #self.send_request(self.command_socket, self.NAT_CONNECT, "", (self.server_ip_address, self.command_port) ) + ## Request the model definitions + #self.send_request(self.command_socket, self.NAT_REQUEST_MODELDEF, "", (self.server_ip_address, self.command_port) ) + return True + + def shutdown(self): + print("shutdown called") + self.stop_threads = True + # closing sockets causes blocking recvfrom to throw + # an exception and break the loop + self.command_socket.close() + self.data_socket.close() + # attempt to join the threads back. + self.command_thread.join() + self.data_thread.join() diff --git a/sw/ground_segment/python/natnet3.x/display_objects.py b/sw/ground_segment/python/natnet3.x/display_objects.py index 9eb75603cc..b61f85f431 100755 --- a/sw/ground_segment/python/natnet3.x/display_objects.py +++ b/sw/ground_segment/python/natnet3.x/display_objects.py @@ -38,6 +38,8 @@ import argparse # import NatNet client from NatNetClient import NatNetClient +import DataDescriptions +import MoCapData # if PAPARAZZI_HOME not set, then assume the tree containing this # file is a reasonable substitute @@ -108,69 +110,69 @@ def is_moving(old_pos, new_pos): # if failing, update position return True -def receiveMarkerSet(name, posList): +def receiveMarkerSet(data: MoCapData.MarkerSetData): ''' callback for markerset with name and marker position as input ''' global current_index + for marker_data in data.marker_data_list: + # check if name is matching regexp + name = marker_data.model_name.decode('utf-8') + if re.fullmatch(args.name, name) is not None: - # check if name is matching regexp - name = name.decode('utf-8') - if re.fullmatch(args.name, name) is not None: - - # check if message 
should be sent (first time or period) - send = False - now = time() - if name in markerset: - dt = now - markerset[name]['time'] - dt_refresh = now - markerset[name]['time_refresh'] - if dt >= period: - # period elapsed, check if moved - markerset[name]['time'] = now - moved = is_moving(markerset[name]['pos'], posList) - if moved: + # check if message should be sent (first time or period) + send = False + now = time() + if name in markerset: + dt = now - markerset[name]['time'] + dt_refresh = now - markerset[name]['time_refresh'] + if dt >= period: + # period elapsed, check if moved + markerset[name]['time'] = now + moved = is_moving(markerset[name]['pos'], marker_data.marker_pos_list) + if moved: + send = True + markerset[name]['pos'] = marker_data.marker_pos_list + if dt_refresh >= args.refresh_period: + # refresh period elapsed, send anyway send = True - markerset[name]['pos'] = posList - if dt_refresh >= args.refresh_period: - # refresh period elapsed, send anyway - send = True - else: - send = True - markerset[name] = {'time_refresh': now, 'time': now, 'id': current_index, 'pos': posList } - current_index += 1 - - if args.very_verbose: - print(name, posList, time) - - if send: - if args.verbose and (not args.very_verbose): - print(name, posList, now) - - # build list of 2D points and compute convex hull - points = [(pos[X_AXIS], Y_SIGN*pos[Y_AXIS]) for pos in posList] - hull = ConvexHull(points) - # build lists of polygon corners to display in lat long - latitudes = [ int(1e7 * (lat0 + np.rad2deg(points[i][1] / R_earth))) for i in hull.vertices ] - longitudes = [ int(1e7 * (long0 + np.rad2deg(points[i][0] / R_cos_lat0))) for i in hull.vertices ] - - # send SHAPE message - shape = PprzMessage("ground", "SHAPE") - shape['id'] = markerset[name]['id'] - shape['linecolor'] = '"{}"'.format(args.color) - shape['fillcolor'] = '"{}"'.format(args.color) - shape['opacity'] = 1 # light - shape['shape'] = 1 # polygon - shape['status'] = 0 # create or update - 
shape['latarr'] = latitudes - shape['lonarr'] = longitudes - shape['radius'] = 0. # not relevant - if args.show_name: - shape['text'] = name else: - shape['text'] = '" "' - ivy.send(shape) - markerset[name]['time_refresh'] = now - sleep(0.01) + send = True + markerset[name] = {'time_refresh': now, 'time': now, 'id': current_index, 'pos': marker_data.marker_pos_list } + current_index += 1 + + if args.very_verbose: + print(name, marker_data.marker_pos_list, time) + + if send: + if args.verbose and (not args.very_verbose): + print(name, marker_data.marker_pos_list, now) + + # build list of 2D points and compute convex hull + points = [(pos[X_AXIS], Y_SIGN*pos[Y_AXIS]) for pos in marker_data.marker_pos_list] + hull = ConvexHull(points) + # build lists of polygon corners to display in lat long + latitudes = [ int(1e7 * (lat0 + np.rad2deg(points[i][1] / R_earth))) for i in hull.vertices ] + longitudes = [ int(1e7 * (long0 + np.rad2deg(points[i][0] / R_cos_lat0))) for i in hull.vertices ] + + # send SHAPE message + shape = PprzMessage("ground", "SHAPE") + shape['id'] = markerset[name]['id'] + shape['linecolor'] = '"{}"'.format(args.color) + shape['fillcolor'] = '"{}"'.format(args.color) + shape['opacity'] = 1 # light + shape['shape'] = 1 # polygon + shape['status'] = 0 # create or update + shape['latarr'] = latitudes + shape['lonarr'] = longitudes + shape['radius'] = 0. 
# not relevant + if args.show_name: + shape['text'] = name + else: + shape['text'] = '" "' + ivy.send(shape) + markerset[name]['time_refresh'] = now + sleep(0.01) def check_timeout(): ''' @@ -196,16 +198,14 @@ def check_timeout(): # start natnet interface -natnet_version = (3,0,0,0) -if args.old_natnet: - natnet_version = (2,9,0,0) -natnet = NatNetClient( - server=args.server, - markerSetListener=receiveMarkerSet, - dataPort=args.data_port, - commandPort=args.command_port, - verbose=args.very_verbose, - version=natnet_version) +natnet = NatNetClient() +natnet.set_server_address(args.server) +natnet.set_client_address('0.0.0.0') +natnet.marker_set_listener = receiveMarkerSet +if args.verbose: + natnet.set_print_level(1) # print all frames +else: + natnet.set_print_level(0) print("Starting Object Display interface at %s" % (args.server)) @@ -218,11 +218,11 @@ try: check_timeout() except (KeyboardInterrupt, SystemExit): print("Shutting down ivy and natnet interfaces...") - natnet.stop() + natnet.shutdown() ivy.shutdown() except OSError: print("Natnet connection error") - natnet.stop() + natnet.shutdown() ivy.stop() exit(-1) diff --git a/sw/ground_segment/python/natnet3.x/natnet2ivy.py b/sw/ground_segment/python/natnet3.x/natnet2ivy.py index 554b9e716b..506169d65c 100755 --- a/sw/ground_segment/python/natnet3.x/natnet2ivy.py +++ b/sw/ground_segment/python/natnet3.x/natnet2ivy.py @@ -156,6 +156,8 @@ import argparse # import NatNet client from NatNetClient import NatNetClient +import DataDescriptions +import MoCapData # if PAPARAZZI_HOME not set, then assume the tree containing this # file is a reasonable substitute @@ -288,15 +290,18 @@ def performTransformation( pos, vel, quat ): return pos, vel, quat -def receiveRigidBodyList( rigidBodyList, stamp ): - for (ac_id, pos, quat, valid) in rigidBodyList: - if not valid: +def receiveRigidBodyList( rigid_body_data, stamp ): + for rigid_body in rigid_body_data.rigid_body_list: + if not rigid_body.tracking_valid: # skip if 
rigid body is not valid continue - i = str(ac_id) + i = str(rigid_body.id_num) if i not in id_dict.keys(): continue + + pos = rigid_body.pos + quat = rigid_body.rot store_track(i, pos, stamp) if timestamp[i] is None or abs(stamp - timestamp[i]) < period: @@ -380,32 +385,44 @@ if not run_test_cases: ivy = IvyMessagesInterface("natnet2ivy") # start natnet interface - natnet_version = (3,0,0,0) - if args.old_natnet: - natnet_version = (2,9,0,0) - natnet = NatNetClient( - server=args.server, - rigidBodyListListener=receiveRigidBodyList, - dataPort=args.data_port, - commandPort=args.command_port, - verbose=args.verbose, - version=natnet_version) + natnet = NatNetClient() + natnet.set_server_address(args.server) + natnet.set_client_address('0.0.0.0') + natnet.rigid_body_list_listener = receiveRigidBodyList + if args.verbose: + natnet.set_print_level(1) # print all frames + else: + natnet.set_print_level(0) + if args.old_natnet: + natnet.set_nat_net_version(2,9) print("Starting Natnet3.x to Ivy interface at %s" % (args.server)) try: # Start up the streaming client. # This will run perpetually, and operate on a separate thread. id_dict, timestamp, period, track, q_total, q_nose_correction = process_args(args) - natnet.run() + is_running = natnet.run() + if not is_running: + print("Natnet error: Could not start streaming client.") + exit(-1) + + sleep(1) + if not natnet.connected(): + print("Natnet error: Fail to connect to natnet") + exit(-1) + + if args.verbose: + print_configuration(natnet) + while True: sleep(1) except (KeyboardInterrupt, SystemExit): print("Shutting down ivy and natnet interfaces...") - natnet.stop() + natnet.shutdown() ivy.shutdown() except OSError: print("Natnet connection error") - natnet.stop() + natnet.shutdown() ivy.stop() exit(-1)