# Example

After you have successfully uploaded a ROSBAG-type scenario file to the Scenario Management module and selected it for simulation evaluation, the corresponding algorithm image must implement the logic for processing the rosbag and for converting the evaluation target data into protobuf before the evaluation can run. This section provides an example you can use as a reference for that processing. The example shows only one possible approach; what the evaluation ultimately needs is a data file that conforms to the protobuf message specification, and you are free to decide how to produce it.

# Algorithm Image

  FROM *********    # base image
  # Create a rospkg with a roslaunch file, to make rosbag play and record easier to handle
  RUN mkdir -p /opt/record_ws/src
  WORKDIR /opt/record_ws/src
  RUN /bin/bash -c "source /ros_entrypoint.sh" \
  && catkin_create_pkg record_pkg std_msgs rospy roscpp \
  && cd ../ \
  && /bin/bash -c '. /opt/ros/melodic/setup.bash; cd /opt/record_ws; catkin_make' \
  && echo "source /opt/record_ws/devel/setup.bash" >> ~/.bashrc \
  && /bin/bash -c "source /opt/record_ws/devel/setup.bash" \
  && mkdir /opt/record_ws/src/record_pkg/launch
  WORKDIR /opt/record_ws/src/record_pkg/launch
  ADD record.launch ./
  # Place the files that convert the recorded rosbag data into a pb file in an appropriate location (depends on your own logic)
  WORKDIR /opt/bag_to_proto/
  ADD bag_to_proto /opt/bag_to_proto/
  RUN pip3 install -r requirements.txt
  # resim.sh manages the startup commands
  WORKDIR /
  ADD resim.sh /
  RUN chmod +x /resim.sh

Files that may be involved in building the example image:

  1. record.launch

     <launch>
       <node pkg="rosbag" type="play" name="player" output="screen"
             args="/opt/rosbag/bag/bagname
                   /planning/planning_info:=/planning/planning_info_1
                   /planning/trajectory:=/planning/trajectory_1
                   /planning/trajectory_viz:=/planning/trajectory_viz_1
                   /localization_info:=/localization_info_1"
             required="true"/>
       <node pkg="rosbag" type="record" name="bag_record"
             args="/localization_info -O /opt/rosbag/bag/new_evaluate.bag"/>
     </launch>
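
     Here the play node remaps the topics stored in the scenario bag to *_1 names (presumably so that the replayed data does not clash with the same topics produced live by the algorithm modules), while the record node captures the fresh /localization_info output into /opt/rosbag/bag/new_evaluate.bag; required="true" shuts the launch down once playback finishes. After the launch exits, the recorded bag can be inspected, for example with bagpy's topic summary (a minimal sketch, assuming the paths above):

        # sketch: list the topics captured in the newly recorded bag
        from bagpy import bagreader

        b = bagreader("/opt/rosbag/bag/new_evaluate.bag")
        print(b.topic_table)  # should contain /localization_info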
    
  2. pb conversion

    1. evaluation_input.proto, the file that defines the standardized msg;

    2. Use the _protoc compiler_ to generate from the proto file the language-specific bindings for the msg; the file generated in this example is

      proto/evaluation_input_pb2.py
      
    3. Conversion process (rosbag_to_pb.py)

       #!/usr/bin/env python
       import os
       import time
       import math

       from bagpy import bagreader
       import pandas as pd

       from proto.evaluation_input_pb2 import EgoStateResult
       from proto.evaluation_input_pb2 import EgoStateFrame

       start_time, sim_time, simulation_frequency = (0, 0, 0)


       def parse_rosbag(bag_path: str):
           """Read the recorded bag and dump the ego state as a serialized pb file."""
           b = bagreader(bag_path)
           msg_localization = b.message_by_topic('/localization_info')
           pandas_localization = pd.read_csv(msg_localization)
           # create pb object of ego_state
           ego_state_object = EgoStateResult()
           handle_localization(pandas_localization, ego_state_object)
           ego_state_data = ego_state_object.SerializeToString()
           # dump pb bytes data to a file
           with open("/opt/rosbag/bag/ego_state.pb", "wb") as fb:
               fb.write(ego_state_data)


       def handle_localization(pandas_localization, ego_state_object):
           """Convert every localization frame into an EgoStateFrame."""
           secs_time = pandas_localization['header.stamp.secs']
           nsecs_time = pandas_localization['header.stamp.nsecs']
           total_frame = len(secs_time)
           for frame_num in range(total_frame):
               handle_timestamp(frame_num, secs_time[frame_num], nsecs_time[frame_num])
               convert_localization_to_pb_ego_state(pandas_localization, ego_state_object, frame_num)


       def handle_timestamp(frame_num, secs_time, nsecs_time):
           """Track the simulation time (relative to the first frame) and frequency."""
           global start_time, sim_time, simulation_frequency
           secs = secs_time
           nsecs = nsecs_time
           if frame_num == 0:
               start_time = secs + nsecs / 10 ** 9
               sim_time = 0
           elif frame_num == 1:
               period = secs + nsecs / 10 ** 9 - start_time
               simulation_frequency = int(1 / period)
               sim_time = secs + nsecs / 10 ** 9 - start_time
           else:
               sim_time = secs + nsecs / 10 ** 9 - start_time


       def convert_localization_to_pb_ego_state(pandas_localization, ego_state_object, frame_num):
           """Fill one EgoStateFrame from the localization data and append it to the result."""
           ego_state_frame = EgoStateFrame()
           ego_state_frame.sim_time = sim_time
           ego_state_frame.ego_state.id = 1
           ego_state_frame.ego_state.type = 1
           # vehicle geometry (fixed example values)
           ego_state_frame.ego_state.geo.dimX = 4.866
           ego_state_frame.ego_state.geo.dimY = 1.832
           ego_state_frame.ego_state.geo.dimZ = 1.464
           ego_state_frame.ego_state.geo.offX = 4.866
           ego_state_frame.ego_state.geo.offY = 1.832
           ego_state_frame.ego_state.geo.offZ = 1.464
           # position
           ego_state_frame.ego_state.pos.x = pandas_localization['pose.position.x'][frame_num]
           ego_state_frame.ego_state.pos.y = pandas_localization['pose.position.y'][frame_num]
           ego_state_frame.ego_state.pos.z = pandas_localization['pose.position.z'][frame_num]
           # orientation: quaternion -> euler angles
           orientation_x = pandas_localization['pose.orientation.x'][frame_num]
           orientation_y = pandas_localization['pose.orientation.y'][frame_num]
           orientation_z = pandas_localization['pose.orientation.z'][frame_num]
           orientation_w = pandas_localization['pose.orientation.w'][frame_num]
           roll_angle, pitch_angle, yaw_angle = to_euler_angles(
               orientation_w, orientation_x, orientation_y, orientation_z)
           ego_state_frame.ego_state.pos.h = yaw_angle
           ego_state_frame.ego_state.pos.p = pitch_angle
           ego_state_frame.ego_state.pos.r = roll_angle
           # linear and angular velocity
           ego_state_frame.ego_state.speed.x = pandas_localization['velocity.linear.x'][frame_num]
           ego_state_frame.ego_state.speed.y = pandas_localization['velocity.linear.y'][frame_num]
           ego_state_frame.ego_state.speed.z = pandas_localization['velocity.linear.z'][frame_num]
           ego_state_frame.ego_state.speed.h = pandas_localization['velocity.angular.z'][frame_num]
           ego_state_frame.ego_state.speed.p = pandas_localization['velocity.angular.y'][frame_num]
           ego_state_frame.ego_state.speed.r = pandas_localization['velocity.angular.x'][frame_num]
           # acceleration (the example reuses the linear components for h/p/r)
           ego_state_frame.ego_state.acceleration.x = pandas_localization['acceleration.linear.x'][frame_num]
           ego_state_frame.ego_state.acceleration.y = pandas_localization['acceleration.linear.y'][frame_num]
           ego_state_frame.ego_state.acceleration.z = pandas_localization['acceleration.linear.z'][frame_num]
           ego_state_frame.ego_state.acceleration.h = pandas_localization['acceleration.linear.z'][frame_num]
           ego_state_frame.ego_state.acceleration.p = pandas_localization['acceleration.linear.y'][frame_num]
           ego_state_frame.ego_state.acceleration.r = pandas_localization['acceleration.linear.x'][frame_num]
           ego_state_object.ego_state_frame.extend([ego_state_frame])


       def to_euler_angles(w, x, y, z):
           """Convert a quaternion (w, x, y, z) to euler angles (roll, pitch, yaw)."""
           roll = math.atan2(2 * (w * x + y * z), 1 - 2 * (x * x + y * y))
           pitch = math.asin(2 * (w * y - z * x))
           yaw = math.atan2(2 * (w * z + x * y), 1 - 2 * (z * z + y * y))
           angles = (roll, pitch, yaw)
           return angles


       if __name__ == "__main__":
           bag_file_path = "/opt/rosbag/bag/new_evaluate.bag"
           # wait until rosbag record has finished and the bag file exists
           while True:
               time.sleep(1)
               print("waiting for bag file")
               if os.path.exists(bag_file_path):
                   break
           parse_rosbag(bag_file_path)
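
       As a quick sanity check, the serialized result can be parsed back with the same generated evaluation_input_pb2 module (a minimal sketch, reusing the field names from the example above):

          # sketch: parse ego_state.pb back and print a few fields per frame
          from proto.evaluation_input_pb2 import EgoStateResult

          result = EgoStateResult()
          with open("/opt/rosbag/bag/ego_state.pb", "rb") as fb:
              result.ParseFromString(fb.read())
          for frame in result.ego_state_frame:
              print(frame.sim_time, frame.ego_state.pos.x, frame.ego_state.pos.y)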
      
  3. resim.sh

    #!/bin/bash
    # Resolve the resources and their paths
    BAG_NAME=${BAG_FILE_NAME} # /opt/rosbag/bag/xxx.bag
    XODR_PATH=${OPENDRIVE_PATH} # /opt/rosbag/xodr/xxx.xodr
    source /ros_entrypoint.sh
    source /root/demo/devel/setup.bash
    roscore &
    # Start the algorithm modules
    rosrun planner demo_planning_node &
    rosrun controller demo_controller_node &
    # Wait 30s to make sure the algorithm modules have finished starting
    sleep 30
    # Substitute the resource name used by rosbag play & record
    source /opt/record_ws/devel/setup.bash
    # use "|" as the sed delimiter because the bag path contains "/"
    sed -i "s|bagname|${BAG_NAME}|g" /opt/record_ws/src/record_pkg/launch/record.launch
    roslaunch record_pkg record.launch &
    # Convert the newly recorded /opt/rosbag/bag/new_evaluate.bag into a pb file
    python3 /opt/bag_to_proto/rosbag_to_pb.py
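
     Taken together, resim.sh drives the whole run: it starts roscore and the algorithm modules, replays the scenario bag while recording their output via record.launch, and finally runs the conversion script. Since required="true" stops the launch when playback ends, and rosbag record only renames its .bag.active file to new_evaluate.bag at that point, the wait loop in rosbag_to_pb.py effectively waits for recording to finish before producing ego_state.pb.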
    