
Commit

Add files via upload
fyctime052 authored Jan 26, 2024
1 parent 6b6ccb0 commit 806c1d3
Showing 8 changed files with 1,178 additions and 0 deletions.
Empty file.
98 changes: 98 additions & 0 deletions scenario_runner/srunner/autoagents/agent_wrapper.py
@@ -0,0 +1,98 @@
#!/usr/bin/env python

# Copyright (c) 2019 Intel Corporation
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.

"""
Wrapper for autonomous agents required for tracking and checking of used sensors
"""

from __future__ import print_function

import carla

from srunner.autoagents.sensor_interface import CallBack
from srunner.scenariomanager.carla_data_provider import CarlaDataProvider


class AgentWrapper(object):

"""
Wrapper for autonomous agents required for tracking and checking of used sensors
"""

_agent = None
_sensors_list = []

def __init__(self, agent):
"""
Set the autonomous agent
"""
self._agent = agent

def __call__(self):
"""
Pass the call directly to the agent
"""
return self._agent()

def setup_sensors(self, vehicle, debug_mode=False):
"""
Create the sensors defined by the user and attach them to the ego-vehicle
:param vehicle: ego vehicle
:return:
"""
bp_library = CarlaDataProvider.get_world().get_blueprint_library()
for sensor_spec in self._agent.sensors():
            # These are the sensors spawned in the CARLA world
bp = bp_library.find(str(sensor_spec['type']))
if sensor_spec['type'].startswith('sensor.camera'):
bp.set_attribute('image_size_x', str(sensor_spec['width']))
bp.set_attribute('image_size_y', str(sensor_spec['height']))
bp.set_attribute('fov', str(sensor_spec['fov']))
sensor_location = carla.Location(x=sensor_spec['x'], y=sensor_spec['y'],
z=sensor_spec['z'])
sensor_rotation = carla.Rotation(pitch=sensor_spec['pitch'],
roll=sensor_spec['roll'],
yaw=sensor_spec['yaw'])
elif sensor_spec['type'].startswith('sensor.lidar'):
bp.set_attribute('range', str(sensor_spec['range']))
bp.set_attribute('rotation_frequency', str(sensor_spec['rotation_frequency']))
bp.set_attribute('channels', str(sensor_spec['channels']))
bp.set_attribute('upper_fov', str(sensor_spec['upper_fov']))
bp.set_attribute('lower_fov', str(sensor_spec['lower_fov']))
bp.set_attribute('points_per_second', str(sensor_spec['points_per_second']))
sensor_location = carla.Location(x=sensor_spec['x'], y=sensor_spec['y'],
z=sensor_spec['z'])
sensor_rotation = carla.Rotation(pitch=sensor_spec['pitch'],
roll=sensor_spec['roll'],
yaw=sensor_spec['yaw'])
elif sensor_spec['type'].startswith('sensor.other.gnss'):
sensor_location = carla.Location(x=sensor_spec['x'], y=sensor_spec['y'],
z=sensor_spec['z'])
sensor_rotation = carla.Rotation()

# create sensor
sensor_transform = carla.Transform(sensor_location, sensor_rotation)
sensor = CarlaDataProvider.get_world().spawn_actor(bp, sensor_transform, vehicle)
# setup callback
sensor.listen(CallBack(sensor_spec['id'], sensor, self._agent.sensor_interface))
self._sensors_list.append(sensor)

while not self._agent.all_sensors_ready():
if debug_mode:
print(" waiting for one data reading from sensors...")
CarlaDataProvider.get_world().tick()

def cleanup(self):
"""
Remove and destroy all sensors
"""
        for sensor in self._sensors_list:
            if sensor is not None:
                sensor.stop()
                sensor.destroy()
        self._sensors_list = []
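
For reference, a hypothetical sensor-specification list that the setup_sensors() loop above can consume directly. It contains one entry per blueprint family the wrapper handles (camera, lidar, GNSS) and only the keys the code actually reads; the numeric values are illustrative, not part of this commit.

# Illustrative only: one spec per sensor branch handled by AgentWrapper.setup_sensors().
example_sensor_specs = [
    {'type': 'sensor.camera.rgb', 'x': 0.7, 'y': 0.0, 'z': 1.6,
     'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
     'width': 800, 'height': 600, 'fov': 100, 'id': 'Center'},
    {'type': 'sensor.lidar.ray_cast', 'x': 0.7, 'y': 0.0, 'z': 1.6,
     'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
     'range': 50, 'rotation_frequency': 20, 'channels': 32,
     'upper_fov': 10, 'lower_fov': -30, 'points_per_second': 56000,
     'id': 'LIDAR'},
    {'type': 'sensor.other.gnss', 'x': 0.7, 'y': 0.0, 'z': 1.6, 'id': 'GPS'},
]

Each 'id' becomes the key under which CallBack stores that sensor's readings in the agent's SensorInterface.
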
115 changes: 115 additions & 0 deletions scenario_runner/srunner/autoagents/autonomous_agent.py
@@ -0,0 +1,115 @@
#!/usr/bin/env python

# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.

"""
This module provides the base class for all autonomous agents
"""

from __future__ import print_function

import carla

from srunner.autoagents.sensor_interface import SensorInterface
from srunner.scenariomanager.timer import GameTime
from srunner.tools.route_manipulation import downsample_route


class AutonomousAgent(object):

"""
Autonomous agent base class. All user agents have to be derived from this class
"""

def __init__(self, path_to_conf_file):
# current global plans to reach a destination
self._global_plan = None
self._global_plan_world_coord = None

# this data structure will contain all sensor data
self.sensor_interface = SensorInterface()

# agent's initialization
self.setup(path_to_conf_file)

def setup(self, path_to_conf_file):
"""
Initialize everything needed by your agent and set the track attribute to the right type:
"""
pass

def sensors(self): # pylint: disable=no-self-use
"""
Define the sensor suite required by the agent
:return: a list containing the required sensors in the following format:
[
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': -0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
'width': 300, 'height': 200, 'fov': 100, 'id': 'Left'},
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': 0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
'width': 300, 'height': 200, 'fov': 100, 'id': 'Right'},
{'type': 'sensor.lidar.ray_cast', 'x': 0.7, 'y': 0.0, 'z': 1.60, 'yaw': 0.0, 'pitch': 0.0, 'roll': 0.0,
'id': 'LIDAR'}
]
"""
sensors = []

return sensors

def run_step(self, input_data, timestamp):
"""
Execute one step of navigation.
:return: control
"""
control = carla.VehicleControl()
control.steer = 0.0
control.throttle = 0.0
control.brake = 0.0
control.hand_brake = False

return control

def destroy(self):
"""
Destroy (clean-up) the agent
:return:
"""
pass

def __call__(self):
"""
Execute the agent call, e.g. agent()
Returns the next vehicle controls
"""
input_data = self.sensor_interface.get_data()

timestamp = GameTime.get_time()
wallclock = GameTime.get_wallclocktime()
print('======[Agent] Wallclock_time = {} / Sim_time = {}'.format(wallclock, timestamp))

control = self.run_step(input_data, timestamp)
control.manual_gear_shift = False

return control

def all_sensors_ready(self):
"""
Check if all sensors are ready
Returns true if sensors are ready
"""
return self.sensor_interface.all_sensors_ready()

def set_global_plan(self, global_plan_gps, global_plan_world_coord):
"""
Set the plan (route) for the agent
"""

ds_ids = downsample_route(global_plan_world_coord, 1)
self._global_plan_world_coord = [(global_plan_world_coord[x][0], global_plan_world_coord[x][1])
for x in ds_ids]
self._global_plan = [global_plan_gps[x] for x in ds_ids]
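
A minimal derived-agent sketch (not part of this commit) following the contract documented above: setup() sets the agent's parameters, sensors() declares a single RGB camera in the documented format, and run_step() turns the latest camera reading into a carla.VehicleControl. The class name, sensor id and parameter values are illustrative.

import carla

from srunner.autoagents.autonomous_agent import AutonomousAgent


class SingleCameraAgent(AutonomousAgent):

    """
    Illustrative agent: one front-facing RGB camera and a constant forward control
    """

    def setup(self, path_to_conf_file):
        # A real agent would parse path_to_conf_file here; this sketch just sets a constant.
        self._throttle = 0.3

    def sensors(self):
        return [
            {'type': 'sensor.camera.rgb', 'x': 0.7, 'y': 0.0, 'z': 1.6,
             'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
             'width': 800, 'height': 600, 'fov': 100, 'id': 'FrontCamera'},
        ]

    def run_step(self, input_data, timestamp):
        # input_data maps each sensor id to a (frame, data) tuple, as in DummyAgent.run_step below
        frame, image = input_data['FrontCamera']
        control = carla.VehicleControl()
        control.steer = 0.0
        control.throttle = self._throttle
        control.brake = 0.0
        return control
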
83 changes: 83 additions & 0 deletions scenario_runner/srunner/autoagents/dummy_agent.py
@@ -0,0 +1,83 @@
#!/usr/bin/env python

# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.

"""
This module provides a dummy agent to control the ego vehicle
"""

from __future__ import print_function

import carla

from srunner.autoagents.autonomous_agent import AutonomousAgent


class DummyAgent(AutonomousAgent):

"""
Dummy autonomous agent to control the ego vehicle
"""

def setup(self, path_to_conf_file):
"""
Setup the agent parameters
"""

def sensors(self):
"""
Define the sensor suite required by the agent
:return: a list containing the required sensors in the following format:
[
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': -0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
'width': 300, 'height': 200, 'fov': 100, 'id': 'Left'},
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': 0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
'width': 300, 'height': 200, 'fov': 100, 'id': 'Right'},
{'type': 'sensor.lidar.ray_cast', 'x': 0.7, 'y': 0.0, 'z': 1.60, 'yaw': 0.0, 'pitch': 0.0, 'roll': 0.0,
             'id': 'LIDAR'}
        ]
        """
sensors = [{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': 0.0, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': 0.0,
'width': 800, 'height': 600, 'fov': 100, 'id': 'Center'},
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': -0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0,
'yaw': -45.0, 'width': 800, 'height': 600, 'fov': 100, 'id': 'Left'},
{'type': 'sensor.camera.rgb', 'x': 0.7, 'y': 0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0, 'yaw': 45.0,
'width': 800, 'height': 600, 'fov': 100, 'id': 'Right'},
{'type': 'sensor.lidar.ray_cast', 'x': 0.7, 'y': -0.4, 'z': 1.60, 'roll': 0.0, 'pitch': 0.0,
'yaw': -45.0, 'id': 'LIDAR'},
{'type': 'sensor.other.gnss', 'x': 0.7, 'y': -0.4, 'z': 1.60, 'id': 'GPS'},
{'type': 'sensor.can_bus', 'reading_frequency': 25, 'id': 'can_bus'},
{'type': 'sensor.hd_map', 'reading_frequency': 1, 'id': 'hdmap'},
]

return sensors

def run_step(self, input_data, timestamp):
"""
Execute one step of navigation.
"""
print("=====================>")
for key, val in input_data.items():
if hasattr(val[1], 'shape'):
shape = val[1].shape
print("[{} -- {:06d}] with shape {}".format(key, val[0], shape))
else:
print("[{} -- {:06d}] ".format(key, val[0]))
print("<=====================")

# DO SOMETHING SMART

# RETURN CONTROL
control = carla.VehicleControl()
control.steer = 0.0
control.throttle = 0.0
control.brake = 0.0
control.hand_brake = False

return control
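
To show how these files fit together, here is a hedged end-to-end sketch (not part of this commit): an AgentWrapper attaches the agent's declared sensors to an ego vehicle and the returned control is applied on every tick. It assumes a CARLA server on localhost:2000, that CarlaDataProvider.set_world() registers the world as in upstream scenario_runner, and that the SingleCameraAgent sketched after autonomous_agent.py above is defined in the same script; a camera-only agent is used because the wrapper above only spawns camera, lidar and GNSS blueprints. All other names and values are illustrative.

import carla

from srunner.autoagents.agent_wrapper import AgentWrapper
from srunner.scenariomanager.carla_data_provider import CarlaDataProvider

# Assumption: a CARLA server is already running and SingleCameraAgent
# (sketched above, after autonomous_agent.py) is defined in this script.
client = carla.Client('localhost', 2000)
world = client.get_world()

settings = world.get_settings()
settings.synchronous_mode = True        # setup_sensors() and the loop below call world.tick()
settings.fixed_delta_seconds = 0.05
world.apply_settings(settings)

CarlaDataProvider.set_world(world)      # AgentWrapper fetches the world from the provider

blueprint = world.get_blueprint_library().filter('vehicle.tesla.model3')[0]
spawn_point = world.get_map().get_spawn_points()[0]
ego_vehicle = world.spawn_actor(blueprint, spawn_point)

agent = SingleCameraAgent(path_to_conf_file=None)
wrapper = AgentWrapper(agent)
wrapper.setup_sensors(ego_vehicle, debug_mode=True)

try:
    for _ in range(200):
        world.tick()
        ego_vehicle.apply_control(wrapper())   # AgentWrapper.__call__ -> agent.run_step
finally:
    wrapper.cleanup()
    agent.destroy()
    ego_vehicle.destroy()
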