diff --git a/.github/sync-files.yaml b/.github/sync-files.yaml index 7573fcb5b9226..e4682a94906db 100644 --- a/.github/sync-files.yaml +++ b/.github/sync-files.yaml @@ -31,17 +31,29 @@ - repository: autowarefoundation/autoware_common files: - source: .github/workflows/build-and-test.yaml - post-commands: 'sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest}' + post-commands: | + sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest} + sd -f ms '(rosdistro: humble.*?build-depends-repos): build_depends.repos' '$1: build_depends.humble.repos' {dest} - source: .github/workflows/build-and-test-differential.yaml - post-commands: 'sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest}' + post-commands: | + sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest} + sd -f ms '(rosdistro: humble.*?build-depends-repos): build_depends.repos' '$1: build_depends.humble.repos' {dest} - source: .github/workflows/build-and-test-differential-self-hosted.yaml - post-commands: 'sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest}' + post-commands: | + sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest} + sd -f ms '(rosdistro: humble.*?build-depends-repos): build_depends.repos' '$1: build_depends.humble.repos' {dest} - source: .github/workflows/build-and-test-self-hosted.yaml - post-commands: 'sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest}' + post-commands: | + sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest} + sd -f ms '(rosdistro: humble.*?build-depends-repos): build_depends.repos' '$1: build_depends.humble.repos' {dest} - source: .github/workflows/check-build-depends.yaml - post-commands: 'sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest}' + post-commands: | + sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest} + sd -f ms '(rosdistro: humble.*?build-depends-repos): build_depends.repos' '$1: build_depends.humble.repos' {dest} - source: .github/workflows/clang-tidy-pr-comments.yaml - post-commands: 'sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest}' + post-commands: | + sd "container: ros:(\w+)" "container: ghcr.io/autowarefoundation/autoware-universe:\$1-latest-cuda" {dest} + sd -f ms '(rosdistro: humble.*?build-depends-repos): build_depends.repos' '$1: build_depends.humble.repos' {dest} - source: codecov.yaml - repository: autowarefoundation/autoware-documentation diff --git a/.github/workflows/build-and-test-differential-self-hosted.yaml b/.github/workflows/build-and-test-differential-self-hosted.yaml index 3ae9581dcb24a..b8ce6bd47be15 100644 --- a/.github/workflows/build-and-test-differential-self-hosted.yaml +++ b/.github/workflows/build-and-test-differential-self-hosted.yaml @@ -18,7 +18,20 @@ jobs: needs: prevent-no-label-execution if: ${{ needs.prevent-no-label-execution.outputs.run == 'true' }} runs-on: [self-hosted, linux, ARM64] - container: ghcr.io/autowarefoundation/autoware-universe:galactic-latest-cuda + container: ${{ matrix.container }} + strategy: + fail-fast: false + matrix: + rosdistro: + - 
galactic + - humble + include: + - rosdistro: galactic + container: ghcr.io/autowarefoundation/autoware-universe:galactic-latest-cuda + build-depends-repos: build_depends.repos + - rosdistro: humble + container: ghcr.io/autowarefoundation/autoware-universe:humble-latest-cuda + build-depends-repos: build_depends.humble.repos steps: - name: Check out repository uses: actions/checkout@v3 @@ -36,14 +49,14 @@ jobs: if: ${{ steps.get-modified-packages.outputs.modified-packages != '' }} uses: autowarefoundation/autoware-github-actions/colcon-build@v1 with: - rosdistro: galactic + rosdistro: ${{ matrix.rosdistro }} target-packages: ${{ steps.get-modified-packages.outputs.modified-packages }} - build-depends-repos: build_depends.repos + build-depends-repos: ${{ matrix.build-depends-repos }} - name: Test if: ${{ steps.get-modified-packages.outputs.modified-packages != '' }} uses: autowarefoundation/autoware-github-actions/colcon-test@v1 with: - rosdistro: galactic + rosdistro: ${{ matrix.rosdistro }} target-packages: ${{ steps.get-modified-packages.outputs.modified-packages }} - build-depends-repos: build_depends.repos + build-depends-repos: ${{ matrix.build-depends-repos }} diff --git a/.github/workflows/build-and-test-differential.yaml b/.github/workflows/build-and-test-differential.yaml index b3e6cdabcb22b..b0ec463e1d9ee 100644 --- a/.github/workflows/build-and-test-differential.yaml +++ b/.github/workflows/build-and-test-differential.yaml @@ -6,7 +6,20 @@ on: jobs: build-and-test-differential: runs-on: ubuntu-latest - container: ghcr.io/autowarefoundation/autoware-universe:galactic-latest-cuda + container: ${{ matrix.container }} + strategy: + fail-fast: false + matrix: + rosdistro: + - galactic + - humble + include: + - rosdistro: galactic + container: ghcr.io/autowarefoundation/autoware-universe:galactic-latest-cuda + build-depends-repos: build_depends.repos + - rosdistro: humble + container: ghcr.io/autowarefoundation/autoware-universe:humble-latest-cuda + build-depends-repos: build_depends.humble.repos steps: - name: Cancel previous runs uses: styfle/cancel-workflow-action@0.9.1 @@ -27,18 +40,18 @@ jobs: if: ${{ steps.get-modified-packages.outputs.modified-packages != '' }} uses: autowarefoundation/autoware-github-actions/colcon-build@v1 with: - rosdistro: galactic + rosdistro: ${{ matrix.rosdistro }} target-packages: ${{ steps.get-modified-packages.outputs.modified-packages }} - build-depends-repos: build_depends.repos + build-depends-repos: ${{ matrix.build-depends-repos }} - name: Test id: test if: ${{ steps.get-modified-packages.outputs.modified-packages != '' }} uses: autowarefoundation/autoware-github-actions/colcon-test@v1 with: - rosdistro: galactic + rosdistro: ${{ matrix.rosdistro }} target-packages: ${{ steps.get-modified-packages.outputs.modified-packages }} - build-depends-repos: build_depends.repos + build-depends-repos: ${{ matrix.build-depends-repos }} - name: Upload coverage to CodeCov if: ${{ steps.test.outputs.coverage-report-files != '' }} diff --git a/.github/workflows/build-and-test-self-hosted.yaml b/.github/workflows/build-and-test-self-hosted.yaml index 8b3a039e9d3b6..bf6dcbc606b0b 100644 --- a/.github/workflows/build-and-test-self-hosted.yaml +++ b/.github/workflows/build-and-test-self-hosted.yaml @@ -8,7 +8,20 @@ on: jobs: build-and-test-self-hosted: runs-on: [self-hosted, linux, ARM64] - container: ghcr.io/autowarefoundation/autoware-universe:galactic-latest-cuda + container: ${{ matrix.container }} + strategy: + fail-fast: false + matrix: + rosdistro: + - 
galactic + - humble + include: + - rosdistro: galactic + container: ghcr.io/autowarefoundation/autoware-universe:galactic-latest-cuda + build-depends-repos: build_depends.repos + - rosdistro: humble + container: ghcr.io/autowarefoundation/autoware-universe:humble-latest-cuda + build-depends-repos: build_depends.humble.repos steps: - name: Check out repository uses: actions/checkout@v3 @@ -24,14 +37,14 @@ jobs: if: ${{ steps.get-self-packages.outputs.self-packages != '' }} uses: autowarefoundation/autoware-github-actions/colcon-build@v1 with: - rosdistro: galactic + rosdistro: ${{ matrix.rosdistro }} target-packages: ${{ steps.get-self-packages.outputs.self-packages }} - build-depends-repos: build_depends.repos + build-depends-repos: ${{ matrix.build-depends-repos }} - name: Test if: ${{ steps.get-self-packages.outputs.self-packages != '' }} uses: autowarefoundation/autoware-github-actions/colcon-test@v1 with: - rosdistro: galactic + rosdistro: ${{ matrix.rosdistro }} target-packages: ${{ steps.get-self-packages.outputs.self-packages }} - build-depends-repos: build_depends.repos + build-depends-repos: ${{ matrix.build-depends-repos }} diff --git a/.github/workflows/build-and-test.yaml b/.github/workflows/build-and-test.yaml index 6541af9c05b34..911e05ba1c23a 100644 --- a/.github/workflows/build-and-test.yaml +++ b/.github/workflows/build-and-test.yaml @@ -10,7 +10,20 @@ jobs: build-and-test: if: ${{ github.event_name != 'push' || github.ref_name == github.event.repository.default_branch }} runs-on: ubuntu-latest - container: ghcr.io/autowarefoundation/autoware-universe:galactic-latest-cuda + container: ${{ matrix.container }} + strategy: + fail-fast: false + matrix: + rosdistro: + - galactic + - humble + include: + - rosdistro: galactic + container: ghcr.io/autowarefoundation/autoware-universe:galactic-latest-cuda + build-depends-repos: build_depends.repos + - rosdistro: humble + container: ghcr.io/autowarefoundation/autoware-universe:humble-latest-cuda + build-depends-repos: build_depends.humble.repos steps: - name: Check out repository uses: actions/checkout@v3 @@ -26,18 +39,18 @@ jobs: if: ${{ steps.get-self-packages.outputs.self-packages != '' }} uses: autowarefoundation/autoware-github-actions/colcon-build@v1 with: - rosdistro: galactic + rosdistro: ${{ matrix.rosdistro }} target-packages: ${{ steps.get-self-packages.outputs.self-packages }} - build-depends-repos: build_depends.repos + build-depends-repos: ${{ matrix.build-depends-repos }} - name: Test if: ${{ steps.get-self-packages.outputs.self-packages != '' }} id: test uses: autowarefoundation/autoware-github-actions/colcon-test@v1 with: - rosdistro: galactic + rosdistro: ${{ matrix.rosdistro }} target-packages: ${{ steps.get-self-packages.outputs.self-packages }} - build-depends-repos: build_depends.repos + build-depends-repos: ${{ matrix.build-depends-repos }} - name: Upload coverage to CodeCov if: ${{ steps.test.outputs.coverage-report-files != '' }} diff --git a/build_depends.humble.repos b/build_depends.humble.repos new file mode 100644 index 0000000000000..2aacc0390a97f --- /dev/null +++ b/build_depends.humble.repos @@ -0,0 +1,35 @@ +repositories: + # core + core/autoware_msgs: + type: git + url: https://github.com/tier4/autoware_auto_msgs.git # TODO(Tier IV): Move to autowarefoundation/autoware_msgs + version: tier4/main + core/common: + type: git + url: https://github.com/autowarefoundation/autoware_common.git + version: main + core/autoware: + type: git + url: https://github.com/autowarefoundation/autoware.core.git + 
version: main + # universe + universe/tier4_autoware_msgs: + type: git + url: https://github.com/tier4/AutowareArchitectureProposal_msgs.git # TODO(Tier IV): Rename to tier4/tier4_autoware_msgs + version: tier4/universe + universe/vendor/grid_map: + type: git + url: https://github.com/tier4/grid_map.git + version: prepare/humble + universe/vendor/navigation2: + type: git + url: https://github.com/tier4/navigation2.git + version: prepare/humble + universe/vendor/mussp: + type: git + url: https://github.com/tier4/muSSP.git + version: tier4/main + universe/vendor/ndt_omp: + type: git + url: https://github.com/tier4/ndt_omp.git + version: tier4/main diff --git a/common/tier4_planning_rviz_plugin/include/path_with_lane_id_footprint/display.hpp b/common/tier4_planning_rviz_plugin/include/path_with_lane_id_footprint/display.hpp index bfad59025b4a7..4cbcb9a47daa2 100644 --- a/common/tier4_planning_rviz_plugin/include/path_with_lane_id_footprint/display.hpp +++ b/common/tier4_planning_rviz_plugin/include/path_with_lane_id_footprint/display.hpp @@ -24,6 +24,7 @@ #include #include #include +#include #include #include @@ -35,6 +36,8 @@ #include #include +#include +#include namespace rviz_plugins { @@ -67,7 +70,8 @@ private Q_SLOTS: rviz_common::properties::FloatProperty * property_vehicle_length_; rviz_common::properties::FloatProperty * property_vehicle_width_; rviz_common::properties::FloatProperty * property_rear_overhang_; - + rviz_common::properties::BoolProperty * property_lane_id_view_; + rviz_common::properties::FloatProperty * property_lane_id_scale_; struct VehicleFootprintInfo { VehicleFootprintInfo(const float l, const float w, const float r) @@ -79,10 +83,16 @@ private Q_SLOTS: std::shared_ptr vehicle_info_; std::shared_ptr vehicle_footprint_info_; + using LaneIdObject = + std::pair, std::unique_ptr>; + std::vector lane_id_obj_ptrs_; + private: autoware_auto_planning_msgs::msg::PathWithLaneId::ConstSharedPtr last_msg_ptr_; bool validateFloats( const autoware_auto_planning_msgs::msg::PathWithLaneId::ConstSharedPtr & msg_ptr); + + void allocateLaneIdObjects(const std::size_t size); }; } // namespace rviz_plugins diff --git a/common/tier4_planning_rviz_plugin/src/path_with_lane_id_footprint/display.cpp b/common/tier4_planning_rviz_plugin/src/path_with_lane_id_footprint/display.cpp index d3378b887bd12..c1eb332b4332d 100644 --- a/common/tier4_planning_rviz_plugin/src/path_with_lane_id_footprint/display.cpp +++ b/common/tier4_planning_rviz_plugin/src/path_with_lane_id_footprint/display.cpp @@ -42,6 +42,11 @@ AutowarePathWithLaneIdFootprintDisplay::AutowarePathWithLaneIdFootprintDisplay() property_vehicle_width_->setMin(0.0); property_rear_overhang_->setMin(0.0); + property_lane_id_view_ = new rviz_common::properties::BoolProperty( + "View LaneId", true, "", this, SLOT(updateVisualization()), this); + property_lane_id_scale_ = new rviz_common::properties::FloatProperty( + "Scale", 0.1, "", property_lane_id_view_, SLOT(updateVisualization()), this); + updateVehicleInfo(); } @@ -65,6 +70,12 @@ void AutowarePathWithLaneIdFootprintDisplay::reset() { MFDClass::reset(); path_footprint_manual_object_->clear(); + + for (const auto & e : lane_id_obj_ptrs_) { + scene_node_->removeChild(e.first.get()); + } + lane_id_obj_ptrs_.clear(); + lane_id_obj_ptrs_.shrink_to_fit(); } bool AutowarePathWithLaneIdFootprintDisplay::validateFloats( @@ -78,6 +89,28 @@ bool AutowarePathWithLaneIdFootprintDisplay::validateFloats( return true; } +void AutowarePathWithLaneIdFootprintDisplay::allocateLaneIdObjects(const 
std::size_t size) +{ + if (size > lane_id_obj_ptrs_.size()) { + for (std::size_t i = lane_id_obj_ptrs_.size(); i < size; i++) { + std::unique_ptr node_ptr; + node_ptr.reset(scene_node_->createChildSceneNode()); + auto text_ptr = + std::make_unique("not initialized", "Liberation Sans", 0.1); + text_ptr->setVisible(false); + text_ptr->setTextAlignment( + rviz_rendering::MovableText::H_CENTER, rviz_rendering::MovableText::V_ABOVE); + node_ptr->attachObject(text_ptr.get()); + lane_id_obj_ptrs_.push_back(std::make_pair(std::move(node_ptr), std::move(text_ptr))); + } + } else { + for (std::size_t i = lane_id_obj_ptrs_.size() - 1; i >= size; i--) { + scene_node_->removeChild(lane_id_obj_ptrs_.at(i).first.get()); + } + lane_id_obj_ptrs_.resize(size); + } +} + void AutowarePathWithLaneIdFootprintDisplay::processMessage( const autoware_auto_planning_msgs::msg::PathWithLaneId::ConstSharedPtr msg_ptr) { @@ -126,6 +159,8 @@ void AutowarePathWithLaneIdFootprintDisplay::processMessage( path_footprint_manual_object_->begin( "BaseWhiteNoLighting", Ogre::RenderOperation::OT_LINE_LIST); + allocateLaneIdObjects(msg_ptr->points.size()); + for (size_t point_idx = 0; point_idx < msg_ptr->points.size(); point_idx++) { const auto & path_point = msg_ptr->points.at(point_idx); /* @@ -172,6 +207,28 @@ void AutowarePathWithLaneIdFootprintDisplay::processMessage( } } } + + // LaneId + if (property_lane_id_view_->getBool()) { + Ogre::Vector3 position; + position.x = path_point.point.pose.position.x; + position.y = path_point.point.pose.position.y; + position.z = path_point.point.pose.position.z; + auto & node_ptr = lane_id_obj_ptrs_.at(point_idx).first; + node_ptr->setPosition(position); + + const auto & text_ptr = lane_id_obj_ptrs_.at(point_idx).second; + std::string lane_ids_str = ""; + for (const auto & e : path_point.lane_ids) { + lane_ids_str += std::to_string(e) + ", "; + } + text_ptr->setCaption(lane_ids_str); + text_ptr->setCharacterHeight(property_lane_id_scale_->getFloat()); + text_ptr->setVisible(true); + } else { + const auto & text_ptr = lane_id_obj_ptrs_.at(point_idx).second; + text_ptr->setVisible(false); + } } path_footprint_manual_object_->end(); diff --git a/launch/tier4_autoware_api_launch/launch/include/internal_api_relay.launch.xml b/launch/tier4_autoware_api_launch/launch/include/internal_api_relay.launch.xml index 810e6a566bd3a..4723621f84d68 100644 --- a/launch/tier4_autoware_api_launch/launch/include/internal_api_relay.launch.xml +++ b/launch/tier4_autoware_api_launch/launch/include/internal_api_relay.launch.xml @@ -1,26 +1,23 @@ - - - - - - - - - - - - - - - + + + + + + - - - - - + diff --git a/launch/tier4_perception_launch/config/object_recognition/detection/detection_preprocess.param.yaml b/launch/tier4_perception_launch/config/object_recognition/detection/pointcloud_map_filter.param.yaml similarity index 100% rename from launch/tier4_perception_launch/config/object_recognition/detection/detection_preprocess.param.yaml rename to launch/tier4_perception_launch/config/object_recognition/detection/pointcloud_map_filter.param.yaml diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml index e55b64315dfb3..8ab5267ad8230 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml +++ 
b/launch/tier4_perception_launch/launch/object_recognition/detection/camera_lidar_fusion_based_detection.launch.xml @@ -39,7 +39,7 @@ - + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml b/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml index b898710d9ae9b..87c9d32ddcc7b 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/lidar_based_detection.launch.xml @@ -9,7 +9,7 @@ - + diff --git a/launch/tier4_perception_launch/launch/object_recognition/detection/detection_preprocess.launch.py b/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py similarity index 87% rename from launch/tier4_perception_launch/launch/object_recognition/detection/detection_preprocess.launch.py rename to launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py index ea276a55b6d86..2990429d61ef1 100644 --- a/launch/tier4_perception_launch/launch/object_recognition/detection/detection_preprocess.launch.py +++ b/launch/tier4_perception_launch/launch/object_recognition/detection/pointcloud_map_filter.launch.py @@ -27,17 +27,17 @@ import yaml -class DetectionPreProcessPipeline: +class PointcloudMapFilterPipeline: def __init__(self, context): - detection_preprocess_param_path = os.path.join( - get_package_share_directory("perception_launch"), - "config/object_recognition/detection/detection_preprocess.param.yaml", + pointcloud_map_filter_param_path = os.path.join( + get_package_share_directory("tier4_perception_launch"), + "config/object_recognition/detection/pointcloud_map_filter.param.yaml", ) - with open(detection_preprocess_param_path, "r") as f: - self.detection_preprocess_param = yaml.safe_load(f)["/**"]["ros__parameters"] - self.use_down_sample_filter = self.detection_preprocess_param["use_down_sample_filter"] - self.voxel_size = self.detection_preprocess_param["down_sample_voxel_size"] - self.distance_threshold = self.detection_preprocess_param["distance_threshold"] + with open(pointcloud_map_filter_param_path, "r") as f: + self.pointcloud_map_filter_param = yaml.safe_load(f)["/**"]["ros__parameters"] + self.use_down_sample_filter = self.pointcloud_map_filter_param["use_down_sample_filter"] + self.voxel_size = self.pointcloud_map_filter_param["down_sample_voxel_size"] + self.distance_threshold = self.pointcloud_map_filter_param["distance_threshold"] def create_pipeline(self): if self.use_down_sample_filter: @@ -69,7 +69,7 @@ def create_normal_pipeline(self): ) return components - def create_down_sample_pipeline(self, output_topic): + def create_down_sample_pipeline(self): components = [] down_sample_topic = ( "/perception/obstacle_segmentation/pointcloud_map_filtered/downsampled/pointcloud" @@ -119,7 +119,7 @@ def create_down_sample_pipeline(self, output_topic): def launch_setup(context, *args, **kwargs): - pipeline = DetectionPreProcessPipeline(context) + pipeline = PointcloudMapFilterPipeline(context) components = [] components.extend(pipeline.create_pipeline()) individual_container = ComposableNodeContainer( @@ -150,7 +150,7 @@ def add_launch_arg(name: str, default_value=None): add_launch_arg("use_multithread", "False") add_launch_arg("use_intra_process", "True") add_launch_arg("use_pointcloud_container", "False") - add_launch_arg("container_name", 
"detection_preprocess_pipeline_container") + add_launch_arg("container_name", "pointcloud_map_filter_pipeline_container") set_container_executable = SetLaunchConfiguration( "container_executable", "component_container", diff --git a/perception/lidar_apollo_instance_segmentation/CMakeLists.txt b/perception/lidar_apollo_instance_segmentation/CMakeLists.txt index 433999051097d..8fff790a64790 100644 --- a/perception/lidar_apollo_instance_segmentation/CMakeLists.txt +++ b/perception/lidar_apollo_instance_segmentation/CMakeLists.txt @@ -65,35 +65,49 @@ else() set(CUDNN_AVAIL OFF) endif() -find_program(GDOWN_AVAIL "gdown") -if(NOT GDOWN_AVAIL) - message("gdown: command not found. External files could not be downloaded.") -endif() if(TRT_AVAIL AND CUDA_AVAIL AND CUDNN_AVAIL) # download weight files - set(PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") - if(NOT EXISTS "${PATH}") - execute_process(COMMAND mkdir -p ${PATH}) - endif() - set(FILE "${PATH}/vlp-16.caffemodel") - message(STATUS "Checking and downloading vlp-16.caffemodel") - if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process(COMMAND gdown "https://drive.google.com/uc?id=1bbO_eeGG5HUqyUiAYjOd6hUn-Zma0mMJ" -O ${PATH}/vlp-16.caffemodel) - endif() - set(FILE "${PATH}/hdl-64.caffemodel") - message(STATUS "Checking and downloading hdl-64.caffemodel") - if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process(COMMAND gdown "https://drive.google.com/uc?id=1ZdB6V3jua3GmtYuY9NR1nc9QZe_ChjkP" -O ${PATH}/hdl-64.caffemodel) - endif() - set(FILE "${PATH}/vls-128.caffemodel") - message(STATUS "Checking and downloading vls-128.caffemodel") - if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process(COMMAND gdown "https://drive.google.com/uc?id=1izpNotNxS6mrYIzJwHQ_EyX_IPDU-MBr" -O ${PATH}/vls-128.caffemodel) + set(DATA_PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") + if(NOT EXISTS "${DATA_PATH}") + execute_process(COMMAND mkdir -p ${DATA_PATH}) endif() + function(download FILE_NAME FILE_HASH) + message(STATUS "Checking and downloading ${FILE_NAME}") + set(FILE_PATH ${DATA_PATH}/${FILE_NAME}) + set(STATUS_CODE 0) + message(STATUS "start ${FILE_NAME}") + if(EXISTS ${FILE_PATH}) + message(STATUS "found ${FILE_NAME}") + file(MD5 ${FILE_PATH} EXISTING_FILE_HASH) + if(${FILE_HASH} STREQUAL ${EXISTING_FILE_HASH}) + message(STATUS "same ${FILE_NAME}") + message(STATUS "File already exists.") + else() + message(STATUS "diff ${FILE_NAME}") + message(STATUS "File hash changes. Downloading now ...") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 300) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) + endif() + else() + message(STATUS "not found ${FILE_NAME}") + message(STATUS "File doesn't exists. 
Downloading now ...") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 300) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) + endif() + if(${STATUS_CODE} EQUAL 0) + message(STATUS "Download completed successfully!") + else() + message(FATAL_ERROR "Error occurred during download: ${ERROR_MESSAGE}") + endif() + endfunction() + + download(vlp-16.caffemodel f79f56a835893eb5289182dd06ce9905) + download(hdl-64.caffemodel f79f56a835893eb5289182dd06ce9905) + download(vls-128.caffemodel e5864f65c42d7d69a58fa7b01970d078) + find_package(PCL REQUIRED) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_MWAITXINTRIN_H_INCLUDED") diff --git a/perception/lidar_centerpoint/CMakeLists.txt b/perception/lidar_centerpoint/CMakeLists.txt index 991583b697d3d..a67484b7353ea 100644 --- a/perception/lidar_centerpoint/CMakeLists.txt +++ b/perception/lidar_centerpoint/CMakeLists.txt @@ -66,45 +66,51 @@ else() set(CUDNN_AVAIL OFF) endif() +message(STATUS "start to download") if(TRT_AVAIL AND CUDA_AVAIL AND CUDNN_AVAIL) # Download trained models - find_program(GDOWN_AVAIL "gdown") - if(NOT GDOWN_AVAIL) - message("gdown: command not found. External files could not be downloaded.") - endif() - set(DATA_PATH ${CMAKE_CURRENT_SOURCE_DIR}/data) execute_process(COMMAND mkdir -p ${DATA_PATH}) - function(download FILE_NAME GFILE_ID FILE_HASH) - # https://drive.google.com/file/d/GFILE_ID/view + function(download FILE_NAME FILE_HASH) message(STATUS "Checking and downloading ${FILE_NAME}") set(FILE_PATH ${DATA_PATH}/${FILE_NAME}) + set(STATUS_CODE 0) + message(STATUS "start ${FILE_NAME}") if(EXISTS ${FILE_PATH}) + message(STATUS "found ${FILE_NAME}") file(MD5 ${FILE_PATH} EXISTING_FILE_HASH) - if(${FILE_HASH} EQUAL ${EXISTING_FILE_HASH}) + if("${FILE_HASH}" STREQUAL "${EXISTING_FILE_HASH}") + message(STATUS "same ${FILE_NAME}") message(STATUS "File already exists.") else() + message(STATUS "diff ${FILE_NAME}") message(STATUS "File hash changes. Downloading now ...") - execute_process(COMMAND gdown --quiet https://drive.google.com/uc?id=${GFILE_ID} -O ${FILE_PATH}) - # file(MD5 ${FILE_PATH} DOWNLOADED_FILE_HASH) # disable to pass ci - message(STATUS "Downloaded file hash: ${DOWNLOADED_FILE_HASH}") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 300) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) endif() else() + message(STATUS "not found ${FILE_NAME}") message(STATUS "File doesn't exists. 
Downloading now ...") - execute_process(COMMAND gdown --quiet https://drive.google.com/uc?id=${GFILE_ID} -O ${FILE_PATH}) - # file(MD5 ${FILE_PATH} DOWNLOADED_FILE_HASH) # disable to pass ci - message(STATUS "Downloaded file hash: ${DOWNLOADED_FILE_HASH}") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 300) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) + endif() + if(${STATUS_CODE} EQUAL 0) + message(STATUS "Download completed successfully!") + else() + message(FATAL_ERROR "Error occurred during download: ${ERROR_MESSAGE}") endif() endfunction() # default model - download(pts_voxel_encoder_default.onnx 1KFhmA4oFT6CtZx5806QeMzn5H2tKa3oD 410f730c537968cb27fbd70c941849a8) - download(pts_backbone_neck_head_default.onnx 1iyk5VoQ4uNBGPZwypVZIMjSuSYAI1RxP e97c165c7877222c0e27e44409a07517) + download(pts_voxel_encoder_default.onnx 410f730c537968cb27fbd70c941849a8) + download(pts_backbone_neck_head_default.onnx e97c165c7877222c0e27e44409a07517) # aip_x2 model - download(pts_voxel_encoder_aip_x2.onnx 13aYPRHx17Ge4BqxzW9drAUSWTppjtUV5 3ae5e9efd7b2ed12115e6f0b28cac58d) - download(pts_backbone_neck_head_aip_x2.onnx 14PJ_L3Jpz6Yi8GzoctVOEbGWcaCLArGp 6a406a19e05660677c162486ab332de8) + download(pts_voxel_encoder_aip_x2.onnx 3ae5e9efd7b2ed12115e6f0b28cac58d) + download(pts_backbone_neck_head_aip_x2.onnx 6a406a19e05660677c162486ab332de8) find_package(ament_cmake_auto REQUIRED) ament_auto_find_build_dependencies() diff --git a/perception/tensorrt_yolo/CMakeLists.txt b/perception/tensorrt_yolo/CMakeLists.txt index e8ac60c30ed37..56d444bd6347a 100755 --- a/perception/tensorrt_yolo/CMakeLists.txt +++ b/perception/tensorrt_yolo/CMakeLists.txt @@ -68,100 +68,50 @@ else() endif() # Download onnx -find_program(GDOWN_AVAIL "gdown") -if(NOT GDOWN_AVAIL) - message("gdown: command not found. External files could not be downloaded.") -endif() -set(PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") -if(NOT EXISTS "${PATH}") - execute_process(COMMAND mkdir -p ${PATH}) -endif() -# yolov3 -set(FILE "${PATH}/yolov3.onnx") -message(STATUS "Checking and downloading yolov3.onnx") -if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process( - COMMAND gdown "https://drive.google.com/uc?id=1ZYoBqVynmO5kKntyN56GEbELRpuXG8Ym" -O ${PATH}/yolov3.onnx - ERROR_QUIET - ) -endif() - -# yolov4 -set(FILE "${PATH}/yolov4.onnx") -message(STATUS "Checking and downloading yolov4.onnx") -if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process( - COMMAND gdown "https://drive.google.com/uc?id=1vkNmSwcIpTkJ_-BrKhxtit0PBJeJYTVX" -O ${PATH}/yolov4.onnx - ERROR_QUIET - ) -endif() - -# yolov4-tiny -set(FILE "${PATH}/yolov4-tiny.onnx") -message(STATUS "Checking and downloading yolov4.onnx") -if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process( - COMMAND gdown "https://drive.google.com/uc?id=1rUHjV_JfkmlVFgb20XXrOMWo_HZAjrFh" -O ${PATH}/yolov4-tiny.onnx - ERROR_QUIET - ) -endif() - -# yolov5s -set(FILE "${PATH}/yolov5s.onnx") -message(STATUS "Checking and downloading yolov5s.onnx") -if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. 
Downloading now ...") - execute_process( - COMMAND gdown "https://drive.google.com/uc?id=1CF21nQWigwCPTr5psQZXg6cBQIOYKbad" -O ${PATH}/yolov5s.onnx - ERROR_QUIET - ) -endif() - -# yolov5m -set(FILE "${PATH}/yolov5m.onnx") -message(STATUS "Checking and downloading yolov5m.onnx") -if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process( - COMMAND gdown "https://drive.google.com/uc?id=1a1h50KJH6slwmjKZpPlS-errukF-BrgG" -O ${PATH}/yolov5m.onnx - ERROR_QUIET - ) -endif() - -# yolov5l -set(FILE "${PATH}/yolov5l.onnx") -message(STATUS "Checking and downloading yolov5l.onnx") -if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process( - COMMAND gdown "https://drive.google.com/uc?id=1xO8S92Cq7qrmx93UHHyA7Cd7-dJsBDP8" -O ${PATH}/yolov5l.onnx - ERROR_QUIET - ) -endif() - -# yolov5x -set(FILE "${PATH}/yolov5x.onnx") -message(STATUS "Checking and downloading yolov5x.onnx") -if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process( - COMMAND gdown "https://drive.google.com/uc?id=1kAHuNJUCxpD-yWrS6t95H3zbAPfClLxI" -O ${PATH}/yolov5x.onnx - ERROR_QUIET - ) -endif() - -set(FILE "${PATH}/coco.names") -message(STATUS "Checking and downloading coco.names") -if(NOT EXISTS "${FILE}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process( - COMMAND gdown "https://drive.google.com/uc?id=19wXD23PE16kJDkZ7j2W3Ijvx5I1kO9kJ" -O ${PATH}/coco.names - ERROR_QUIET - ) +set(DATA_PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") +if(NOT EXISTS "${DATA_PATH}") + execute_process(COMMAND mkdir -p ${DATA_PATH}) endif() +function(download FILE_NAME FILE_HASH) + message(STATUS "Checking and downloading ${FILE_NAME}") + set(FILE_PATH ${DATA_PATH}/${FILE_NAME}) + set(STATUS_CODE 0) + message(STATUS "start ${FILE_NAME}") + if(EXISTS ${FILE_PATH}) + message(STATUS "found ${FILE_NAME}") + file(MD5 ${FILE_PATH} EXISTING_FILE_HASH) + if(${FILE_HASH} STREQUAL ${EXISTING_FILE_HASH}) + message(STATUS "same ${FILE_NAME}") + message(STATUS "File already exists.") + else() + message(STATUS "diff ${FILE_NAME}") + message(STATUS "File hash changes. Downloading now ...") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 3600) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) + endif() + else() + message(STATUS "not found ${FILE_NAME}") + message(STATUS "File doesn't exists. 
Downloading now ...") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 3600) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) + endif() + if(${STATUS_CODE} EQUAL 0) + message(STATUS "Download completed successfully!") + else() + message(FATAL_ERROR "Error occurred during download: ${ERROR_MESSAGE}") + endif() +endfunction() + +download(yolov3.onnx bde3b120f51db251d9a89cd25205e062) +download(yolov4.onnx 99ed8c5d18acad2ebb5ee80860d9cbaf) +download(yolov4-tiny.onnx d16dfd56dddfce75f25d5aaf4c8f4c40) +download(yolov5s.onnx 646b295db6089597e79163446d6eedca) +download(yolov5m.onnx 8fc91d70599ac59ae92c6490001fd84b) +download(yolov5l.onnx 99ff1fc966bda8efbc45c3b8bf00eea2) +download(yolov5x.onnx c51beebed276735cb16a18ca32959e9f) +download(coco.names 8fc50561361f8bcf96b0177086e7616c) # create calib image directory for int8 calibration set(CALIB_IMAGE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/calib_image") diff --git a/perception/traffic_light_classifier/CMakeLists.txt b/perception/traffic_light_classifier/CMakeLists.txt index 8ebd5f32db99e..1b61e755567a2 100644 --- a/perception/traffic_light_classifier/CMakeLists.txt +++ b/perception/traffic_light_classifier/CMakeLists.txt @@ -66,44 +66,43 @@ else() endif() # Download caffemodel and prototxt -set(PRETRAINED_MODEL_LINK "https://drive.google.com/uc?id=15CQceCn9TZDU6huKJacQvUnDiLHionb3") -set(PRETRAINED_MODEL_HASH 7dc31c696b0400ddfc2cc5521586fa51) -set(LAMP_LABEL_LINK "https://drive.google.com/uc?id=1D7n3oGSWLkWgxET6PcWqEzOhmmPcqM52") -set(LAMP_LABEL_HASH 20167c8e9a1f9d2ec7b0b0088c4100f0) - -find_program(GDOWN_AVAIL "gdown") -if(NOT GDOWN_AVAIL) - message(STATUS "gdown: command not found. External files could not be downloaded.") +set(DATA_PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") +if(NOT EXISTS "${DATA_PATH}") + execute_process(COMMAND mkdir -p ${DATA_PATH}) endif() -set(PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") -if(NOT EXISTS "${PATH}") - execute_process(COMMAND mkdir -p ${PATH}) -endif() -set(FILE "${PATH}/traffic_light_classifier_mobilenetv2.onnx") -message(STATUS "Checking and downloading traffic_light_classifier_mobilenetv2.onnx") -if(EXISTS "${FILE}") - file(MD5 "${FILE}" EXISTING_FILE_HASH) - if(NOT "${PRETRAINED_MODEL_HASH}" EQUAL "${EXISTING_FILE_HASH}") - message(STATUS "... file hash changed. Downloading now ...") - execute_process(COMMAND gdown --quiet "${PRETRAINED_MODEL_LINK}" -O ${PATH}/traffic_light_classifier_mobilenetv2.onnx) +function(download FILE_NAME FILE_HASH) + message(STATUS "Checking and downloading ${FILE_NAME}") + set(FILE_PATH ${DATA_PATH}/${FILE_NAME}) + set(STATUS_CODE 0) + message(STATUS "start ${FILE_NAME}") + if(EXISTS ${FILE_PATH}) + message(STATUS "found ${FILE_NAME}") + file(MD5 ${FILE_PATH} EXISTING_FILE_HASH) + if(${FILE_HASH} STREQUAL ${EXISTING_FILE_HASH}) + message(STATUS "same ${FILE_NAME}") + message(STATUS "File already exists.") + else() + message(STATUS "diff ${FILE_NAME}") + message(STATUS "File hash changes. Downloading now ...") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 300) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) + endif() + else() + message(STATUS "not found ${FILE_NAME}") + message(STATUS "File doesn't exists. 
Downloading now ...") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 300) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) endif() -else() - message(STATUS "... file does not exist. Downloading now ...") - execute_process(COMMAND gdown --quiet "${PRETRAINED_MODEL_LINK}" -O ${PATH}/traffic_light_classifier_mobilenetv2.onnx) -endif() - -set(FILE "${PATH}/lamp_labels.txt") -message(STATUS "Checking and downloading lamp_labels.txt") -if(EXISTS "${FILE}") - file(MD5 "${FILE}" EXISTING_FILE_HASH) - if(NOT "${LAMP_LABEL_HASH}" EQUAL "${EXISTING_FILE_HASH}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process(COMMAND gdown --quiet "${LAMP_LABEL_LINK}" -O ${PATH}/lamp_labels.txt) + if(${STATUS_CODE} EQUAL 0) + message(STATUS "Download completed successfully!") + else() + message(FATAL_ERROR "Error occurred during download: ${ERROR_MESSAGE}") endif() -else() - message(STATUS "... file does not exist. Downloading now ...") - execute_process(COMMAND gdown --quiet "${LAMP_LABEL_LINK}" -O ${PATH}/lamp_labels.txt) -endif() +endfunction() +download(traffic_light_classifier_mobilenetv2.onnx 7dc31c696b0400ddfc2cc5521586fa51) +download(lamp_labels.txt 20167c8e9a1f9d2ec7b0b0088c4100f0) find_package(ament_cmake_auto REQUIRED) ament_auto_find_build_dependencies() diff --git a/perception/traffic_light_ssd_fine_detector/CMakeLists.txt b/perception/traffic_light_ssd_fine_detector/CMakeLists.txt index eb172b7faf981..a531dce2a194b 100644 --- a/perception/traffic_light_ssd_fine_detector/CMakeLists.txt +++ b/perception/traffic_light_ssd_fine_detector/CMakeLists.txt @@ -68,45 +68,46 @@ else() endif() # Download caffemodel and prototxt -set(PRETRAINED_MODEL_LINK "https://drive.google.com/uc?id=1USFDPRH9JrVdGoqt27qHjRgittwc0kcO") set(PRETRAINED_MODEL_HASH 34ce7f2cbacbf6da8bc35769f027b73f) -set(LAMP_LABEL_LINK "https://drive.google.com/uc?id=1hPcKvKgKz0fqEo0cNAXH7roEletqZErL") set(LAMP_LABEL_HASH e9f45efb02f2a9aa8ac27b3d5c164905) -find_program(GDOWN_AVAIL "gdown") -if(NOT GDOWN_AVAIL) - message(STATUS "gdown: command not found. External files could not be downloaded.") +set(DATA_PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") +if(NOT EXISTS "${DATA_PATH}") + execute_process(COMMAND mkdir -p ${DATA_PATH}) endif() -set(PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") -if(NOT EXISTS "${PATH}") - execute_process(COMMAND mkdir -p ${PATH}) -endif() - -set(FILE "${PATH}/mb2-ssd-lite-tlr.onnx") -message(STATUS "Checking and downloading mb2-ssd-lite-tlr.onnx") -if(EXISTS "${FILE}") - file(MD5 "${FILE}" EXISTING_FILE_HASH) - if(NOT "${PRETRAINED_MODEL_HASH}" EQUAL "${EXISTING_FILE_HASH}") - message(STATUS "... file hash changed. Downloading now ...") - execute_process(COMMAND gdown --quiet "${PRETRAINED_MODEL_LINK}" -O ${PATH}/mb2-ssd-lite-tlr.onnx) +function(download FILE_NAME FILE_HASH) + message(STATUS "Checking and downloading ${FILE_NAME}") + set(FILE_PATH ${DATA_PATH}/${FILE_NAME}) + set(STATUS_CODE 0) + message(STATUS "start ${FILE_NAME}") + if(EXISTS ${FILE_PATH}) + message(STATUS "found ${FILE_NAME}") + file(MD5 ${FILE_PATH} EXISTING_FILE_HASH) + if(${FILE_HASH} STREQUAL ${EXISTING_FILE_HASH}) + message(STATUS "same ${FILE_NAME}") + message(STATUS "File already exists.") + else() + message(STATUS "diff ${FILE_NAME}") + message(STATUS "File hash changes. 
Downloading now ...") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 300) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) + endif() + else() + message(STATUS "not found ${FILE_NAME}") + message(STATUS "File doesn't exists. Downloading now ...") + file(DOWNLOAD https://awf.ml.dev.web.auto/perception/models/${FILE_NAME} ${FILE_PATH} STATUS DOWNLOAD_STATUS TIMEOUT 300) + list(GET DOWNLOAD_STATUS 0 STATUS_CODE) + list(GET DOWNLOAD_STATUS 1 ERROR_MESSAGE) endif() -else() - message(STATUS "... file does not exist. Downloading now ...") - execute_process(COMMAND gdown --quiet "${PRETRAINED_MODEL_LINK}" -O ${PATH}/mb2-ssd-lite-tlr.onnx) -endif() - -set(FILE "${PATH}/voc_labels_tl.txt") -message(STATUS "Checking and downloading voc_labels_tl.txt") -if(EXISTS "${FILE}") - file(MD5 "${FILE}" EXISTING_FILE_HASH) - if(NOT "${LAMP_LABEL_HASH}" EQUAL "${EXISTING_FILE_HASH}") - message(STATUS "... file does not exist. Downloading now ...") - execute_process(COMMAND gdown --quiet "${LAMP_LABEL_LINK}" -O ${PATH}/voc_labels_tl.txt) + if(${STATUS_CODE} EQUAL 0) + message(STATUS "Download completed successfully!") + else() + message(FATAL_ERROR "Error occurred during download: ${ERROR_MESSAGE}") endif() -else() - message(STATUS "... file does not exist. Downloading now ...") - execute_process(COMMAND gdown --quiet "${LAMP_LABEL_LINK}" -O ${PATH}/voc_labels_tl.txt) -endif() +endfunction() +download(mb2-ssd-lite-tlr.onnx 34ce7f2cbacbf6da8bc35769f027b73f) +download(voc_labels_tl.txt e9f45efb02f2a9aa8ac27b3d5c164905) if(TRT_AVAIL AND CUDA_AVAIL AND CUDNN_AVAIL) include_directories( diff --git a/planning/behavior_velocity_planner/src/scene_module/stop_line/scene.cpp b/planning/behavior_velocity_planner/src/scene_module/stop_line/scene.cpp index 7ab118518ab04..37d8b5ba0b257 100644 --- a/planning/behavior_velocity_planner/src/scene_module/stop_line/scene.cpp +++ b/planning/behavior_velocity_planner/src/scene_module/stop_line/scene.cpp @@ -231,7 +231,7 @@ bool StopLineModule::modifyPathVelocity( // If no collision found, do nothing if (!collision) { - RCLCPP_WARN(logger_, "is no collision"); + RCLCPP_WARN_THROTTLE(logger_, *clock_, 5000 /* ms */, "is no collision"); return true; } diff --git a/planning/scenario_selector/launch/dummy_scenario_selector_lane_driving.launch.xml b/planning/scenario_selector/launch/dummy_scenario_selector_lane_driving.launch.xml index b43d648b7c011..d6a269f3a94c3 100644 --- a/planning/scenario_selector/launch/dummy_scenario_selector_lane_driving.launch.xml +++ b/planning/scenario_selector/launch/dummy_scenario_selector_lane_driving.launch.xml @@ -9,11 +9,7 @@ - - - - - + diff --git a/planning/scenario_selector/launch/dummy_scenario_selector_parking.launch.xml b/planning/scenario_selector/launch/dummy_scenario_selector_parking.launch.xml index 01f6ab194d905..f72eb4bd55195 100644 --- a/planning/scenario_selector/launch/dummy_scenario_selector_parking.launch.xml +++ b/planning/scenario_selector/launch/dummy_scenario_selector_parking.launch.xml @@ -9,11 +9,7 @@ - - - - - + diff --git a/sensing/pointcloud_preprocessor/launch/preprocessor.launch.xml b/sensing/pointcloud_preprocessor/launch/preprocessor.launch.xml index f8301e6327990..bba1a1b1c6a8c 100644 --- a/sensing/pointcloud_preprocessor/launch/preprocessor.launch.xml +++ b/sensing/pointcloud_preprocessor/launch/preprocessor.launch.xml @@ -4,14 +4,14 @@ To subscribe multiple topics, write as: "['/points_raw0', '/points_raw1', 
'/points_raw2', ...]" This syntax is also available from command line --> - + - - + + - - - + + + diff --git a/sensing/pointcloud_preprocessor/src/blockage_diag/blockage_diag_nodelet.cpp b/sensing/pointcloud_preprocessor/src/blockage_diag/blockage_diag_nodelet.cpp index 373e7a6266218..3f9f427a8f93d 100644 --- a/sensing/pointcloud_preprocessor/src/blockage_diag/blockage_diag_nodelet.cpp +++ b/sensing/pointcloud_preprocessor/src/blockage_diag/blockage_diag_nodelet.cpp @@ -77,27 +77,29 @@ void BlockageDiagComponent::onBlockageChecker(DiagnosticStatusWrapper & stat) // TODO(badai-nguyen): consider sky_blockage_ratio_ for DiagnosticsStatus." [todo] auto level = DiagnosticStatus::OK; + std::string msg; if (ground_blockage_ratio_ < 0) { level = DiagnosticStatus::STALE; + msg = "STALE"; } else if ( (ground_blockage_ratio_ > blockage_ratio_threshold_) && (ground_blockage_count_ > blockage_count_threshold_)) { level = DiagnosticStatus::ERROR; + msg = "ERROR"; } else if (ground_blockage_ratio_ > 0.0f) { level = DiagnosticStatus::WARN; + msg = "WARN"; } else { level = DiagnosticStatus::OK; + msg = "OK"; } - std::string msg; - if (level == DiagnosticStatus::OK) { - msg = "OK"; - } else if (level == DiagnosticStatus::WARN) { - msg = "WARNING: LiDAR blockage"; - } else if (level == DiagnosticStatus::ERROR) { - msg = "ERROR: LiDAR blockage"; - } else if (level == DiagnosticStatus::STALE) { - msg = "STALE"; + if ((ground_blockage_ratio_ > 0.0f) && (sky_blockage_ratio_ > 0.0f)) { + msg = msg + ": LIDAR both blockage"; + } else if (ground_blockage_ratio_ > 0.0f) { + msg = msg + ": LIDAR ground blockage"; + } else if (sky_blockage_ratio_ > 0.0f) { + msg = msg + ": LIDAR sky blockage"; } stat.summary(level, msg); }