diff --git a/docs/leo-rover/addons/high-capacity-battery.mdx b/docs/leo-rover/addons/high-capacity-battery.mdx index af8c32e6..f5fa8ef7 100644 --- a/docs/leo-rover/addons/high-capacity-battery.mdx +++ b/docs/leo-rover/addons/high-capacity-battery.mdx @@ -17,6 +17,7 @@ import Product from '@site/src/products/high-capacity-battery.mdx'; import useBaseUrl from '@docusaurus/useBaseUrl'; import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; import FusionEmbed from '@site/src/components/FusionEmbed'; +import CenterContent from '@site/src/components/CenterContent'; # High-Capacity Battery Specification @@ -45,12 +46,20 @@ capacity batteries within inland Europe. ## Main parameters -| Parameter | Value | -| -------------------------------- | ------------------------------------------------------------------ | -| Battery type | Li-Ion 3S1P | -| Battery capacity | 15 mAh | -| Estimated charging time (0-100%) | 3 hours (with 5A charger) \ 8 hours (with standard Leo 2A charger) | -| Main materials | Aluminum, PLA | + + +| Parameter | Value | +| -------------------- | :---------------------------------------------------: | +| Battery type | Li-Ion | +| Battery pack type | 3S3P | +| Nominal voltage | 10.89 V | +| Charge capacity | 14.7 Ah | +| Energy capacity | $\approx$ 160.1 Wh | +| Maximum output power | $\approx$ 120 W | +| Battery cell | Samsung INR21700-50E | +| Safety systems | Overcurrent, Undervoltage, Reverse polarity, Overheat | + + ## Dimensions diff --git a/docs/leo-rover/documentation/specification.mdx b/docs/leo-rover/documentation/specification.mdx index ff74e246..5571cda3 100644 --- a/docs/leo-rover/documentation/specification.mdx +++ b/docs/leo-rover/documentation/specification.mdx @@ -234,8 +234,8 @@ Whole drive assembly has following characteristics: ### Battery & charging -Leo Rover is powered by a 3S Li-Ion battery with a capacity of 7000 mAh. The -battery is equipped with an internal battery management system (BMS) that +Leo Rover is powered by a 3S2P Li-Ion battery with a total capacity of 6800 mAh. +The battery is equipped with an internal battery management system (BMS) that provides protection against overcharging, over-discharging, and short circuits. The battery is designed to be easily replaceable, allowing for quick swaps during extended use. @@ -244,15 +244,16 @@ Each battery pack has following characteristics: -| Parameter | Value | -| -------------------- | :--------------------------------------: | -| Voltage | 11.1 V (nominal) | -| Battery type | Li-Ion | -| Battery cell | 18650 (Samsung INR18650-35E) | -| Capacity | 7 Ah (77,7 Wh - flight safe) | -| Battery pack type | 3S2P | -| Maximum output power | $\approx$ 120 W | -| Safety systems | Overcurrent, Reverse polarity protection | +| Parameter | Value | +| -------------------- | :---------------------------------------------------: | +| Battery type | Li-Ion | +| Battery pack type | 3S2P | +| Nominal voltage | 10.8 V | +| Charge capacity | 6.8 Ah | +| Energy capacity | $\approx$ 73.2 Wh | +| Maximum output power | $\approx$ 120 W | +| Battery cell | Samsung INR18650-35E | +| Safety systems | Overcurrent, Undervoltage, Reverse polarity, Overheat | diff --git a/docs/leo-rover/guides/software-update.mdx b/docs/leo-rover/guides/software-update.mdx index 050edc9f..712ec08b 100644 --- a/docs/leo-rover/guides/software-update.mdx +++ b/docs/leo-rover/guides/software-update.mdx @@ -69,6 +69,20 @@ download the image (.img.xz file) you want. We recommend the `full` version. 
The `lite` version comes without a desktop environment, so it is an alternative if you don't need all the extra packages. +:::info + +ROS 2 versions of LeoOS come preinstalled with ROS 2 Jazzy. If you are looking +to run ROS 2 Humble on Leo Rover 1.9, we have created a custom LeoOS image for +that purpose. + +Inside this image, a [Podman](https://docs.podman.io/en/latest) container runs +ROS 2 Humble with all Leo Rover ROS packages available in their newest versions. +You can find this image +[in the releases page](https://github.com/LeoRover/leo_os/releases), tagged with +`2.x.x-humble`. + +::: + ### Flash image to microSD card #### Using Etcher diff --git a/docs/leo-rover/leo-examples/_category_.json b/docs/leo-rover/leo-examples/_category_.json new file mode 100644 index 00000000..185ab25d --- /dev/null +++ b/docs/leo-rover/leo-examples/_category_.json @@ -0,0 +1,12 @@ +{ + "label": "Leo Examples", + "position": 4, + "collapsible": true, + "collapsed": true, + "link": { + "type": "generated-index", + "title": "Leo Examples", + "description": "Learn about software examples that you can run on the stock Leo Rover.", + "slug": "leo-examples" + } +} diff --git a/docs/leo-rover/leo-examples/follow-marker.mdx b/docs/leo-rover/leo-examples/follow-marker.mdx new file mode 100644 index 00000000..da54643e --- /dev/null +++ b/docs/leo-rover/leo-examples/follow-marker.mdx @@ -0,0 +1,256 @@ +--- +title: 'Leo Rover Example: How to Follow an Aruco Marker' +sidebar_label: Follow Aruco Marker +sidebar_position: 1 +keywords: + - aruco + - detection + - follow + - opencv + - example + - stock + - leo + - rover + - tutorial +description: >- + Learn how to make a Leo Rover mobile robot follow a printed Aruco Marker. + Print the marker and run the example code to get started. +image: /img/robots/leo/integrations/follow-aruco-marker/follow-aruco-1.webp +--- + +# Example: How to Follow an Aruco Marker + +import LiteYouTubeEmbed from 'react-lite-youtube-embed'; + +In this example, we will show you how to make the Leo Rover mobile robot follow +an Aruco marker. + +## What to expect? + +After completing this tutorial, your rover should be able to follow a printed +Aruco Marker. + +
+ +
+ +## Prerequisites + + + + + +## List of components + +1. Any computer which you can connect to the rover via `ssh`. +2. Stock Leo Rover. + +## Mechanical integration + +The only physical thing you'll need to do is to print an Aruco marker: + +- You can generate and download an Aruco marker from + [here](https://chev.me/arucogen) (you'll need to select the 4x4 dictionary), + or you can follow our instructions on generating markers from the tutorial + linked below. It gives you more control regarding marker size and other + parameters, which might be preferable for some users. + + + +- The printed marker **must** have a white border around it. +- System default configuration expects marker with id 0 and 15cm in size, but + those parameters can be easily changed for your setup. + +Here's an example of our aruco marker that we used for this task: + + + +:::info + +Remember that the white border around the marker is necessary. You can leave the +printed marker on the piece of paper, but we do recommend to attach it to some +harder material like cardboard - this way your marker won't bend, so it will be +easier for the software to notice it. + +::: + +## Software integration + +{/* TODO */} {/* :::tip */} + +{/* Since LeoOS x.x.x release, leo_examples package is installed by default. If the */} +{/* system is updated to at least x.x.x release you can skip software integration */} +{/* part. */} + +{/* ::: */} + +:::info + +To complete those steps, you need to connect to the rover's network first, and +then log in using ssh (both covered in prerequisites). + +::: + +### Installing using apt + +You can install the package using `apt` by typing **on the rover**: + +```bash +sudo apt install ros-${ROS_DISTRO}-leo-examples +``` + +Then you just need to source the `ROS` workspace: + +```bash +source /opt/ros/${ROS_DISTRO}/setup.bash +``` + +### Building from source + +You can also get all needed software from our +[leo_examples](https://github.com/LeoRover/leo_examples-ros2) github repository. +You need to clone it on the rover in the ros workspace directory (if there's no +such a directory, first go through the +[ROS development](../advanced-guides/ros-development) tutorial): + +```bash +cd ~/ros_ws/src +git clone https://github.com/LeoRover/leo_examples-ros2.git +``` + +Now, you need to install all the dependencies for the downloaded packages: + +```bash +cd ~/ros_ws +sudo apt update +rosdep update +rosdep install --from-paths src -ir +``` + +Then, you need to source the directory and build the packages: + +```bash +cd ~/ros_ws +source install/setup.bash +colcon build +``` + +:::info + +If your installation went without any errors, then you have successfully +installed required software. + +::: + +## Examples + +To run the example, you need to be connected to the rover via ssh. Once you do +this, type in the terminal: + +```bash +ros2 launch leo_example_follow_aruco_marker follow_aruco_marker.launch.xml +``` + +The rover will start following the marker once it detects it. + +:::warning + +By default the rover follows only the marker with id 0, and has marker tracker +configured for a marker of size 15cm. If your printed marker doesn't comply with +the requirements, you need to follow the configuration instructions. + +::: + +### Configuration + +All nodes launched in the example have configurable parameters, therefore it's +easy to adjust the system for your conditions. To be able to change anything you +need to have ROS installed on your computer and be connected to the rover's +network. 
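
You can also change these parameters directly on the rover over `ssh` with the
ROS 2 CLI, without leaving the terminal. A minimal sketch, assuming the node and
parameter names used later in this section (confirm them with `ros2 node list`
and `ros2 param list` on your setup):

```bash
# Assumed node/parameter names - verify them with `ros2 node list` and `ros2 param list`.
ros2 param set /aruco_follower follow_id 5          # follow the marker with ID 5 instead of 0
ros2 param set /aruco_follower follow_enabled false # temporarily stop the rover from following
```

Just like with the rqt workflow described below, these changes are not
persistent - restarting the example reloads the default configuration.
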
+ +If you don't have ROS installed, you can follow this guide: + + + +Being connected and having ROS installed, next you have to source the `ROS` +workspace: + +```bash +source /opt/ros/${ROS_DISTRO}/setup.bash +``` + +Now just start the `Dynamic Reconfigure` plugin in rqt: + +```bash +rqt -s rqt_reconfigure +``` + +In the displayed window on the left side choose the node which parameters you +want to change. + + + +:::tip + +If you don't see your target node, try using the `Refresh` option. + +::: + +The `aruco_follower` and `aruco_tracker` nodes are the core components that make +this example work. As their names suggest, one handles marker detection, while +the other uses that information to command the robot's movement. + +#### Tracker + +The most important parameter in the `aruco_tracker` node is `marker_size`. If +your printed marker is not exactly 15 cm, adjust this value accordingly.
+Other parameters generally don't require any modification, but you're free to +experiment if you'd like. Keep in mind that once you restart the rover's +programs, the system will automatically reload the default configuration. + +:::warning + +Only parameters within the aruco namespace (those named like `aruco.xyz`) are +safe to modify. Changing anything else may prevent the rover from following the +marker correctly.
For details on each of those parameters, check the +[OpenCV ArUco documentation](https://docs.opencv.org/4.6.0/d5/dae/tutorial_aruco_detection.html). + +::: + +#### Follower + +Each parameter of the `aruco_follower` node includes a built-in description - +just hover your mouse over a parameter to see it. The most important one is +`follow_id`, which specifies the ID of the marker the rover should follow. You +can also use the `follow_enabled` checkbox to quickly enable or disable the +rover's movement. + +The remaining parameters control the rover's driving and rotation speeds, as +well as the distance thresholds that determine when it should move or stop. + +## What next? + +After completing this tutorial, you can try other examples from the leo_examples +repository ([line follower](line-follower) and +[object detection](object-detection)), or try other integration from our site. diff --git a/docs/leo-rover/leo-examples/line-follower.mdx b/docs/leo-rover/leo-examples/line-follower.mdx new file mode 100644 index 00000000..2b020413 --- /dev/null +++ b/docs/leo-rover/leo-examples/line-follower.mdx @@ -0,0 +1,606 @@ +--- +title: 'Leo Rover Example: How to Follow a Line using TensorFlow Lite' +sidebar_label: Line follower +sidebar_position: 2 +keywords: + - line + - follower + - track + - tensorflow + - example + - stock + - leo + - rover + - tutorial +description: >- + Discover a step-by-step guide on setting up a line follower example for the + stock Leo Rover, including creating a track and installing TensorFlow Lite. +image: /img/robots/leo/integrations/line-follower/line-follower-1.webp +--- + +# Example: How to Follow a Line + +import LiteYouTubeEmbed from 'react-lite-youtube-embed'; + +In this example, we will show you how to run a line follower on the Leo Rover +mobile robot. + +## What to expect? + +After completing this tutorial, your rover should be able to navigate a +two-lined track by itself. You will also be able to gather and train neural +network model for this task on your own data. Here's an example of our rover +driving on the designated track: + +
+ +
+ +## Prerequisites + + + +{/* */} + + + +## List of components + +### General requirements + +1. Any computer which you can connect to the rover via `ssh`. +2. Stock Leo Rover + +### For running our model (driving rover on track) + +1. Insulating tape of any color (the more contrast with the ground, the better) + +### For gathering and training on your data + +1. Account on a website providing online environment for Jupyter Notebooks (we + have used [kaggle](https://www.kaggle.com/)) +2. Game pad for driving the rover **(not needed but recommended)** + +## Mechanical integration + +As this is one of our examples for stock Leo Rover, you don't have to do any +mechanical stuff regarding the rover. The only "mechanical" thing you need to do +is to make a two-lined track with insulating tape. Below, there's an example of +our track that we used for training the neural network model - image taken from +the rover. Try to end up with something like this: + +- two lines far enough from each other, so that the rover can drive in between +- color of the lines different from the ground + + + +:::info + +Actually you don't need to use insulating tape. As you will learn later in the +tutorial, you only need to provide two lines for the rover, which have different +color than the ground. So for example one solution is drawing/printing the lines +on paper and stick it to the ground with adhesive tape. + +::: + +## Software integration + +{/* TODO */} {/* :::tip */} + +{/* Since LeoOS 1.1.0 release, leo_examples package is installed by default. If the */} +{/* system is updated to at least 1.1.0 release you can skip software integration */} +{/* part. */} + +{/* ::: */} + +:::info + +To complete those steps, you need to connect to the rover's network first, and +then log in using ssh. + +::: + +Our program uses a neural network model converted to +[TensorFlow Lite](https://www.tensorflow.org/lite), so you need to install it on +your rover too. As you shouldn't install python packages system-wide, we will +create virtual environment and install the module inside it: + +```bash +sudo apt install python3-pip python3-venv +python3 -m venv env_ros --system-site-packages +source env_ros/bin/activate +pip3 install ai-edge-litert +``` + +### Installing using apt + +You can install the package using `apt` by typing **on the rover**: + +```bash +sudo apt install ros-${ROS_DISTRO}-leo-examples +``` + +Then you just need to source the `ROS` workspace: + +```bash +source /opt/ros/${ROS_DISTRO}/setup.bash +``` + +### Building from source + +You can also get all needed software from our +[leo_examples](https://github.com/LeoRover/leo_examples-ros2) GitHub repository. +You need to clone it on the rover in the ros workspace directory (if there's no +such a directory, then go through the +[ROS Development](../advanced-guides/ros-development) tutorial first): + +```bash +cd ~/ros_ws/src +git clone https://github.com/LeoRover/leo_examples-ros2.git +``` + +Now, you need to install all the dependencies for the downloaded packages: + +```bash +cd ~/ros_ws +sudo apt update +rosdep update +rosdep install --from-paths src -i +``` + +Then, you need to source the directory and build the packages: + +```bash +cd ~/ros_ws +source install/setup.bash +colcon build +``` + +:::warning + +This instructions will fail if you don't have a ros workspace on the rover. + +::: + +:::info + +If your installation went without any errors, then you have successfully +installed required software. 
+

:::

## Examples

### Running nodes

:::info

You need to source the python virtual environment in each terminal on the rover
you want to use for running the nodes. Otherwise the nodes will fail because
they won't find the TensorFlow Lite library.
You can source the +environment with command: + +```bash +source /bin/activate +``` + +::: + +#### Color Mask + +Our approach for this task was to get the specified color from the image (color +of the tape), and train the neural network on such a mask. + + + +So, the first thing you need to do is to get the color mask values. We have +prepared ROS node for this task. To run it, type in the terminal on the rover: + +```bash +ros2 run leo_example_line_follower color_mask --ros-args --params-file $(ros2 pkg prefix leo_example_line_follower)/share/leo_example_line_follower/config/blue.yaml +``` + +To be able to visualize the color mask and choose the values you need to have +ROS installed on your computer and be connected to the rover's network. + +If you don't have ROS installed, you can follow this guide: + + + +Being connected and having ROS installed, next you have to source the `ROS` +workspace: + +```bash +source /opt/ros/${ROS_DISTRO}/setup.bash +``` + +Now just run `rqt` on your computer: + +```bash +rqt +``` + +There, you need to run two things: + +- `Image View` (Plugins -> Visualization -> Image View) +- `Dynamic Reconfigure` (Plugins -> Configuration -> Dynamic Reconfigure) + +In `Image View`, from the topic drop down choose `color_mask` topic - this is +the live view of the color mask sampled from the rover's view with current +values for the color mask. + +In `Dynamic Reconfigure`, choose `color_mask_finder`. You'll see something like +this: + + + +Our color capturing works on thresholding each of HSV components in the image. +In the `Parameter Reconfigure` you can see sliders for choosing the min and max +threshold values for each component.
If you want to see what colors are +currently in the mask, switch the topic in the `Image View` to +`colors_caught/compressed`. + +:::note + +- When choosing the color mask values from scratch, start by setting all MIN + sliders to 0, and all MAX sliders to maximum values. Then adjust the sliders + one by one, until the only white object in the mask is your track (tape). + Color mask visible in rqt is already preprocessed image as neural network + input - this is what the model will base its prediction on. +- When you inspect the command we used for starting the color mask node you can + notice we specify the params file. It holds starting values for the + thresholding parameters. You can use this way to provide your own file with + starting setup. +- The node supports dual thresholding ranges of hue component. The color mask + captures: + - objects with hue value in range \[hue_min, hue_max\], when `hue_min` is less + than `hue_max` + - objects with hue value in either \[0, hue_max\] or \[hue_min, 179\] range, + when `hue_min` is greater than `hue_max` - such solution is useful for + capturing colors with wide hue spectrum - e.g. red. + +::: + +When you are satisfied with your color mask, you can stop both rqt and the node +(with **ctrl+c**). Your chosen values will be printed in the terminal. + + + +You need to save them in the yaml file (best if you place it in the config +directory of the leo_example_line_follower package). You can do this with nano. +Copy the printed values (using the mouse or **ctrl+shift+c**) and type on the +rover + +```bash +touch ~/ros_ws/src/leo_examples-ros2/leo_example_line_follower/config/my_mask.yaml +nano ~/ros_ws/src/leo_examples-ros2/leo_example_line_follower/config/my_mask.yaml +``` + +Then, paste the values (**ctrl+shift+v** or use the mouse), save the file +(**ctrl+o**) and close it (**ctrl+x**). + +:::warning + +It's important that the file has to have the same structure, as our config +files. All the indentation and namings are really important for the node to +successfully load the values. It will be best if you start with such file +contents and swap values with the ones obtained from the color mask node. + +```yaml +/**: + ros__parameters: + hue_min: 94 + hue_max: 136 + sat_min: 38 + sat_max: 141 + val_min: 65 + val_max: 172 +``` + +::: + +:::info + +If you didn't build the package from source and therefore you can't create your +file in the destination we recommend, just create the file anywhere on the +rover. You will be able to provide it anyway for all the nodes that need it. You +just need to know the absolute path to your file. + +::: + +#### Line Follower + +Running the follower is very simple. You need to be connected to the rover via +`ssh` (the same as with color mask node). Once you are logged in to the rover, +you can launch the program using `ros2 launch` command: + +```bash +ros2 launch leo_example_line_follower line_follower.launch.xml +``` + +:::info + +As you run this command, the robot won't move until you change the +`follow_enabled` parameter. It's a feature that allows to easily stop the rover +and by default it's set to `false`.
To make the robot start driving +automatically you need to call the program like this: + +```bash +ros2 launch leo_example_line_follower line_follower.launch.xml follow:=true +``` + +::: + +The file has a few `launch` arguments provided to run the model with your data +(e.g. your color mask file). + +The most important ones are as follows: + +- `color_mask_file` - path to file with the color mask (HSV) values +- `pub_mask` - flag specifying whether or not to publish color mask while + driving (might slow the payload but useful for debugging) +- `model` - path to neural network model (there is a `models` directory with + couple models prepared by us, which you can choose from) +- `follow` - flag specifying if the robot should start following the track from + the node startup. By default it's set to `false` which means, the robot will + wait for manual change of the parameter to start driving. + +Every argument is documented, you can see them, their descriptions and default +values by running: + +```bash +ros2 launch leo_example_line_follower line_follower.launch.xml -s +``` + +:::note + +Every argument has default value, so you don't need to enter every argument when +running the line follower. + +When you want to change the value for specific argument, you can do this by +adding to the command _\:=\_. + +::: + +As a nice feature you can run `rqt` the same way as in color mask node but from +the node list choose `line_follower`. This way you will be able to see the +current color mask that works as input to the neural network model and also +change the color mask if needed during runtime.
In the available parameters +you can also use `follow_enabled` and `publish_mask` to quickly enable or +disable the robot's movement and color mask publishing. + +:::warning + +Remember that due to light reflection from the ground, the rover won't stay on +the track forever, so make sure you are ready to stop it (physically or with +`follow_enabled` parameter), or help it, when it goes off the track. + +::: + +## What next? + +### Making your own model + +#### Gathering data + +For gathering the data, you'll need to run our `data_saver` node. You need to +run it on the rover by using the `ros2 launch` command. + +The node has one required argument - `duration` which specifies the time period +(in seconds) the data will be recorded in. You can also specify the output +directory for the recorded data using the `output_dir` argument. So, for +example, your command can look like this: + +```bash +ros2 launch leo_example_line_follower record_data.launch.xml duration:=30 output_dir:=test_drive +``` + +This will record data for 30 seconds and place all the recorded data in the +`test_drive` directory (the node will create the directory if it doesn't exist). + +:::tip + +You don't have to record all the data into one directory. You can record the +data to many directories, as you will need to process them later anyway. So you +can run this command multiple times with changed arguments. + +::: + +First, the node waits for `twist` messages from `cmd_vel` topic, and after it +gets any message on this topic, it'll start recording data (the only data that +will be recorded is when the rover is moving - if you're staying in place, no +data will be recorded). + +After recording the data, in your output directory, you'll find images saved +from the rover, and one file `labels.txt`. The file contains multiple lines of +format `img_name:label`, where the label is a tuple of two floats representing +linear and angular (respectively) velocity values of the rover in the situation +visible in the specified image. + +:::note + +This is the only part, where you might need game controller. It's just easier to +drive over track, and stay in the lines - collect good data for the neural +network - with game controller, than the joystick on the web page, but you can +still do this using the Leo UI and keyboard. + +::: + +:::warning + +If the name for the output directory, that you have provided is not an absolute +path (starting with "/"), the directory will be put under home directory (by +default _/home/pi_). If you want it to be somewhere else, you need to give the +absolute path. + +::: + +#### Preparing the data + +When you have your data recorded, you have to collect it in the correct +structure. To do so, you need to run our `prepare_data` script with the +`ros2 run` command. 
+ +The node has three flags which you have to specify: + +- `-t` / `--train_data` - paths to directories with data for training the neural + network +- `-v` / `--valid_data` - paths to directories with data for validation during + the training +- `-z` / `--zip_file` - name of the zip file with your data that will be created + in the end + +So, for example, your command can look like this (if you're running it in the +home directory): + +```bash +ros2 run leo_example_line_follower prepare_data -t train_dir1 train_dir2 -v val1 val2 -z my_dataset.zip +``` + +:::warning + +Unlike `launch`, `ros2 run` commands run in your current working directory, so +for example: If you run the node in _/home/pi/test_ directory and provide names +for the `-t` and `-v` flags as _train_ and _valid_, then the node will look for +_/home/pi/test/train_ and _/home/pi/test/valid_ directories. + +::: + +As the script finishes, it will create your zip file with processed data in the +same directory that it was run. Such data is ready to be uploaded to your +notebook. + +:::note + +As we have used kaggle, we know that providing zip file is enough for the +dataset as it get s unpacked automatically. If you use other platform you may +need to unpack the files by hand. + +::: + +#### Training the data + +:::info + +This part might be a bit different if you use different platform than kaggle +(regarding uploading a file), but most of them will be similar, as you may need +to change some lines of code, and run some cells. + +::: + +Having your data ready, you need to upload it to your notebook. You get a copy +of our notebook when cloning repository, but you can also get it under this +[link](https://www.kaggle.com/code/aleksanderszymaski/leorover-line-follower). + +Once you have the notebook, you can upload the data using the **Upload** button +in the `Input` section in upper-right corner. + +Then, just select the **New Dataset** button, and drag (or browse for) your zip +file (you also need to provide a name for the dataset). + + + +:::info + +If you have used the `Copy & Edit` on Kaggle to get our notebook, then in the +datasets you have our dataset that has been added automatically.
To add it +manually use **Add input** option with **Datasets** selected and type in the +search bar _LeoRover_ and you will see, our dataset. Click the plus button, and +the dataset will be added to your notebook. + +::: + +Once it's uploaded to the notebook, you should see something like this: + + + +Now, you just have to run all the cells up to the "Custom tests" section to +begin the training. + +:::warning + +There is one cell, with variables that might need to change. Each of them has +provided description in a comment. Go through it before running, and check if +you need to change something. + +::: + +When the training is finished, you'll see your `tflite` model (the name may +differ if you've changed the correct variable in the correct section) in the +output section. + + + +The only thing that you need now is to download the file and place it on the +rover. You can download the model by clicking the three dots that show up when +you go with the cursor over the file. + + + +Then, just press **Download** and the model will be downloaded. Now, you have to +place it on the rover, you can follow the instructions from +[this tutorial](../guides/upload-files-to-rover). + +:::info + +In the last section, notebook provides two functions to visualize model +features. Using them, you can visualize kernels from convolutional layers and +feature maps. Both functions are documented in the notebook, so just read them +to see all their parameters + +::: diff --git a/docs/leo-rover/leo-examples/object-detection.mdx b/docs/leo-rover/leo-examples/object-detection.mdx new file mode 100644 index 00000000..c0cbea23 --- /dev/null +++ b/docs/leo-rover/leo-examples/object-detection.mdx @@ -0,0 +1,280 @@ +--- +title: 'Leo Rover Example: Real-Time Object Detection with TensorFlow Lite' +sidebar_label: Object detection +sidebar_position: 3 +keywords: + - object + - detection + - ai + - tensorflow + - example + - stock + - leo + - rover + - tutorial +description: >- + Detect objects in real-time on the Leo Rover using pre-trained models, with + TensorFlow Lite library. Step-by-step tutorial for stock Leo Rover. +image: /img/robots/leo/integrations/object-detection/object-detection-2.webp +--- + +# Example: How to Detect Objects + +import LiteYouTubeEmbed from 'react-lite-youtube-embed'; + +In this example, we will show you how to run Object Detection on the Leo Rover +mobile robot. + +## What to expect? + +After completing this tutorial, your rover should be able to recognize 91 +objects from the COCO dataset +([listed here](https://blog.roboflow.com/coco-dataset/#coco-dataset-class-list)), +and display an image with drawn bounding boxes around the detected objects. + +
+ +
+ +## Prerequisites + + + + + + +## List of components + +1. Any computer which you can connect to the rover via `ssh`. +2. Stock Leo Rover + +## Software integration + +{/* TODO */} {/* :::tip */} + +{/* Since LeoOS x.x.x release, leo_examples package is installed by default. If the */} +{/* system is updated to at least x.x.x release you can skip software integration */} +{/* part. */} + +{/* ::: */} + +:::info + +To complete those steps, you need to connect to the rover's network first, and +then log in using ssh (both covered in prerequisites). + +::: + +Our neural network model was converted to +[TensorFlow Lite](https://www.tensorflow.org/lite), so you need to install it on +your rover too. As you shouldn't install python packages system-wide, we will +create virtual environment and install the module inside it: + +```bash +sudo apt install python3-pip python3-venv +python3 -m venv env_ros --system-site-packages +source env_ros/bin/activate +pip3 install ai-edge-litert +``` + +### Installing using apt + +You can install the package using `apt` by typing **on the rover**: + +```bash +sudo apt install ros-${ROS_DISTRO}-leo-examples +``` + +Then you just need to source the `ROS` workspace: + +```bash +source /opt/ros/${ROS_DISTRO}/setup.bash +``` + +### Building from source + +You can also get all needed software from our +[leo_examples](https://github.com/LeoRover/leo_examples-ros2) github repository. +You need to clone it on the rover in the ros workspace directory (if there's no +such a directory, first go through the +[ROS Development](../advanced-guides/ros-development) tutorial): + +```bash +cd ~/ros_ws/src +git clone https://github.com/LeoRover/leo_examples-ros2.git +``` + +Now, you need to install all the dependencies for the downloaded packages: + +```bash +cd ~/ros_ws +sudo apt update +rosdep update +rosdep install --from-paths src -ir +``` + +Then, you need to source the directory and build the packages: + +```bash +cd ~/ros_ws +source install/setup.bash +colcon build +``` + +:::info + +If your installation went without any errors, then you have successfully +installed required software. + +::: + +## Examples + +### Using given models + +Running the object detection node is very simple. First, you need to connect to +the rover via `ssh`. Once you are logged in to the rover, you can launch the +node using `ros2 launch` command: + +```bash +ros2 launch leo_example_object_detection detector.launch.xml +``` + +The given launch file has a few arguments: + +- `camera_topic` - the name of the topic with the `Image` messages (you can + specify it if you have changed basic setup on the rover or maybe have two + cameras) +- `labels` - a path to the file with labels for the model (a parameter provided + in case you want to try other neural network models that were trained on other + datasets than `COCO`) +- `model` - a path to neural network model for object detection (the models + given by us are in the `models` directory of the + `leo_example_object_detection` package) +- `config_file` - a path to the yaml file with defined colors for given labels. + +:::note + +- Every argument has default value, so when launching the node, you don't need + to specify any of them. Those are for your use, if you want to change the + default functionality. You can see the default values with command: + ```bash + ros2 launch leo_example_object_detection detector.launch.xml -s + ``` +- You can create your own `config_file`, or modify the one provided by us. 
What + matters is that the file you provide has specific structure - it must start + with `labels` keyword, and each consecutive line must be of form `key: value`, + where `key` is a label specified in labels file for the model, and `value` is + a list of 3 integers representing RGB values, that will be used for displaying + given object. For each label, that is not defined in this file, there is a + default color, so you don't need to define all the labels. Here is our example + configuration which you can use as a base for your label coloring: + ```yaml title="labels_config.yaml" + labels: + person: [255, 0, 0] + chair: [0, 0, 255] + backpack: [0, 255, 0] + cell_phone: [153, 0, 153] + ``` + The indentation for each defined label is important - they all need to be in + `labels` "namespace". + +::: + +So, with some arguments your line can look like this: + +```bash +ros2 launch leo_example_object_detection detector.launch.xml config_file:= +``` + +To be able to display the output of the model you need to have ROS installed on +your computer and be connected to the rover's network. + +If you don't have ROS installed, you can follow this guide: + + + +Being connected and having ROS installed, next you have to source the `ROS` +workspace: + +```bash +source /opt/ros/${ROS_DISTRO}/setup.bash +``` + +Now just run `rqt` on your computer: + +```bash +rqt +``` + +There, you need to run two things: + +- `Image View` (Plugins -> Visualization -> Image View) +- `Dynamic Reconfigure` (Plugins -> Configuration -> Dynamic Reconfigure) + +In `Image View`, from the topic drop down choose `/detections/compressed` - this +is the processed image with drawn detections on it. In `Dynamic Reconfigure`, +choose `object_detector`. + +You should see something like this: + + + +In the `Parameter Reconfigure` window you are able to change the node's +parameters values. The only important one is `confidence` - it specifies the +confidence threshold for the neural network guesses (only the detections with +confidence higher than the specified will be displayed). You can change the +value to see how the detections change. + +Place objects inside the view of the camera and if they are a part of the +dataset, and the algorithm recognizes them with enough confidence, boxes around +the item and a text description will appear. + + + +### Adding models of your choice + +It's possible to run the node with your models (either made from scratch or +found on the internet). To launch the node with your files, you have the launch +arguments. You can specify their values to make the node use your files. + +:::info + +If you provide model, that was trained on other dataset than COCO, then you will +need to give the node labels for your model too. + +::: + +:::warning + +Not every object detection model will be compatible with our node. The models +that we have provided in the _models_ directory of the package, are pretrained, +single-shot detector models, converted to TensorFlow Lite from the TensorFlow +repository. + +So if you want the model of your choice to be compatible with our node, the +model needs to follow the same output signature as the TF models. + +::: + +## What next? + +After completing this tutorial, you can try other examples from the leo_examples +repository ([line follower](line-follower) and +[follow Aruco Marker](follow-marker)), or try other integration from our site. 
diff --git a/docs/leo-rover/manuals/wheel-assembly.mdx b/docs/leo-rover/manuals/wheel-assembly.mdx index c4ec807d..add4ca09 100644 --- a/docs/leo-rover/manuals/wheel-assembly.mdx +++ b/docs/leo-rover/manuals/wheel-assembly.mdx @@ -267,7 +267,7 @@ screw**. Use a drop of loctite to make sure it stays in place. }} /> -## Step 2 +## Step 3 diff --git a/docs/raph-rover/documentation/_user-manual.mdx b/docs/raph-rover/documentation/_user-manual.mdx deleted file mode 100644 index c5d46225..00000000 --- a/docs/raph-rover/documentation/_user-manual.mdx +++ /dev/null @@ -1,509 +0,0 @@ ---- -title: Raph Rover User Manual | Operation and Maintenance -sidebar_label: User manual -sidebar_position: 2 -keywords: - - raph - - rover - - mobile - - robot - - user - - manual -description: >- - User manual for Raph Rover -image: /img/robots/raph/raph-rover.webp -toc_max_heading_level: 4 ---- - -# Raph Rover User Manual - -import styles from './styles.module.css'; -import useBaseUrl from '@docusaurus/useBaseUrl'; -import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; -import CenterContent from '@site/src/components/CenterContent'; - - - -TODO: - -- Split this file into more smaller ones - User Manual and Developer Guides -- Review data, text, shorten, make it sound less like GPT wrote it -- Remove stuff thats copied from specification -- Remove all circlejerk text - - - -### What's included - - - -| Item | Count | -| -------------------------- | :---: | -| Raph Rover | 1 | -| Raph Rover battery pack | 2 | -| Raph Rover battery charger | 1 | -| Li-Ion charger 6S 4A | 1 | -| Tools | 1 | - - - -### Seals - - - Both to keep the rover looking cleaner, and to provide extra weather and dust - resistance a system of seals around the rover can be used. - - -To ensure weather-tightness while accommodating additional payloads, the Raph -Rover is equipped with specialized cable seals. To insert additional cables, it -is necessary to dismantle the entire panel by loosening two thumb screws. Once -the panel is disassembled, individual seals can be removed from the frame, -allowing for the insertion of new cables. - -Raph Rover comes with different seals, labeled as QT [Z]/[X] or QT [X], where Z -indicates the number of holes in the seal and X indicates the diameter of the -cable in millimeters. - - - -### Block diagram - -### Drivetrain - - - {' '} - An image is worth a 1000 words - remove text, replace with image showing the - movement - - -- turning in place ( around middle point between frontal wheels) -- turning around specific frontal wheel -- turning around a curve - - - -| Parameter | Value | -| ----------------------------- | :---------: | -| Track Width | 385 mm | -| Wheelbase length | 379 mm | -| Ground clearance | 105 mm | -| Climb grade (no payload) | 30° ( ~58%) | -| Climb grade with 5kg payload | 20° ( ~36%) | -| Climb grade with 10kg payload | 10° ( ~18%) | -| Hill grade traversal | 30° ( ~58%) | -| Nominal torque | 4 Nm | -| Maximum torque | 8 Nm | - - - -The standard configuration of the Raph Rover features a drivetrain comprising -two fixed wheels at the front and two rotary wheels at the rear. This design -enhances the mobility of the Rover, enabling it to execute precise maneuvers -such as: - -- turning in place ( around middle point between frontal wheels) -- turning around specific frontal wheel -- turning around a curve - -This versatile drivetrain configuration optimizes the Rover's navigational -capabilities, ensuring adaptability in various operational scenarios. 
- -#### Traction parameters - - - -| Parameter | Value | -| ----------------------------- | :---------: | -| Track Width | 385 mm | -| Wheelbase length | 379 mm | -| Ground clearance | 105 mm | -| Climb grade (no payload) | 30° ( ~58%) | -| Climb grade with 5kg payload | 20° ( ~36%) | -| Climb grade with 10kg payload | 10° ( ~18%) | -| Hill grade traversal | 30° ( ~58%) | -| Nominal torque | 4 Nm | -| Maximum torque | 8 Nm | - - - -#### Motors - - - {' '} - Do ve really want to tell people that we are using chinese motors? Just say - that they are high quality, direct drive, brushless motors with integrated - encoder and stop there. - - -The Raph Rover is equipped with 6 DirectDriveTech M0601C-111 direct-drive BLDC -motors, serving both the drive and turn functions. These motors enable the rover -to attain a speed of approximately 1.8 m/s when equipped with standard diameter -tires. Each motor is outfitted with an integrated incremental encoder with a -resolution of 12 bits. - -To mitigate potential operational challenges, a mechanical bumper restricts the -turn angle of each steering wheel to 180°. This bumper not only acts as a limit -for wheel rotation but also serves to establish the wheel's position when the -rover is restarted, ensuring consistent and reliable performance. - -#### Wheels - -As standard, Raph Rover comes with rubber tires with a diameter of about 170 mm. -Whole drive assembly has following characteristics: - - - -| Parameter | Value | -| -------------------------------- | :----------------: | -| Tire size (diameter x thickness) | ~ 170 mm x ~ 80 mm | -| Tire lock type | Beadlock | -| Tire insert | Hard foam | -| Wheel rim diameter | 120 mm | - - - -### Battery & charging - - - Copy what Getting Started says about batteries here - - -Raph Rover boasts an advanced battery management system, a feature enabling the -simultaneous utilization of two battery packs. This innovative system empowers -the rover to seamlessly operate for approximately four continuous hours under -decent conditions (e.g. relatively flat terrain). The dual-battery configuration -enhances the overall endurance and sustained performance of the rover. - -A notable feature of the design is the ability to detach the battery packs -without requiring a power-off cycle. By connecting the charger to the rear slot -of the Rover, users can safely remove both batteries. This functionality -streamlines the process of swift and efficient battery replacements, resulting -in minimal downtime during operations. The seamless swapping of battery packs -not only extends the operational range of the rover but also mitigates the -necessity for frequent service stops, thereby elevating the overall efficiency -and productivity of the robotic platform. - -Each battery pack has following characteristics: - - - -| Parameter | Value | -| -------------------- | :--------------------------------------: | -| Voltage | 22.2 V (nominal) | -| Battery type | Li-Ion | -| Capacity | 4 Ah (96 Wh - flight safe) | -| Battery pack type | 6S1P | -| Maximum output power | ~650 W | -| Safety systems | Overcurrent, Reverse polarity protection | - - - -#### Battery charging - - - {' '} - Battery and charging -> battery charging - do we really need a whole section - about this? - - -By default Raph Rover comes with 2A 25,2V Li-Ion charger. It can charge 1 -battery pack in about 2,5 hours. 
- -Raph Rover has 2 options for charging batteries: - -- Outside the Rover, by plugging battery into charger -- Inside the Rover, by plugging charger directly into rear charging slot of the - Rover - -The charger is outfitted with an LED status indicator designed to provideS -information about the charging status of the battery. A green LED signifies that -the battery has reached full charge, while a red LED indicates that the battery -is currently in the charging process. - -:::note - -When charging the batteries with the Rover powered on, the LED indicator will -not transition to green during the charging cycle. This is due to the fact that -the charger will be used to sustain the operational state of the rover after the -charging process. - -::: - -## Software specification - - - Software specification - this section should be called "Software overview" and - have its content reviewed. - - -### Overview - -Raph Rover's software heavily relies on The -[Robot Operating System (ROS)](https://ros.org/) which offers the robot the -following functionalities: - -- Abstraction layer facilitating communication between software components. -- Open-source software components, maintained by the community. -- A collection of standard message interfaces. -- Tools for introspection. - -The primary segment of the software stack is partitioned into several -[ROS Nodes](https://wiki.ros.org/Nodes), treated as computational units, each -doing one logical thing. The nodes interact via: - -- **Topics** - Named buses enabling message exchange between nodes. They are - strongly typed and employ anonymous publish/subscribe semantics. -- **Services** - A client/server mechanism for remote procedure calls between - nodes. The service server accepts remote procedure requests identified by name - and type, which must be known to the service client beforehand. -- **Parameters** - Sets of key/value pairs maintained separately by each node, - utilized for node configuration during startup and runtime without - necessitating code modifications. -- **TF transforms** - A single transform describes the relationship between 2 - coordinate frames at a specific point in time. TF transforms are distributed - between nodes using topics, but, for the sake of clarity, we will refer to - them as separate entities. - -There are two important software components which don't run as native ROS nodes: - -- **Controller firmware** - The firmware itself acts as a ROS node but uses - eProsima's - [Micro XRCE-DDS](https://micro-xrce-dds.docs.eprosima.com/en/latest/) as its - middleware. Thus, it requires the presence of the Micro-ROS Agent on the - built-in computer to communicate with other ROS nodes. -- **Web User Interface** - The WebUI establishes a connection with the Rosbridge - Server via WebSocket transport layer and employs the rosbridge protocol for - communication with the ROS nodes. - - - -### ROS nodes - - - Table to be formatted as all the other tables in the documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
ROS NodeDescription
Controller Node - The node spawned by the Controller firmware. Provides access to functionalities - via topics and services for the management of the drivetrain, LED panels, Power - System and many more. Additionally, it publishes relevant information, - including: - - - State of the dynamic joints controlled by RaphCore. - - Data retrieved from the onboard IMU sensor. - - Power system status, comprising: - - Battery voltages. - - Connection status of batteries or the charger to the robot. - - Current output power. - - Computed odometry. - - Diagnostic information for various system components. -
Robot State Publisher - Parses the kinematic tree model of the robot in URDF format and broadcasts the - robot state using TF transforms. Here's how it operates: - - - Fixed joints, like sensor positions, are published as static transforms. - - Movable joints, such as wheel states, are published as dynamic transforms, - based on the current joint states published by the Controller Node. - - It also publishes the robot URDF description on a designated topic, making it - easily accessible to other nodes. -
Camera Driver - Publishes the data provided by the stereoscopic camera. In the default - configuration used in Raph Rover, it includes: - - - Images captured by the left, right and RGB (center) camera image sensors. - - Calibration parameters for each image sensor. - - Data from the camera's IMU sensor. - - Depth images computed from the left and right sensors. -
Depth Processing Node - Publishes point cloud data computed from depth images and camera calibration - parameters. - - Employs “lazy subscription”, meaning it refrains from processing any depth - images until at least one node subscribes to the Point Cloud topic. -
LIDAR Driver - Publishes data from the LIDAR sensor. - - Additionally, it provides services for starting and stopping the sensor's motor. -
- - - -Different robot applications will require running different sets of additional -nodes on top of the core ones. For example, consider a situation where we want -to autonomously drive to predefined coordinates in an unknown terrain. Typical -solution can be simplified to 2 nodes: - -- SLAM (Simultaneous localization and mapping) node - generates a 2D terrain map - and localizes the robot within it. -- Autonomous Navigation node - Retrieves navigation goals and outputs drive - commands for the controller. Uses 2D terrain map to plan an optimized path and - data from LiDAR and stereo camera to detect dynamic obstacles. - - - -These nodes can be configured to start alongside the core ones, extending the -robot with autonomous navigation capabilities. - -### Firmware - - - {' '} - Actually finish the section {' '} - - - -{/* TODO */} - -_Work in progress_ - -#### LED controller - - - Review - shorten stuff, make sure that all led states are described and - ACTUALLY USED by the rover. Decide if we should add priority levels to the - states - if so, add a table with priorities. - - -The built-in LED controller manages the LED states of the robot by processing -user-defined states and animations, integrating them with ongoing animations, -and updating the LEDs accordingly. - -Users can control the LED strip by specifying colors, duration, and priority of -specific LEDs. This allows for in-depth control, enabling users to determine -which LED states are the most important. Critical states, such as errors, are -displayed over less important uses, like illuminating the surroundings. - -The robot features four LED panels that combine to form one long LED strip. LEDs -can be controlled either as a full strip or as specific panels, providing -flexibility for different use cases: - -- **Full Strip Control**: Ideal for animations that span the entire perimeter of - the robot, such as police lights. -- **Panel-Specific Control**: Useful for targeted functions, such as using all - front LEDs as a flashlight or using side LEDs to indicate battery levels. - -#### Robot status Indicators descriptions - -##### Normal states - - - -| LED | LED description | State description | -| :-------------------------------------------------------: | :----------------------------: | :----------------------------------------------------------: | -| | White, flashing | Robot booting up\* | -| | White, breathing | Idle, rover on, motors relaxed\* | -| | White, constant | Rover on, motors relaxed\* | -| | Blue, breathing | Turning wheels not calibrated\* | -| | Blue, constant | Turning wheels calibration | -| | Yellow, constant | Maintenance mode | -| | Green, constant - side panels | Battery charge indicator | -| | Green, breathing - side panels | Battery charging indicator, turns solid when fully charged\* | - - - - \* - not implemented currently - - \* - Use images to show battery charge indicator - -##### Fault states - - - -| LED | LED description | State description | -| :-----------------------------------------------------: | :-----------------: | :------------------------: | -| | Red, flashing | Rover error detected\* | -| | Red, 3 fast flashes | Multiple errors detected\* | - - - - \* - not implemented currently - -### Web User Interface - - - Mention that cameras connected to the rover can easily be viewed in the UI. 
- - -### Network - -_Work in progress_ - -{/* TODO */} diff --git a/docs/raph-rover/documentation/getting-started.mdx b/docs/raph-rover/documentation/getting-started.mdx index 091e95b0..21f208f7 100644 --- a/docs/raph-rover/documentation/getting-started.mdx +++ b/docs/raph-rover/documentation/getting-started.mdx @@ -17,6 +17,7 @@ import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; import FlexTable from '@site/src/components/FlexTable'; import FlexTableItem from '@site/src/components/FlexTableItem'; import CenterContent from '@site/src/components/CenterContent'; +import LinkButton from '@site/src/components/LinkButton'; import Support from '@site/docs/raph-rover/partial/_support.mdx'; @@ -260,79 +261,14 @@ driving, preview stream from on-board camera. To access it, open your web browser and navigate to [10.10.0.2](http://10.10.0.2). -From here, you can control the rover, access the camera feed, and configure -various settings. You can control the rover using the on-screen joystick. +From here, you can control the rover using keyboard and on-screen joystick, +access the camera feed, and configure various settings. -:::tip - -You can use gamepads, including Xbox and PlayStation controllers, with the Web -UI. First, plug the controller into your device. The UI will confirm the -connection with a "Gamepad connected" notification. You can then immediately -start controlling the robot with the gamepad. - -::: - -_More inforation coming soon_ - -{/* TODO: ADD UI Pictures */} - -## Maintenance - -### Charging batteries - -Raph Rover comes with a 4A 25,2V Li-Ion charger. It can charge 1 battery pack in -about 1.5 hour via the dedicated charging slot. - - - -The charger (not the dock) is outfitted with an LED status indicator designed to -provide information about the charging status of the battery. A green LED -signifies that the battery has reached full charge, while a red LED indicates -that the battery is currently in the charging process. - -As an alternative, the batteries can be charged via the rover, by leaving them -inside the battery ports and connecting the charger to the rover's power input -located at the back. - - - -This way it's possible to charge both batteries at the same time, however the -charging current will be split between them, resulting in a longer charging time -of around 2.5 h. The Rover does not need to be powered off for this process. +:::info -:::note +To find out more about using the Web UI, visit the documentation page below: -When working on the rover while charging the batteries the LED indicator of -power adapter will not transition to green when charging is complete. Power draw -of the rover will keep the charger in active (red) state. To check battery -level, press the power button once. Fully illuminated LED side panels will -indicate fully charged batteries. + ::: @@ -341,9 +277,7 @@ indicate fully charged batteries. After initial testing, you can explore what else Raph Rover has to offer. Start developing your own robotic application with help of our tutorials: -_Coming soon_ - -{/* TODO: Add link to guides */} + ### Support diff --git a/docs/raph-rover/documentation/ros-api.mdx b/docs/raph-rover/documentation/ros-api.mdx index b367a769..2834981d 100644 --- a/docs/raph-rover/documentation/ros-api.mdx +++ b/docs/raph-rover/documentation/ros-api.mdx @@ -24,27 +24,28 @@ image: /img/robots/raph/raph-rover.webp Steers the robot when operating in the Ackermann steering mode. -- `controller/cmd_turn_in_place` (?) 
**Not implemented yet** +- `controller/cmd_led_panel` ([raph_interfaces/msg/LedStripState]) - Steers the robot when operating in the Turn-In-Place steering mode. + Sets a new user state for all the LEDs in the LED strip. -- `controller/cmd_vel` ([geometry_msgs/msg/Twist]) **Not implemented yet** +- `controller/cmd_led_strip` ([raph_interfaces/msg/LedPanelState]) - Steers the robot using `Twist` commands. More standardized way to control the - robot. \ - Useful for some navigation stacks. Not recommended for manual teleoperation. + Sets a new user state for all the LEDs in the specified LED panel. -- `controller/led_strip_state` ([raph_interfaces/msg/LedStripState]) +- `controller/cmd_turn_in_place` ([raph_interfaces/msg/TurnInPlaceDrive]) - Sets a new user state for all the LEDs in the LED strip. + Sets the target angular velocity when operating in the turn-in-place steering + mode. $[rad]$ -- `controller/led_panel_state` ([raph_interfaces/msg/LedPanelState]) +- `controller/cmd_vel` ([geometry_msgs/msg/Twist]) - Sets a new user state for all the LEDs in the specified LED panel. + Steers the robot using Twist commands. More standardized way to control the + robot. Useful for some navigation stacks. Not recommended for manual + teleoperation. Only uses `linear.x` and `angular.z` components. ## Published Topics -- `controller/imu` ([sensor_msgs/msg/Imu]) +- `controller/imu/data_raw` ([sensor_msgs/msg/Imu]) Current gyroscope and accelerometer readings from the onboard IMU. @@ -52,13 +53,6 @@ image: /img/robots/raph/raph-rover.webp Odometry calculated from wheel encoders and the onboard IMU. -- `controller/steering_mode` ([raph_interfaces/msg/SteeringMode]) **Not - implemented yet** - - Current steering mode. Published whenever the mode changes. \ - Uses transient local durability to deliver last message to joining - subscriptions. - - `controller/diagnostics/imu` ([raph_interfaces/msg/ImuDiagnostics]) Diagnostic information about the IMU module. \ @@ -69,10 +63,22 @@ image: /img/robots/raph/raph-rover.webp Diagnostic information about the motors. \ Includes motor temperatures, power consumption and fault data. +- `controller/drivetrain_state` ([raph_interfaces/msg/DrivetrainState]) + + Current drivetrain state, including steering mode and calibration status. + +- `controller/led_strip_state` ([raph_interfaces/msg/LedStripState]) + + Current state of the whole LED strip. + - `controller/power_system_state` ([raph_interfaces/msg/PowerSystemState]) Current information about the power system. +- `imu/data` ([sensor_msgs/msg/Imu]) + + Filtered IMU readings including orientation. + - `oak/imu/data` ([sensor_msgs/msg/Imu]) Current gyroscope and accelerometer readings from the Oak-D IMU. @@ -118,7 +124,7 @@ image: /img/robots/raph/raph-rover.webp Raw laser scans from the onboard 2D LIDAR. -- `rplidar/scan_filtered` ([sensor_msgs/msg/LaserScan]) **Not implemented yet** +- `rplidar/scan_filtered` ([sensor_msgs/msg/LaserScan]) Laser scans from the onboard 2D LIDAR with points lying on the robot's footprint filtered out. @@ -180,11 +186,11 @@ image: /img/robots/raph/raph-rover.webp Set the current steering mode. -- `system/shutdown` ([std_srvs/srv/Trigger]) +- `raph_system/shutdown` ([std_srvs/srv/Trigger]) Perform onboard computer's system shutdown. -- `system/reboot` ([std_srvs/srv/Trigger]) +- `raph_system/reboot` ([std_srvs/srv/Trigger]) Perform onboard computer's system reboot. 
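
A quick way to exercise this interface from a terminal is the standard ROS 2
CLI. A minimal sketch, assuming the default topic and service names listed on
this page (they may be namespaced or remapped differently on your robot):

```bash
# Stream the current power system status (Ctrl+C to stop).
ros2 topic echo controller/power_system_state

# Drive slowly forward using the generic Twist interface
# (only linear.x and angular.z are used).
ros2 topic pub --once controller/cmd_vel geometry_msgs/msg/Twist "{linear: {x: 0.2}}"
```
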
@@ -192,6 +198,14 @@ image: /img/robots/raph/raph-rover.webp ### controller +- `drivetrain.ackermann_deceleration` (type: `float`, default: `5.0`) + + Deceleration to use when operating in ackermann mode. $[\frac{m}{s^2}]$ + +- `drivetrain.turn_in_place_deceleration` (type: `float`, default: `8.0`) + + Deceleration to use when operating in turn-in-place mode. $[\frac{rad}{s^2}]$ + - `drivetrain.wheel_base` (type: `float`, default: `0.382`) Distance between front and rear wheels. $[m]$ @@ -218,6 +232,16 @@ image: /img/robots/raph/raph-rover.webp Steering angle velocity to use when changing steering mode. $[\frac{rad}{s}]$ +- `drivetrain.cmd_vel_acceleration` (type: `float`, default: `2.0`) + + Acceleration to use when sending cmd_vel commands. + $[\frac{m}{s^2}, \frac{rad}{s^2}]$ + +- `drivetrain.cmd_vel_steering_angle_velocity` (type: `float`, default: `5.0`) + + Steering angle velocity to use when sending cmd_vel commands. + $[\frac{rad}{s}]$ + - `drivetrain.max_steering_angle` (type: `float`, default: `1.08`) Maximum steering angle used in ackermann mode. $[rad]$ @@ -240,6 +264,25 @@ image: /img/robots/raph/raph-rover.webp Offset added to the right servo position. $[rad]$ +- `drivetrain.servo_calibration_speed` (type: `float`, default: `2.0`) + + Speed of servos during calibration. $[\frac{rad}{s}]$ + +- `drivetrain.servo_calibration_timeout` (type: `float`, default: `6.0`) + + Timeout for servo calibration procedure. $[s]$ + +- `drivetrain.servo_calibration_torque_samples` (type: `int`, default: `5`) + + Number of consecutive servo torque readings over the threshold required during + calibration. + +- `drivetrain.servo_calibration_torque_threshold` (type: `float`, default: + `1.0`) + + Minimal motor torque that indicates the servo presses against the bumper + during calibration. $[Nm]$ + - `drivetrain.servo_calibration_power_threshold` (type: `float`, default: `20.0`) @@ -258,12 +301,45 @@ image: /img/robots/raph/raph-rover.webp Raw acceleration divider value passed to wheel motors. +- `power_manager.output_12v_enabled` (type: `bool`, default: `true`) + + Whether the 12V output is enabled. + +- `power_manager.output_5v_enabled` (type: `bool`, default: `true`) + + Whether the 5V output is enabled. + +- `power_manager.output_bat_enabled` (type: `bool`, default: `true`) + + Whether the BAT output is enabled. + {/* TODO: Other nodes parameters */} ## Custom message types {/* TODO: Remove this section after raph_interfaces message definitions start being hosted in docs.ros2.org */} +- `raph_interfaces/msg/DrivetrainState` + + + +```bash title="Represents the state of the drivetrain" + +# Current steering mode. +raph_interfaces/SteeringMode steering_mode + +uint8 OPERATING_STATE_DISABLED=0 +uint8 OPERATING_STATE_ENABLED=1 +uint8 OPERATING_STATE_CALIBRATING_SERVOS=2 +uint8 OPERATING_STATE_CHANGING_STEERING_MODE=3 + +# Current operating state. +uint8 operating_state + +# Indicates if the servos are calibrated. +bool is_servos_calibrated +``` + - `raph_interfaces/msg/SteeringMode` @@ -463,6 +539,16 @@ float64 gyro_bias_y float64 gyro_bias_z ``` +- `raph_interfaces/msg/TurnInPlaceDrive` + + + +```bash title="A command to turn the robot in place." 
+ +float64 angular_velocity +float64 acceleration +``` + ## Custom service types {/* TODO: Remove this section after raph_interfaces message definitions start being hosted in docs.ros2.org */} @@ -531,6 +617,8 @@ string status_message https://docs.ros2.org/latest/api/tf2_msgs/msg/TFMessage.html [raph_interfaces/msg/BatteryMode]: #msg-battery-mode [raph_interfaces/msg/BatteryState]: #msg-battery-state +[raph_interfaces/msg/DrivetrainState]: #msg-drivetrain-state +[raph_interfaces/msg/TurnInPlaceDrive]: #msg-turn-in-place-drive [raph_interfaces/msg/Led]: #msg-led [raph_interfaces/msg/LedColor]: #msg-led-color [raph_interfaces/msg/LedState]: #msg-led-state diff --git a/docs/raph-rover/documentation/specification.mdx b/docs/raph-rover/documentation/specification.mdx index 4dc991e6..4e8f8d18 100644 --- a/docs/raph-rover/documentation/specification.mdx +++ b/docs/raph-rover/documentation/specification.mdx @@ -16,7 +16,6 @@ image: /img/robots/raph/raph-rover.webp toc_max_heading_level: 4 --- -import styles from './styles.module.css'; import useBaseUrl from '@docusaurus/useBaseUrl'; import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; import CenterContent from '@site/src/components/CenterContent'; @@ -211,6 +210,22 @@ as: +#### Wheels + +As standard, Raph Rover comes with rubber tires with a diameter of about 170 mm. +Whole drive assembly has following characteristics: + + + +| Parameter | Value | +| ---------------------------- | :----------------: | +| Tire size (diameter x width) | ~ 170 mm x ~ 80 mm | +| Tire lock type | Beadlock | +| Tire insert | Hard foam | +| Wheel rim diameter | 120 mm | + + + ### Components @@ -255,15 +270,16 @@ and has built-in protections. -| Parameter | Value | -| -------------------- | :---------------------------------------------------------------------: | -| Battery type | Li-Ion | -| Battery pack type | 6S1P | -| Cell type | Samsung 40T | -| Nominal voltage | 22.2 V | -| Capacity | 4000 mAh
88.8 Wh (flight safe) | -| Maximum output power | ~650 W | -| Safety systems | Overcurrent
Undercurrent
Reverse polarity
Overheat
| +| Parameter | Value | +| -------------------- | :---------------------------------------------------: | +| Battery type | Li-Ion | +| Battery pack type | 6S1P | +| Nominal voltage | 21.6 V | +| Charge capacity | 4 Ah | +| Energy capacity | $\approx$ 86.4 Wh | +| Maximum output power | $\approx$ 650 W | +| Battery cell | Samsung INR21700-40T | +| Safety systems | Overcurrent, Undervoltage, Reverse polarity, Overheat |
diff --git a/docs/raph-rover/documentation/styles.module.css b/docs/raph-rover/documentation/styles.module.css deleted file mode 100644 index cd83629b..00000000 --- a/docs/raph-rover/documentation/styles.module.css +++ /dev/null @@ -1,129 +0,0 @@ -.circle { - height: 50px; - width: 50px; - border-radius: 50%; - display: block; -} - -[data-theme='light'] .LEDcell { - background-color: var(--ifm-color-gray-200); -} - -#flashWhite { - animation: whiteAnimation 1.5s steps(1, end) infinite; -} - -#breathWhite { - animation: whiteAnimation 3s linear infinite; -} - -#white { - background: white; -} - -#flashBlue { - animation: blueAnimation 1.5s steps(1, end) infinite; -} - -#breathBlue { - animation: blueAnimation 3s linear infinite; -} - -#breathGreen { - animation: greenAnimation 3s linear infinite; -} - -#blue { - background: blue; -} - -#yellow { - background: yellow; -} - -#green { - background: limegreen; -} - -#flashRed { - animation: redAnimation 1.5s steps(1, end) infinite; -} - -#flashRed3 { - animation: red3flashAnimation 2s steps(1, start) infinite; -} - -@keyframes whiteAnimation { - 0% { - background: white; - } - 50% { - background: none; - } - 100% { - background: white; - } -} - -@keyframes blueAnimation { - 0% { - background: blue; - } - 50% { - background: none; - } - 100% { - background: blue; - } -} - -@keyframes greenAnimation { - 0% { - background: limegreen; - } - 50% { - background: none; - } - 100% { - background: limegreen; - } -} - -@keyframes redAnimation { - 0% { - background: red; - } - 50% { - background: none; - } - 100% { - background: red; - } -} - -@keyframes red3flashAnimation { - 0% { - background: none; - } - 10% { - background: red; - } - 20% { - background: none; - } - 40% { - background: red; - } - 50% { - background: none; - } - 60% { - background: red; - } - 70% { - background: none; - } - 100% { - background: none; - } -} diff --git a/docs/raph-rover/documentation/user-manual/_category_.json b/docs/raph-rover/documentation/user-manual/_category_.json new file mode 100644 index 00000000..0edd143b --- /dev/null +++ b/docs/raph-rover/documentation/user-manual/_category_.json @@ -0,0 +1,10 @@ +{ + "label": "User Manual", + "position": 2, + "collapsible": true, + "collapsed": false, + "link": { + "type": "doc", + "id": "user-manual" + } +} diff --git a/docs/raph-rover/documentation/user-manual/control-and-basic-operations.mdx b/docs/raph-rover/documentation/user-manual/control-and-basic-operations.mdx new file mode 100644 index 00000000..8220b9a4 --- /dev/null +++ b/docs/raph-rover/documentation/user-manual/control-and-basic-operations.mdx @@ -0,0 +1,150 @@ +--- +title: Control & Basic Operations +sidebar_label: Control & Basic Operations +sidebar_position: 2 +keywords: + - raph + - rover + - mobile + - robot + - control + - teleoperation +description: >- + Documentation on how to control and operate Raph Rover, including + teleoperation methods and basic functionalities. 
+image: /img/robots/raph/raph-rover.webp +toc_max_heading_level: 4 +--- + +import styles from './styles.module.css'; +import LinkButton from '@site/src/components/LinkButton'; +import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; +import useBaseUrl from '@docusaurus/useBaseUrl'; +import CenterContent from '@site/src/components/CenterContent'; +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# Control & Basic Operations + +## Connectivity & network configuration + +Raph Rover comes with a built-in router that provides a Wi-Fi access point and +can be configured to suit the user's needs. The router supports both 2.4 GHz and +5 GHz frequency bands, allowing for flexible connectivity options. Users can +connect their devices to the rover's Wi-Fi network to access the Web User +Interface and rover's onboard computer. + +To learn how to connect to the Raph Rover's Access Point, follow guide below: + + + +The table below describes rover's network configuration: + +| Device | IP Address or hostname | Description | +| ------------ | :------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Router | `10.0.0.1` | The built-in router providing Wi-Fi access point. | +| UPBoard 7000 | `10.0.0.2` or `raph.local` | The built-in computer running ROS and other software components. Uses mDNS for hostname resolution. | +| RaphCore | `raphcore.local` | The microcontroller managing low-level hardware functions. Its IP address is not consistent across the rovers but can be accessed via mDNS under the name `raphcore.local`. | + +You can learn how to connect the Raph Rover to an existing Wi-Fi network or +configure network settings in the guides below: + + + + +## Understanding LED panel signals + +Raph Rover indicates its current state using LED panels. Understanding these +signals can help users diagnose issues and monitor the rover's status. + +### Normal states + + + +| LED | LED description | State action | State description | Priority | +| :---------------------------------------------------------------: | -------------------------------------------------------------- | ------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------: | +| | Green, constant - side panels | Battery state | Indicates current battery level. Can be activated by pressing the **POWER** button when the charger is disconnected. Each side panel corresponds to a battery on the robot. | 106 | +| | Color corresponding to battery charge, breathing - side panels | Battery charging | Indicates charging and shows current battery charge. When fully charged, the side panel light up green and stops breathing. Charging animation can be disabled by pressing **POWER** button when charging. | 51 | +| | Color corresponding to battery charge - side panels | Battery connected | Indicates that a battery was connected successfully. | 108 | +| | Red, fading out - side panels | Battery disconnected | Indicates that a battery was disconnected or is fully depleted. | 108 | +| | Blue segment going back and forth - side panels | Micro-ROS connection animation | Indicates that the robot is waiting for Micro-ROS connection. 
| 60 | +| | White, constant | Rover fully on | Shows that RaphCore is connected to onboard computer. When motors are relaxed a light blue segment will go around the whole LED strip. | 1 | +| | Yellow, constant | Maintenance mode | Indicates that maintenance mode is active. | 90 | +| | Blue, constant | Servo calibration | Indicates that servo calibration is in progress. | 90 | + + + +### Fault states + + + +| LED | LED description | State description | Priority | +| :---------------------------------------------------------------: | :------------------------: | :-----------------------: | :------: | +| | Red, flashing - rear panel | Servo calibration failure | 102 | +| | Red, flashing and fading | Motor communication error | 104 | +| | Red, flashing | Motor fault | 104 | + + + +Fault states also indicate which motor has failed by illuminating the segment of +the LED strip that corresponds to the position of the faulty motor, with side +panels indicating wheel motors, and rear panel indicating servos. + +## Controlling the Rover + +Raph Rover can be controlled in multiple ways, depending on the needs and +preferences. The available control methods are: + +- **Web User Interface** - From the Web UI Raph Rover can be controlled using a + gamepad, keyboard or virtual joystick. +- **ROS topics** - Users can publish commands to the appropriate ROS topics to + control the rover's movement programmatically. To see available topics refer + to the [ROS API documentation](./../ros-api.mdx). +- **Joystick teleoperation** - By using the `joy_teleop` node from + [raph_teleop](https://github.com/RaphRover/raph_common/tree/jazzy/raph_teleop) + ROS package, users can control the rover using a gamepad such as an Xbox or + PlayStation controller. + +Raph Rover supports multiple steering modes: + +- **Ackermann steering** - Suitable for smooth navigation in open areas. +- **Turn in place** - Ideal for tight spaces and precise maneuvers. + + + + + + + + + + +Steering modes can be switched via the Web UI or by calling the +`controller/set_steering_mode` [ROS service](./../ros-api.mdx#services). diff --git a/docs/raph-rover/documentation/user-manual/maintenance-and-troubleshooting.mdx b/docs/raph-rover/documentation/user-manual/maintenance-and-troubleshooting.mdx new file mode 100644 index 00000000..323d0307 --- /dev/null +++ b/docs/raph-rover/documentation/user-manual/maintenance-and-troubleshooting.mdx @@ -0,0 +1,262 @@ +--- +title: Maintenance & Troubleshooting +sidebar_label: Maintenance & Troubleshooting +sidebar_position: 4 +keywords: + - raph + - rover + - mobile + - robot + - maintenance + - troubleshooting +description: >- + Documentation on maintenance procedures and troubleshooting tips for Raph + Rover. +image: /img/robots/raph/raph-rover.webp +toc_max_heading_level: 4 +--- + +import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; +import useBaseUrl from '@docusaurus/useBaseUrl'; +import FlexTable from '@site/src/components/FlexTable'; +import FlexTableItem from '@site/src/components/FlexTableItem'; + +# Maintenance & Troubleshooting + +{/* In case of any issues with the Raph Rover, please refer to the **FAQ** and **Known Issues** pages on our docs. If issue isn't listed on these pages please [reach to us](#community--resources) for support. */} + +{/* TODO: Link to FAQ */} + +{/* TODO: Link to Known Issues */} + +## Power & Battery management + +### Power On and Shutdown procedures + +- **Power On**: Press and hold the **POWER** + button until the rear LED panel fill with green color. 
The system will proceed + through its startup sequence. +- **Shutdown**: Press and hold the **POWER** + button until the rear LED panel fill with red color. + +:::tip + +Power button is located on the **Left battery drawer** of the robot. + +::: + +### Battery Hot-Swap + +The dual-battery architecture of the Raph Rover facilitates hot-swapping, +enabling battery exchange without powering down the system. + +Before initiating a hot-swap, confirm the following conditions: + +- The Rover is stationary on a level surface. +- The battery not being replaced contains a sufficient charge level. + +:::tip + +A hot-swap can be performed without second battery while a charger is connected. +To prevent overloading the charger, ensure the motors are disabled beforehand. + +::: + +To execute the hot-swap procedure: + +1. Remove the depleted battery from its drawer. +2. Inspect the battery drawer for any debris and clear if necessary. +3. Insert a fully charged battery into the empty drawer. + +:::info + +The side LED panel provides status notifications for battery removal and +insertion. + +- **Removal:** Indicated by a red flash. +- **Insertion:** Indicated by the LEDs illuminating to the current charge + percentage of the new battery. + +::: + + + + + + + + + + +:::warning + +A battery must seat fully within the drawer. Insertion should require minimal +force, culminating in a tactile click. If resistance is encountered, abort the +insertion immediately and inspect the drawer for obstructions. + +::: + +### Battery Charging + +Raph Rover comes with a 4A 25,2V Li-Ion charger. It can charge 1 battery pack in +about 1.5 hour via the dedicated charging slot. + + + +The charger (not the dock) is outfitted with an LED status indicator designed to +provide information about the charging status of the battery. A green LED +signifies that the battery has reached full charge, while a red LED indicates +that the battery is currently in the charging process. + +As an alternative, the batteries can be charged via the rover, by leaving them +inside the battery ports and connecting the charger to the rover's power input +located at the back. + + + +This way it's possible to charge both batteries at the same time, however the +charging current will be split between them, resulting in a longer charging time +of around 2.5 h. The Rover does not need to be powered off for this process. + +:::note + +When working on the rover while charging the batteries the LED indicator of +power adapter will not transition to green when charging is complete. Power draw +of the rover will keep the charger in active (red) state. To check battery +level, press the power button once. Fully illuminated LED side panels will +indicate fully charged batteries. + +::: + +## Inspection & Cleaning + +Regular inspection and cleaning of the Raph Rover are essential to ensure +optimal performance and longevity. Follow these guidelines to maintain your +rover: + +### Internal inspection + +To inspect the internals of the rover it is required to dismantle payload and +electronics cover. + +Before dismantling ensure that the robot is: + +- stationary on a level surface, +- powered off and batteries removed. + +To open electronics bay: + +1. Unscrew the payload mounting plate (4 M5 round head screws). +2. Remove the payload mounting plate. +3. Unscrew 6 torx screws next to the side LED panels. +4. Remove top electronics cover (you can use handle next to the lidar). +5. Inspect for any loose cables water ingress etc. + +{/* TODO: drawing? 
*/} + +### Electronics removal + +In case of serious problems or issues with the robot it is possible to remove +whole internal electronics from the robot. It can be helpful with debugging +problems. + +Before dismantling ensure that: + +- [top electronics cover is removed](#internal-inspection), +- robot is stationary on a level surface, +- robot is powered off and batteries removed. + +To remove internal electronics follow instructions below: + +1. Unplug all cables connected to the internal electronics + - Interface panel (Power, USB & Ethernet) + - All of the motors + - Batteries + - Lidar + - OAK-D camera + - Power & Control buttons + - Antennas +2. Unscrew the front cover (2 screws below the front LED panel) +3. Remove the front cover +4. Unplug the USB-C cable from the OAK-D camera +5. Rotate the robot upside down +6. Unscrew 8 torx screws on the bottom of the robot +7. Carefully remove the floor with electronics + +{/* #### Steering wheel's bumper inspection */} + +{/* #### Induction sensor adjustments */} + +{/* #### Battery port cleaning */} diff --git a/docs/raph-rover/documentation/user-manual/payload-integration-basics.mdx b/docs/raph-rover/documentation/user-manual/payload-integration-basics.mdx new file mode 100644 index 00000000..f33bbe16 --- /dev/null +++ b/docs/raph-rover/documentation/user-manual/payload-integration-basics.mdx @@ -0,0 +1,118 @@ +--- +title: Payload Integration Basics +sidebar_label: Payload Integration Basics +sidebar_position: 3 +keywords: + - raph + - rover + - mobile + - robot + - payload + - mounting +description: >- + Documentation on the basics of integrating payloads with Raph Rover, including + mounting options and power considerations. +image: /img/robots/raph/raph-rover.webp +toc_max_heading_level: 4 +--- + +import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; +import useBaseUrl from '@docusaurus/useBaseUrl'; + +# Payload integration basics + +## Mechanical mountings + +Payloads can be attached to the top mounting plate using standard hardware. The +plate is designed for M5 bolts and nuts and features a regular 25 mm x 25 mm +hole grid (center-to-center spacing). This grid enables modular, repeatable +mounting of payloads and accessories while preserving precise alignment. + +Use appropriate washers and locking nuts (or thread-locking inserts) as required +for vibration resistance. Ensure bolt length provides sufficient thread +engagement without contacting internal components. + +If your payload uses a different hole layout, there are two common options: + +- Drill additional holes in the mounting plate to match your payload pattern. + When doing so, verify hole positions against internal components and use + appropriate corrosion-resistant fasteners. Avoid over-drilling and check that + new holes do not penetrate or interfere with wiring or structural elements. +- Design a 3D-printed adapter or sub-plate that mates to the 25 mm x 25 mm grid. + An adapter lets you retain the original plate integrity while providing a + custom hole pattern or mounting features for non-standard payloads. + +:::tip + +If you prefer not to drill the plate or design a custom adapter yourself, you +can order a complete sensor integration from us. We offer mechanical hardware +and software integration delivered as a ready-to-mount kit or installed by our +team — [contact us](#community--resources) for options and pricing. + +::: + +## Using external data & power ports + +The external data and power ports are located within the rear compartment. 
This +compartment must be opened for access. The interfaces are provided for +connecting external payloads or for establishing a direct connection to the +rover's internal network. + + + +:::warning + +The Raph Rover is not weatherproof while the rear compartment is open. To +maintain the IP54 protection rating with external connections, please use the +[external cable seals](#external-cable-seals). + +::: + +## External cable seals + +Both to keep the rover looking cleaner, and to provide extra weather and dust +resistance a system of seals around the rover can be used. They can ensure that +the whole robot with additional payload will maintain the IP54 protection +rating. + +To insert additional cables, it is necessary to dismantle the entire panel by +loosening two thumb screws. Once the panel is disassembled, individual seals can +be removed from the frame, allowing for the insertion of new cables. + + + +Raph Rover comes with different seals, labeled as QT [Z]/[X] or QT [X], where Z +indicates the number of holes in the seal and X indicates the diameter of the +cable in millimeters. + +:::tip + +Each cable seal has splitted holes, so it is possible to route pre-terminated +cables through them. + +::: diff --git a/docs/raph-rover/documentation/user-manual/software-and-hardware.mdx b/docs/raph-rover/documentation/user-manual/software-and-hardware.mdx new file mode 100644 index 00000000..e0f10cdb --- /dev/null +++ b/docs/raph-rover/documentation/user-manual/software-and-hardware.mdx @@ -0,0 +1,211 @@ +--- +title: Software & Hardware +sidebar_label: Software & Hardware +sidebar_position: 1 +keywords: + - raph + - rover + - mobile + - robot + - software + - hardware +description: >- + Documentation providing an overview of Raph Rover's software and hardware + architectures and components. +image: /img/robots/raph/raph-rover.webp +toc_max_heading_level: 4 +--- + +import CenterContent from '@site/src/components/CenterContent'; +import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; +import useBaseUrl from '@docusaurus/useBaseUrl'; + +# Software and Hardware components + +## Hardware + +Raph Rover integrates several key hardware components that work together to +provide a robust mobile robotics platform: + + + +These components are integrated into a compact chassis designed for durability +and ease of maintenance. The modular architecture allows for customization while +maintaining core functionality. + +## Software + +### Overview + +Raph Rover's software heavily relies on The +[Robot Operating System (ROS)](https://ros.org/) which offers many +functionalities that are used on the robot, such as: + +- Abstraction layer facilitating communication between software components. +- Open-source software components, maintained by the community. +- A collection of standard message interfaces. +- Tools for introspection and debugging. + +The primary segment of the software stack is partitioned into several +[ROS Nodes](https://wiki.ros.org/Nodes), treated as computational units, each +performing one logical task. The nodes interact via: + +- **Topics** - Named buses enabling message exchange between nodes. They are + strongly typed and employ anonymous publish/subscribe semantics. +- **Services** - A client/server mechanism for remote procedure calls between + nodes. The service server accepts remote procedure requests identified by name + and type, which must be known to the service client beforehand. 
+- **Parameters** - Sets of key/value pairs maintained separately by each node, + utilized for node configuration during startup and runtime without + necessitating code modifications. +- **TF transforms** - A single transform describes the relationship between 2 + coordinate frames at a specific point in time. TF transforms are distributed + between nodes using topics, but, for the sake of clarity, we will refer to + them as separate entities. + +There are two important software components which don't run as native ROS nodes: + +- **Controller firmware** - The firmware itself acts as a ROS node but uses + eProsima's + [Micro XRCE-DDS](https://micro-xrce-dds.docs.eprosima.com/en/latest/) as its + middleware. Thus, it requires the presence of the Micro-ROS Agent on the + built-in computer to communicate with other ROS nodes. +- **Web User Interface** - The WebUI establishes a connection with the Rosbridge + Server via WebSocket transport layer and employs the rosbridge protocol for + communication with the ROS nodes. + + + +### ROS nodes + + + +| ROS Node | Description | +| --------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Controller Node | The node created by the Controller firmware. Provides access to functionalities via topics and services for the management of the drivetrain, LED panels, Power System and many more. Additionally, it publishes relevant information, including:
  • State of the dynamic joints controlled by RaphCore.
  • Data retrieved from the onboard IMU sensor.
  • Power system status, comprising:
    • Battery voltages.
    • Connection status of batteries or the charger to the robot.
    • Current output power.
  • Robot odometry.
  • Diagnostic information for various system components.
| +| Robot State Publisher | Parses the kinematic tree model of the robot in URDF format and broadcasts the robot state using TF transforms. Here's how it operates:
  • Fixed joints, like sensor positions, are published as static transforms.
  • Movable joints, such as wheel states, are published as dynamic transforms, based on the current joint states published by the Controller Node.
It also publishes the robot URDF description on a designated topic, making it easily accessible to other nodes. | +| Camera Driver | Publishes the data provided by the stereoscopic camera. In the default configuration used in Raph Rover, it includes:
  • Images captured by the left, right and RGB (center) camera image sensors.
  • Calibration parameters for each image sensor.
  • Data from the camera's IMU sensor.
  • Depth images computed from the left and right sensors.
| +| Depth Processing Node | Publishes point cloud data computed from depth images and camera calibration parameters.

Employs “lazy subscription”, meaning it doesn't process any depth images until at least one node subscribes to the Point Cloud topic. | +| LIDAR Driver | Publishes data from the LIDAR sensor.

Additionally, it provides services for starting and stopping the sensor's motor. | +| Raph System Node | Enables rebooting and shutting down the onboard computer via ROS services. | +| LIDAR Filter Node | Filters out LIDAR measurements from the robot's footprint. | +| ROS API Node | Exposes ROS API services so the Web UI can inspect the ROS graph, query/set parameters and call services. | +| Rosbridge Server | Provides a WebSocket interface for communication with the Web User Interface using the rosbridge protocol. | +| Web Video Server | Streams camera images to the Web User Interface. | + +
+ + + +Custom ROS nodes can be created to extend the robot's capabilities even further. +Raph Rover's software stack provides a robust framework for developing and +integrating custom software and other sensors. + +You can learn how to create custom ROS nodes for Raph Rover in the +[ROS Development Tutorial](./../../advanced-guides/ros-development.mdx). + +### Firmware + +Rover's low-level hardware functions are managed by the RaphCore +microcontroller, which runs an RTOS app (firmware) based on FreeRTOS. +Communication between the firmware and the onboard computer is facilitated by +Micro-ROS. + +The firmware is responsible for: + +- Controlling the wheel motors and servos. +- Managing the power system, including battery monitoring and charging. +- Operating the LED panels. +- Publishing data from the IMU sensor. +- Publishing odometry calculated from wheel encoders and IMU. + +The firmware also uses a custom bootloader which is responsible for loading and +updating the firmware itself. To learn how to flash or update the firmware, +follow the guide below: + +{/* TODO: Add firmware flashing tutorial */} +{/* */} + +#### LED controller + +The built-in LED controller manages the LED states of the robot by processing +user-defined states and animations, integrating them with ongoing animations, +and updating the LEDs accordingly. + +Users can control the LED strip by specifying colors, duration, and priority of +specific LEDs via ROS topics. + +The robot features four LED panels that combine to form one long LED strip. LEDs +can be controlled either as a full strip or as specific panels, providing +flexibility for different use cases: + +- **Full Strip Control**: Ideal for animations that span the entire perimeter of + the robot, such as police lights. Can be set using + [controller/led_strip_state](./../ros-api#msg-led-strip-state) ROS topic. +- **Panel-Specific Control**: Useful for targeted functions, such as using all + front LEDs as a floodlight or using side LEDs to indicate battery levels. Can + be set using [controller/led_panel_state](./../ros-api#msg-led-panel-state) + ROS topic. + +LED priorities range from 1 to 127. Higher priority animations override lower +priority ones. Sending an LED state with priority -1 will reset the LED (turn it +off). Messages with priority 0 are ignored. + +### Web User Interface + +Web User Interface can be used to control Raph Rover, view camera streams and +monitor robot status. It utilizes the rosbridge protocol to communicate with ROS +nodes on the robot via WebSocket. 
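To confirm that this stack is up on a running rover, you can fall back on the standard ROS 2 introspection tools from a terminal on the onboard computer. The commands below are a minimal sketch, assuming a sourced ROS 2 environment; the node roles and the topic name are taken from the tables above.

```bash
# List the running ROS nodes that make up the stack described above.
ros2 node list

# Watch the power system status published by the Controller Node.
ros2 topic echo controller/power_system_state
```

The Web User Interface reaches these same interfaces indirectly, through the Rosbridge Server's WebSocket endpoint.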
+ + + +:::info + +To find out how to connect to and use the Web UI, visit the documentation page +below: + + + +::: diff --git a/docs/raph-rover/documentation/user-manual/styles.module.css b/docs/raph-rover/documentation/user-manual/styles.module.css new file mode 100644 index 00000000..a86cac80 --- /dev/null +++ b/docs/raph-rover/documentation/user-manual/styles.module.css @@ -0,0 +1,236 @@ +.circle { + height: 50px; + width: 50px; + border-radius: 50%; + display: block; + margin: 0 auto; +} + +.bar { + height: 16px; + width: 120px; + border-radius: 8px; + display: block; + margin: 0 auto; +} + +[data-theme='light'] .LEDcell { + background-color: var(--ifm-color-gray-200); +} + +#flashWhite { + animation: whiteAnimation 1.5s steps(1, end) infinite; +} + +#breathWhite { + animation: whiteAnimation 3s linear infinite; +} + +#white { + background: white; +} + +#flashBlue { + animation: blueAnimation 1.5s steps(1, end) infinite; +} + +#breathBlue { + animation: blueAnimation 3s linear infinite; +} + +#breathGreen { + animation: greenAnimation 3s linear infinite; +} + +#blue { + background: blue; +} + +#yellow { + background: yellow; +} + +#green { + background: limegreen; +} + +#red { + background: red; +} + +#chargeCycle { + animation: chargeCycleAnimation 4s ease-in-out infinite; +} + +#batteryConnected { + position: relative; + overflow: hidden; + background: rgba(255, 255, 255, 0.12); +} + +#batteryConnected::after { + content: ''; + position: absolute; + inset: 0; + background: red; + transform: scaleX(0); + transform-origin: left center; + animation: batteryConnectedFill 2.5s ease-in-out infinite; +} + +#batteryDisconnected { + animation: batteryDisconnectedAnimation 2s ease-in-out infinite; +} + +#microRos { + background: linear-gradient( + 90deg, + rgba(10, 20, 40, 0.35) 0%, + rgba(10, 20, 40, 0.35) 37.5%, + #1d6fff 37.5%, + #1d6fff 62.5%, + rgba(10, 20, 40, 0.35) 62.5%, + rgba(10, 20, 40, 0.35) 100% + ); + background-size: 200% 100%; + animation: microRosAnimation 1.8s ease-in-out infinite alternate; +} + +#flashRed { + animation: redAnimation 1.5s steps(1, end) infinite; +} + +#flashRed3 { + animation: red3flashAnimation 2s steps(1, start) infinite; +} + +@keyframes whiteAnimation { + 0% { + background: white; + } + 50% { + background: none; + } + 100% { + background: white; + } +} + +@keyframes blueAnimation { + 0% { + background: blue; + } + 50% { + background: none; + } + 100% { + background: blue; + } +} + +@keyframes greenAnimation { + 0% { + background: limegreen; + } + 50% { + background: none; + } + 100% { + background: limegreen; + } +} + +@keyframes redAnimation { + 0% { + background: red; + } + 50% { + background: none; + } + 100% { + background: red; + } +} + +@keyframes chargeCycleAnimation { + 0% { + background: red; + } + 25% { + background: rgba(255, 0, 0, 0.15); + } + 50% { + background: #ffea00; + } + 75% { + background: rgba(50, 205, 50, 0.15); + } + 100% { + background: limegreen; + } +} + +@keyframes batteryConnectedFill { + 0% { + transform: scaleX(0); + background: red; + } + 50% { + background: #ff9900; + } + 100% { + transform: scaleX(1); + background: limegreen; + } +} + +@keyframes batteryDisconnectedAnimation { + 0% { + background: red; + } + 60% { + background: rgba(255, 0, 0, 0); + } + 99% { + background: rgba(255, 0, 0, 0); + } + 100% { + background: red; + } +} + +@keyframes microRosAnimation { + 0% { + background-position: 0% 50%; + } + 100% { + background-position: 100% 50%; + } +} + +@keyframes red3flashAnimation { + 0% { + background: none; + } + 
10% { + background: red; + } + 20% { + background: none; + } + 40% { + background: red; + } + 50% { + background: none; + } + 60% { + background: red; + } + 70% { + background: none; + } + 100% { + background: none; + } +} diff --git a/docs/raph-rover/documentation/user-manual/user-manual.mdx b/docs/raph-rover/documentation/user-manual/user-manual.mdx new file mode 100644 index 00000000..68386d7d --- /dev/null +++ b/docs/raph-rover/documentation/user-manual/user-manual.mdx @@ -0,0 +1,230 @@ +--- +title: Raph Rover User Manual | Operation and Maintenance +sidebar_label: User manual +sidebar_position: 2 +keywords: + - Raph Rover + - ROS + - mobile robot + - operation + - payload integration + - maintenance +description: >- + A comprehensive user manual for the Raph Rover, detailing its hardware + architecture, ROS-based software stack, operational procedures, and + maintenance. +image: /img/robots/raph/raph-rover.webp +toc_max_heading_level: 3 +--- + +import DocCardList from '@theme/DocCardList'; +import styles from './styles.module.css'; +import useBaseUrl from '@docusaurus/useBaseUrl'; +import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; +import CenterContent from '@site/src/components/CenterContent'; +import LinkButton from '@site/src/components/LinkButton'; +import Support from '@site/docs/raph-rover/partial/_support.mdx'; +import FlexTable from '@site/src/components/FlexTable'; +import FlexTableItem from '@site/src/components/FlexTableItem'; +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + + +# Raph Rover User Manual + +--- + +:::note + +This manual assumes that you already unboxed and launched the Raph Rover for the +first time. If not, please complete the Getting started guide. + + + +::: + +## Introduction + +This user manual provides concise, practical guidance for safely operating, +maintaining, and troubleshooting the Raph Rover. Use this manual as the primary +reference for day-to-day use and field maintenance. For in-depth technical +details, refer to the [Raph Rover Specification](specification). + +If you seek more detailed information about using and developing your +applications on the Raph Rover, please refer to the other sections of our +documentation, including: + +- [Guides](../guides) +- [Advanced Guides](../advanced-guides) + +:::warning + +Always observe local safety regulations and the safety guidelines in this +document before operating the rover. + +::: + +:::tip + +If you are seeking more information about Raph Rover features check our Overview +page: + + + +::: + +## User manual contents + + + +## Safety guidelines + +:::danger + +Mobile robots like Raph Rover are inherently dangerous if not used properly. +Improper use can lead to injury, property damage, or environmental harm. Please +follow all safety guidelines and operational precautions outlined in this manual +to ensure safe and responsible use of the rover. + +::: + +### Intended use + +- Raph Rover is designed for outdoor and indoor use on light off-road terrain. + Avoid operating on steep slopes, loose gravel, mud, or waterlogged areas. +- Avoid using the rover in hazardous environments, such as areas with flammable + gases, explosive materials, or extreme temperatures. +- The rover is intended for research, education, and light payload transport. Do + not use it for heavy lifting, towing, or hazardous material transport. +- The rover is not a toy; keep it out of reach of children and pets. 
+- Always operate the rover within its specified weight limits and environmental + conditions. +- Do not modify the rover's hardware or software in ways not authorized by the + manufacturer, as this may compromise safety and void warranties. +- Always supervise the rover during operation to prevent accidents or damage. + +### Operational precautions + +- Before each use, inspect the rover for any signs of damage or wear, including + tires, sensors, and battery connections. +- Ensure the battery is sufficiently charged and securely connected before + operation. +- Operate the rover at safe speeds, especially in crowded or obstacle-rich + environments. +- Keep a safe distance from the rover while it is in motion to avoid injury. +- Be cautious when operating the rover on uneven terrain to prevent tipping or + loss of control. +- Avoid exposing the rover to extreme weather conditions, such as heavy rain, + snow, or low/high temperatures. +- Follow all local regulations regarding the use of autonomous or remotely + operated vehicles. + +### Mechanical safety + +- Do not place hands or objects near moving parts, such as wheels and motors, + while the rover is powered on. +- Use appropriate lifting techniques when handling the rover to avoid injury. +- Do not lift the rover by its sensors or other delicate components. +- Ensure all payloads are securely fastened to prevent shifting during movement. +- Ensure that any added payloads do not exceed the rover's weight capacity or + obstruct sensors. +- Ensure that mounted payloads do not interfere with the rover's balance or + center of gravity. + +### Electrical safety + +#### General precautions + +- Avoid exposing the rover's electrical components to water or moisture. +- Do not attempt to open or modify the rover's battery or electrical systems + without proper certification and training. +- Disconnect the battery before performing any maintenance or repairs on the + rover. +- Keep the rover's electrical components away from metal objects to prevent + short circuits. + +#### Battery handling + +- Use caution when handling the lithium-ion battery; avoid puncturing, crushing, + exposing it to fire or water. +- Charge the battery in a well-ventilated area away from flammable materials. +- Store the battery in a cool, dry place when not in use, away from direct + sunlight and heat sources. +- Store the battery at a partial charge (around 50%) for long-term storage to + prolong its lifespan. +- In case of battery damage or leakage, avoid contact with skin and dispose of + according to local regulations. +- Dispose of used batteries according to local regulations; do not incinerate or + throw them in regular trash. + +#### Charger and power supply + +- Only use the charger and power supply provided with the rover or approved + alternatives. +- Do not overcharge or leave the battery connected to the charger for extended + periods after reaching full charge. + +### Safety systems + +- The rover is equipped with a button to disable motors; familiarize yourself + with its location and operation. +- The rover will automatically stop if it loses connection with the remote + controller. +- The remote connection has a limited range; ensure you operate within this + range to maintain control. +- The remote connection has latency; be prepared for slight delays in response + time during operation. + +### Sensor & Radiation safety + +- The rover is equipped with class 1 LiDAR and cameras with IR illuminators that + are safe for general use. 
+- Avoid direct eye exposure to any active sensors, such as LiDAR or cameras, to + prevent potential eye damage. +- Rover emits safe levels of electromagnetic radiation on 2.4 GHz and 5 GHz + bands for Wi-Fi communication; potential interference with sensitive medical + devices is unlikely but possible. +- Configure the rover's router to use only allowed Wi-Fi channels and power + levels in your region to minimize interference with other devices. + +### Environmental considerations + +- Operate the rover in accordance with local environmental regulations. +- Rover has IP54 rating; it can handle limited dust ingress and water splashes + but should not be submerged. +- Do not operate the rover in environmentally sensitive areas without proper + authorization. +- Do not expose the rover to extreme temperatures, as this may affect + performance and safety. Raph Rover is designed to operate within a temperature + range of -10°C to 45°C (14°F to 113°F). +- The rover contains recyclable materials; dispose of components according to + local recycling regulations. + +:::note + +Some of the information in this manual may be subject to change as the rover is +updated; always refer to the latest version of the manual for current safety +guidelines. + +::: + +## Community & resources + + + + + diff --git a/docs/raph-rover/documentation/web-user-interface.mdx b/docs/raph-rover/documentation/web-user-interface.mdx new file mode 100644 index 00000000..2c54ffd9 --- /dev/null +++ b/docs/raph-rover/documentation/web-user-interface.mdx @@ -0,0 +1,141 @@ +--- +title: Raph Rover Web User Interface +sidebar_label: Web User Interface +sidebar_position: 5 +keywords: + - raph + - rover + - mobile + - robot + - user + - manual +description: >- + Documentation for Raph Rover's web user interface.. +image: /img/robots/raph/raph-rover.webp +toc_max_heading_level: 4 +--- + +import useBaseUrl from '@docusaurus/useBaseUrl'; +import Support from '@site/docs/raph-rover/partial/_support.mdx'; + + + +# Raph Rover Web User Interface + +Raph Rover UI is a responsive web-based user interface for controlling and +monitoring the Raph Rover robot. Built with **React** and **TypeScript**, it +provides real-time communication with **ROS** (Robot Operating System) through +rosbridge. + +## Main features + +- **Real-time camera feed**: View live video stream from the robot's camera. +- **Robot status monitoring**: Monitor battery level, connection status, imu + readings, and other vital statistics. +- **Teleoperation controls**: Control the robot's movement using on-screen + joystick or keyboard/gamepad inputs. +- **Responsive design**: Access the UI from various devices, including desktops, + tablets, and smartphones. + +## Quick start + +To access the Raph Rover Web UI, ensure your computer or mobile device is +connected to the same network as the Raph Rover. + +Open a web browser of your choice and navigate to: +[raph.local](http://raph.local) or [10.10.0.2](http://10.10.0.2). + +UI will automatically connect to the robot and display the camera preview as +well as navigation bar with robot status. + + + +After each reboot it is necessary to calibrate the servo motors. Toggle +hamburger menu in the top right corner and select **Calibrate wheels** option. + +To start driving with the robot you can use either the on-screen joystick or +keyboard/gamepad inputs. Toggle hamburger menu in the top right corner and +select **Enable driving** option as well as your control method of choice. 
+ +:::tip + +You can use gamepads, including **Xbox** and **PlayStation** controllers, with +the Web UI. First, connect your gamepad to your device (via cable or Bluetooth). +The UI will confirm the connection with a "_Gamepad connected_" notification. +After enabling driving you will be able to start controlling the robot with the +gamepad. + +::: + +Your Raph Rover is now ready to be driven around! + +## Overview of the UI components + +### Main view + + + +- **Camera feed** (1): The main area displays the live video stream from the + robot's camera. +- **Status bar** (2): Located at the top, it shows vital statistics such as + battery level, connection status, and imu readings. +- **Steering mode switcher** (3): Toggle between different steering modes + (ackermann or turn in place) using the selection buttons in the top right + corner. +- **Hamburger menu** (4): Access additional options such as calibration, driving + enable/disable, and settings. + +### Hamburger menu view + + + +- **Enable driving** (1): Toggle robot driving controls on or off. +- **Calibrate wheels** (2): Calibrate the servo motors of the steering wheels. +- **Stream selection** (3): Choose between different camera streams from OAK-D + cameras. +- **Settings** (4): Access additional settings and configurations for the UI. +- **Fullscreen mode** (5): Enable or disable fullscreen mode. +- **Toggle controls button** (6): Enable or disable the on-screen joystick and + keyboard controls. +- **Service options** (7): Access additional services such as restarting or + shutting down the on-board computer. + +## Extending the UI + +Raph Rover Web UI is an open-source project available on +[GitHub](https://github.com/RaphRover/raph_ui). Feel free to explore the +codebase, contribute to the project, or customize the UI to suit your specific +needs. + +For detailed instructions on setting up the development environment, building, +and deploying the UI, please refer to the readme file in the GitHub repository. + + + +## Community & resources + + diff --git a/docs/raph-rover/index.mdx b/docs/raph-rover/index.mdx index f6cde0a0..bddca12e 100644 --- a/docs/raph-rover/index.mdx +++ b/docs/raph-rover/index.mdx @@ -1,13 +1,16 @@ --- -title: 'Raph Rover: Heavy-Duty Mobile Robotic Platform' +title: 'Raph Rover: Accelerate Robotics R&D with a ROS 2 Platform' sidebar_label: Overview keywords: - - raph - - rover - - overview + - Raph Rover + - ROS 2 + - mobile robot + - R&D platform + - autonomous navigation sidebar_position: 1 description: >- - A quick set of informations regarding Raph Rover + Raph Rover is a pre-configured ROS 2 R&D platform with integrated Lidar and 3D + camera, letting you focus on algorithms, not hardware setup. 
image: /img/robots/raph/raph-rover.webp hide_title: true --- diff --git a/docusaurus.config.ts b/docusaurus.config.ts index 6c92b665..8ecda010 100644 --- a/docusaurus.config.ts +++ b/docusaurus.config.ts @@ -225,21 +225,30 @@ const config: Config = { { title: 'Find us on:', items: [ + { + label: 'Facebook', + href: 'https://www.facebook.com/fictionlabpl', + className: 'footer-facebook-link', + }, { label: 'Youtube', href: 'https://www.youtube.com/@leorover6230', + className: 'footer-youtube-link', }, { label: 'Linkedin', - href: 'https://www.linkedin.com/company/leorover/', + href: 'https://www.linkedin.com/company/fictionlab1', + className: 'footer-linkedin-link', }, { - label: 'Online forum', - href: 'https://forum.fictionlab.pl/', + label: 'Github', + href: 'https://github.com/fictionlab', + className: 'footer-github-link', }, { - label: 'Facebook Community Group', - href: 'https://www.facebook.com/groups/leorover', + label: 'Discord Community', + href: 'https://discord.gg/57DdtCnhCc', + className: 'footer-discord-link', }, ], }, diff --git a/leo-rover_versioned_docs/version-1.8/addons/high-capacity-battery.mdx b/leo-rover_versioned_docs/version-1.8/addons/high-capacity-battery.mdx index ac02c2a2..aeaa0481 100644 --- a/leo-rover_versioned_docs/version-1.8/addons/high-capacity-battery.mdx +++ b/leo-rover_versioned_docs/version-1.8/addons/high-capacity-battery.mdx @@ -17,6 +17,7 @@ import Product from '@site/src/products/high-capacity-battery.mdx'; import useBaseUrl from '@docusaurus/useBaseUrl'; import ThemedImageZoom from '@site/src/components/ThemedImageZoom'; import FusionEmbed from '@site/src/components/FusionEmbed'; +import CenterContent from '@site/src/components/CenterContent'; + +| Parameter | Value | +| -------------------- | :---------------------------------------------------: | +| Battery type | Li-Ion | +| Battery pack type | 3S3P | +| Nominal voltage | 10.89 V | +| Charge capacity | 14.7 Ah | +| Energy capacity | $\approx$ 160.1 Wh | +| Maximum output power | $\approx$ 120 W | +| Battery cell | Samsung INR21700-50E | +| Safety systems | Overcurrent, Undervoltage, Reverse polarity, Overheat | + +
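For reference, the energy capacity listed above is simply the product of the nominal voltage and the charge capacity:

$$
E \approx 10.89\ \mathrm{V} \times 14.7\ \mathrm{Ah} \approx 160.1\ \mathrm{Wh}
$$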
## Dimensions diff --git a/src/css/custom.css b/src/css/custom.css index b3fe9441..d2a83c2b 100644 --- a/src/css/custom.css +++ b/src/css/custom.css @@ -57,6 +57,45 @@ html[data-theme='dark'] { background-color: #242526; } +[class*='footer-'][class$='-link']::before { + content: ''; + width: 24px; + height: 24px; + display: inline-flex; + background-color: var(--ifm-navbar-link-color); + mask-size: contain; + mask-repeat: no-repeat; + mask-position: center; + vertical-align: middle; + transition: background-color var(--ifm-transition-fast) + var(--ifm-transition-timing-default); + margin-right: 0.4rem; +} + +[class*='footer-'][class$='-link']:hover::before { + background-color: var(--ifm-navbar-link-hover-color); +} + +.footer-facebook-link::before { + mask-image: url('/icons/facebook-icon.svg'); +} + +.footer-youtube-link::before { + mask-image: url('/icons/youtube-icon.svg'); +} + +.footer-linkedin-link::before { + mask-image: url('/icons/linkedin-icon.svg'); +} + +.footer-github-link::before { + mask-image: url('/icons/github-icon.svg'); +} + +.footer-discord-link::before { + mask-image: url('/icons/discord-icon.svg'); +} + div[class^='announcementBar_'] { font-size: 20px; font-weight: bold; diff --git a/static/_redirects b/static/_redirects index c94877a5..3822a213 100644 --- a/static/_redirects +++ b/static/_redirects @@ -48,8 +48,7 @@ # temp redirects for leo examples and avanced guides (REMOVE after adding new examples) -/leo-rover/leo-examples /leo-rover/1.8/leo-examples 302 -/leo-rover/leo-examples/* /leo-rover/1.8/leo-examples/:splat 302 +/leo-rover/leo-examples/follow-artag /leo-rover/leo-examples/follow-marker 302 /leo-rover/advanced-guides/ros-graph-in-rqt /leo-rover/1.8/advanced-guides/ros-graph-in-rqt 302 /leo-rover/advanced-guides/rviz /leo-rover/1.8/advanced-guides/rviz 302 /leo-rover/advanced-guides/joystick /leo-rover/1.8/advanced-guides/joystick 302 diff --git a/static/icons/discord-icon.svg b/static/icons/discord-icon.svg new file mode 100644 index 00000000..afade271 --- /dev/null +++ b/static/icons/discord-icon.svg @@ -0,0 +1,6 @@ + + + +discord + + \ No newline at end of file diff --git a/static/icons/facebook-icon.svg b/static/icons/facebook-icon.svg new file mode 100644 index 00000000..0ef37d6d --- /dev/null +++ b/static/icons/facebook-icon.svg @@ -0,0 +1,6 @@ + + + +facebook + + \ No newline at end of file diff --git a/static/icons/github-icon.svg b/static/icons/github-icon.svg new file mode 100644 index 00000000..3568b1c1 --- /dev/null +++ b/static/icons/github-icon.svg @@ -0,0 +1,6 @@ + + + +github + + \ No newline at end of file diff --git a/static/icons/linkedin-icon.svg b/static/icons/linkedin-icon.svg new file mode 100644 index 00000000..0579cb4d --- /dev/null +++ b/static/icons/linkedin-icon.svg @@ -0,0 +1,6 @@ + + + +linkedin + + \ No newline at end of file diff --git a/static/icons/youtube-icon.svg b/static/icons/youtube-icon.svg new file mode 100644 index 00000000..11e3a171 --- /dev/null +++ b/static/icons/youtube-icon.svg @@ -0,0 +1,6 @@ + + + +youtube + + \ No newline at end of file diff --git a/static/img/robots/leo/integrations/follow-aruco-marker/aruco-reconfigure.webp b/static/img/robots/leo/integrations/follow-aruco-marker/aruco-reconfigure.webp new file mode 100644 index 00000000..7af94df7 Binary files /dev/null and b/static/img/robots/leo/integrations/follow-aruco-marker/aruco-reconfigure.webp differ diff --git a/static/img/robots/leo/integrations/follow-aruco-marker/follow-aruco-1.webp 
b/static/img/robots/leo/integrations/follow-aruco-marker/follow-aruco-1.webp new file mode 100644 index 00000000..69a6961a Binary files /dev/null and b/static/img/robots/leo/integrations/follow-aruco-marker/follow-aruco-1.webp differ diff --git a/static/img/robots/leo/integrations/line-follower/line-follower-5.webp b/static/img/robots/leo/integrations/line-follower/line-follower-5.webp index 6fac65a7..adacd020 100644 Binary files a/static/img/robots/leo/integrations/line-follower/line-follower-5.webp and b/static/img/robots/leo/integrations/line-follower/line-follower-5.webp differ diff --git a/static/img/robots/raph/user-manual/raph_ui.webp b/static/img/robots/raph/user-manual/raph_ui.webp new file mode 100644 index 00000000..13caf4c0 Binary files /dev/null and b/static/img/robots/raph/user-manual/raph_ui.webp differ diff --git a/static/img/robots/raph/user-manual/raph_ui_main_view.webp b/static/img/robots/raph/user-manual/raph_ui_main_view.webp new file mode 100644 index 00000000..4adebd8e Binary files /dev/null and b/static/img/robots/raph/user-manual/raph_ui_main_view.webp differ diff --git a/static/img/robots/raph/user-manual/raph_ui_menu.webp b/static/img/robots/raph/user-manual/raph_ui_menu.webp new file mode 100644 index 00000000..df151da4 Binary files /dev/null and b/static/img/robots/raph/user-manual/raph_ui_menu.webp differ diff --git a/static/img/robots/raph/user-manual/raph_ui_showcase.webp b/static/img/robots/raph/user-manual/raph_ui_showcase.webp new file mode 100644 index 00000000..7c01ee75 Binary files /dev/null and b/static/img/robots/raph/user-manual/raph_ui_showcase.webp differ