From 796d0bddd993da7c6fcc1550b4e56105e4292bee Mon Sep 17 00:00:00 2001 From: Antonin RAFFIN Date: Sat, 15 Sep 2018 15:04:02 +0200 Subject: [PATCH 1/3] Add base doc files --- .gitignore | 2 + docs/Makefile | 20 ++++ docs/README.md | 25 +++++ docs/conf.py | 169 ++++++++++++++++++++++++++++++++++ docs/getting_started.rst | 29 ++++++ docs/index.rst | 47 ++++++++++ docs/make.bat | 36 ++++++++ docs/modules/localization.rst | 89 ++++++++++++++++++ docs/modules/mapping.rst | 43 +++++++++ docs/modules/slam.rst | 104 +++++++++++++++++++++ 10 files changed, 564 insertions(+) create mode 100644 docs/Makefile create mode 100644 docs/README.md create mode 100644 docs/conf.py create mode 100644 docs/getting_started.rst create mode 100644 docs/index.rst create mode 100644 docs/make.bat create mode 100644 docs/modules/localization.rst create mode 100644 docs/modules/mapping.rst create mode 100644 docs/modules/slam.rst diff --git a/.gitignore b/.gitignore index 28921ba1..9c4e9521 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,8 @@ __pycache__/ *.py[cod] *$py.class +_build/ +.idea/ # C extensions *.so diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..9296811e --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = PythonRobotics +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000..de982ed4 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,25 @@ +## Python Robotics Documentation + +This folder contains documentation for the Python Robotics project. + + +### Build the Documentation + +#### Install Sphinx and Theme + +``` +pip install sphinx sphinx-autobuild sphinx-rtd-theme +``` + +#### Building the Docs + +In the `docs/` folder: +``` +make html +``` + +if you want to building each time a file is changed: + +``` +sphinx-autobuild . _build/html +``` diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..20bcca12 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,169 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = 'PythonRobotics' +copyright = '2018, Atsushi Sakai' +author = 'Atsushi Sakai' + +# The short X.Y version +version = '' +# The full version, including alpha/beta/rc tags +release = '' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.mathjax', + 'sphinx.ext.viewcode', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path . +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +# Fix for read the docs +on_rtd = os.environ.get('READTHEDOCS') == 'True' +if on_rtd: + html_theme = 'default' +else: + html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +html_logo = '../icon.png' +html_theme_options = { + 'display_version': False, +} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'PythonRoboticsdoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. 
+ # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'PythonRobotics.tex', 'PythonRobotics Documentation', + 'Atsushi Sakai', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'pythonrobotics', 'PythonRobotics Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'PythonRobotics', 'PythonRobotics Documentation', + author, 'PythonRobotics', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Extension configuration ------------------------------------------------- diff --git a/docs/getting_started.rst b/docs/getting_started.rst new file mode 100644 index 00000000..153c3d9c --- /dev/null +++ b/docs/getting_started.rst @@ -0,0 +1,29 @@ +.. _getting_started: + +Getting Started +=============== + +Requirements +------------- + +- Python 3.6.x +- numpy +- scipy +- matplotlib +- pandas +- `cvxpy`_ + +.. _cvxpy: http://www.cvxpy.org/en/latest/ + + +How to use +---------- + +1. Install the required libraries. You can use environment.yml with + conda command. + +2. Clone this repo. + +3. Execute python script in each directory. + +4. Add star to this repo if you like it 😃. diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..cd112c86 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,47 @@ +.. PythonRobotics documentation master file, created by + sphinx-quickstart on Sat Sep 15 13:15:55 2018. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to PythonRobotics's documentation! +========================================== + +Python codes for robotics algorithm. + +This is a Python code collection of robotics algorithms, especially for autonomous navigation. + +Features: + +1. Easy to read for understanding each algorithm's basic idea. + +2. Widely used and practical algorithms are selected. + +3. Minimum dependency. + +See this paper for more details: + +- `[1808.10703] PythonRobotics: a Python code collection of robotics + algorithms`_ (`BibTeX`_) + +.. _`[1808.10703] PythonRobotics: a Python code collection of robotics algorithms`: https://arxiv.org/abs/1808.10703 +.. _BibTeX: https://github.com/AtsushiSakai/PythonRoboticsPaper/blob/master/python_robotics.bib + + + +.. toctree:: + :maxdepth: 2 + :caption: Guide + + getting_started + modules/localization + modules/mapping + modules/slam + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..6aab964d --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,36 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. 
+set BUILDDIR=_build
+set SPHINXPROJ=PythonRobotics
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+
+:end
+popd
diff --git a/docs/modules/localization.rst b/docs/modules/localization.rst
new file mode 100644
index 00000000..a69ab51c
--- /dev/null
+++ b/docs/modules/localization.rst
@@ -0,0 +1,89 @@
+.. _localization:
+
+Localization
+============
+
+Extended Kalman Filter localization
+-----------------------------------
+
+.. raw:: html
+
+
+This is a sensor fusion localization with an Extended Kalman Filter (EKF).
+
+The blue line is the true trajectory, the black line is the dead
+reckoning trajectory,
+
+the green points are the positioning observations (e.g. GPS), and the
+red line is the trajectory estimated with the EKF.
+
+The red ellipse is the covariance ellipse estimated by the EKF.
+
+Ref:
+
+- `PROBABILISTIC ROBOTICS`_
+
+Unscented Kalman Filter localization
+------------------------------------
+
+|2|
+
+This is a sensor fusion localization with an Unscented Kalman Filter (UKF).
+
+The lines and points have the same meaning as in the EKF simulation.
+
+Ref:
+
+- `Discriminatively Trained Unscented Kalman Filter for Mobile Robot
+  Localization`_
+
+Particle filter localization
+----------------------------
+
+|3|
+
+This is a sensor fusion localization with a Particle Filter (PF).
+
+The blue line is the true trajectory, the black line is the dead
+reckoning trajectory,
+
+and the red line is the trajectory estimated with the PF.
+
+It is assumed that the robot can measure the distance to landmarks
+(RFID).
+
+These measurements are used for PF localization.
+
+Ref:
+
+- `PROBABILISTIC ROBOTICS`_
+
+Histogram filter localization
+-----------------------------
+
+|4|
+
+This is a 2D localization example with a histogram filter.
+
+The red cross is the true position, and the black points are the RFID
+positions.
+
+The blue grid shows the position probability of the histogram filter.
+
+In this simulation, x and y are unknown, while yaw is known.
+
+The filter integrates speed input and range observations from the RFID
+for localization.
+
+An initial position estimate is not needed.
+
+Ref:
+
+- `PROBABILISTIC ROBOTICS`_
+
+.. _PROBABILISTIC ROBOTICS: http://www.probabilistic-robotics.org/
+.. _Discriminatively Trained Unscented Kalman Filter for Mobile Robot Localization: https://www.researchgate.net/publication/267963417_Discriminatively_Trained_Unscented_Kalman_Filter_for_Mobile_Robot_Localization
+
+.. |2| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/Localization/unscented_kalman_filter/animation.gif
+.. |3| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/Localization/particle_filter/animation.gif
+.. |4| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/Localization/histogram_filter/animation.gif
diff --git a/docs/modules/mapping.rst b/docs/modules/mapping.rst
new file mode 100644
index 00000000..9da79fd6
--- /dev/null
+++ b/docs/modules/mapping.rst
@@ -0,0 +1,43 @@
+.. _mapping:
+
+Mapping
+=======
+
+Gaussian grid map
+-----------------
+
+This is a 2D Gaussian grid mapping example.
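+
+As a rough sketch of the idea (the helper below is illustrative, not the
+exact code of this sample: the grid padding, resolution and the
+Gaussian-over-distance occupancy model are assumptions), every grid cell
+is scored by the distance to the closest observed obstacle point:
+
+.. code-block:: python
+
+    import numpy as np
+    from scipy.stats import norm
+
+    def gaussian_grid_map(ox, oy, reso=0.5, std=1.0):
+        # pad the grid a little beyond the observed obstacle points
+        min_x, min_y = min(ox) - 5.0, min(oy) - 5.0
+        xw = int(round((max(ox) + 5.0 - min_x) / reso))
+        yw = int(round((max(oy) + 5.0 - min_y) / reso))
+        grid = np.zeros((xw, yw))
+        for ix in range(xw):
+            for iy in range(yw):
+                x, y = min_x + ix * reso, min_y + iy * reso
+                # distance to the closest obstacle observation
+                d = min(np.hypot(x - px, y - py) for px, py in zip(ox, oy))
+                # occupancy likelihood: high near an obstacle, decaying with distance
+                grid[ix, iy] = 1.0 - norm.cdf(d, 0.0, std)
+        return grid, min_x, min_y
+
+    grid, min_x, min_y = gaussian_grid_map(ox=[2.0, 4.0, 4.5], oy=[3.0, 1.0, 4.0])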
+
+|2|
+
+Ray casting grid map
+--------------------
+
+This is a 2D ray casting grid mapping example.
+
+|3|
+
+k-means object clustering
+-------------------------
+
+This is 2D object clustering with the k-means algorithm.
+
+|4|
+
+Object shape recognition using circle fitting
+---------------------------------------------
+
+This is object shape recognition using circle fitting.
+
+|5|
+
+The blue circle is the true object shape.
+
+The red crosses are observations from a ranging sensor.
+
+The red circle is the estimated object shape using circle fitting.
+
+.. |2| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/Mapping/gaussian_grid_map/animation.gif
+.. |3| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/Mapping/raycasting_grid_map/animation.gif
+.. |4| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/Mapping/kmeans_clustering/animation.gif
+.. |5| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/Mapping/circle_fitting/animation.gif
diff --git a/docs/modules/slam.rst b/docs/modules/slam.rst
new file mode 100644
index 00000000..13a122d8
--- /dev/null
+++ b/docs/modules/slam.rst
@@ -0,0 +1,104 @@
+.. _slam:
+
+SLAM
+====
+
+Simultaneous Localization and Mapping (SLAM) examples.
+
+.. _iterative-closest-point-(icp)-matching:
+
+Iterative Closest Point (ICP) Matching
+--------------------------------------
+
+This is a 2D ICP matching example with singular value decomposition.
+
+It can calculate a rotation matrix and a translation vector between
+two sets of points.
+
+|3|
+
+Ref:
+
+- `Introduction to Mobile Robotics: Iterative Closest Point Algorithm`_
+
+EKF SLAM
+--------
+
+This is an Extended Kalman Filter based SLAM example.
+
+The blue line is the ground truth, the black line is dead reckoning, and
+the red line is the estimated trajectory with EKF SLAM.
+
+The green crosses are estimated landmarks.
+
+|4|
+
+Ref:
+
+- `PROBABILISTIC ROBOTICS`_
+
+FastSLAM 1.0
+------------
+
+This is a feature-based SLAM example using FastSLAM 1.0.
+
+The blue line is the ground truth, the black line is dead reckoning, and
+the red line is the estimated trajectory with FastSLAM.
+
+The red points are the particles of FastSLAM.
+
+The black points are landmarks, and the blue crosses are the landmark
+positions estimated by FastSLAM.
+
+|5|
+
+Ref:
+
+- `PROBABILISTIC ROBOTICS`_
+
+- `SLAM simulations by Tim Bailey`_
+
+FastSLAM 2.0
+------------
+
+This is a feature-based SLAM example using FastSLAM 2.0.
+
+The animation has the same meaning as that of FastSLAM 1.0.
+
+|6|
+
+Ref:
+
+- `PROBABILISTIC ROBOTICS`_
+
+- `SLAM simulations by Tim Bailey`_
+
+Graph based SLAM
+----------------
+
+This is a graph based SLAM example.
+
+The blue line is the ground truth.
+
+The black line is dead reckoning.
+
+The red line is the estimated trajectory with Graph based SLAM.
+
+The black stars are landmarks used for graph edge generation.
+
+|7|
+
+Ref:
+
+- `A Tutorial on Graph-Based SLAM`_
+
+.. _`Introduction to Mobile Robotics: Iterative Closest Point Algorithm`: https://cs.gmu.edu/~kosecka/cs685/cs685-icp.pdf
+.. _PROBABILISTIC ROBOTICS: http://www.probabilistic-robotics.org/
+.. _SLAM simulations by Tim Bailey: http://www-personal.acfr.usyd.edu.au/tbailey/software/slam_simulations.htm
+.. _A Tutorial on Graph-Based SLAM: http://www2.informatik.uni-freiburg.de/~stachnis/pdf/grisetti10titsmag.pdf
+
+.. |3| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/iterative_closest_point/animation.gif
.. |4| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/EKFSLAM/animation.gif
.. |5| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/FastSLAM1/animation.gif
.. |6| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/FastSLAM2/animation.gif
.. |7| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/GraphBasedSLAM/animation.gif

From 9ba2879540325e4018894d018903554183c92b5a Mon Sep 17 00:00:00 2001
From: Antonin RAFFIN
Date: Sat, 15 Sep 2018 15:29:55 +0200
Subject: [PATCH 2/3] Finish doc

---
 docs/index.rst                     |   5 +-
 docs/modules/aerial_navigation.rst |  13 +
 docs/modules/arm_navigation.rst    |  33 +++
 docs/modules/path_planning.rst     | 456 ++++++++++++++++++++++++++++++
 docs/modules/path_tracking.rst     | 112 +++++++
 5 files changed, 618 insertions(+), 1 deletion(-)
 create mode 100644 docs/modules/aerial_navigation.rst
 create mode 100644 docs/modules/arm_navigation.rst
 create mode 100644 docs/modules/path_planning.rst
 create mode 100644 docs/modules/path_tracking.rst

diff --git a/docs/index.rst b/docs/index.rst
index cd112c86..bf775bf4 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -36,7 +36,10 @@ See this paper for more details:
    modules/localization
    modules/mapping
    modules/slam
-
+   modules/path_planning
+   modules/path_tracking
+   modules/arm_navigation
+   modules/aerial_navigation
 
 
 Indices and tables
diff --git a/docs/modules/aerial_navigation.rst b/docs/modules/aerial_navigation.rst
new file mode 100644
index 00000000..2f15289b
--- /dev/null
+++ b/docs/modules/aerial_navigation.rst
@@ -0,0 +1,13 @@
+.. _aerial_navigation:
+
+Aerial Navigation
+=================
+
+Drone 3D trajectory following
+-----------------------------
+
+This is a 3D trajectory following simulation for a quadrotor.
+
+|3|
+
+.. |3| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/AerialNavigation/drone_3d_trajectory_following/animation.gif
diff --git a/docs/modules/arm_navigation.rst b/docs/modules/arm_navigation.rst
new file mode 100644
index 00000000..a3e13dce
--- /dev/null
+++ b/docs/modules/arm_navigation.rst
@@ -0,0 +1,33 @@
+.. _arm_navigation:
+
+Arm Navigation
+==============
+
+Two joint arm to point control
+------------------------------
+
+This is a two joint arm to a point control simulation.
+
+This is an interactive simulation.
+
+You can set the goal position of the end effector by left-clicking on
+the plotting area.
+
+|3|
+
+N joint arm to point control
+----------------------------
+
+This is an N joint arm to a point control simulation.
+
+This is an interactive simulation.
+
+You can set the goal position of the end effector by left-clicking on
+the plotting area.
+
+|4|
+
+In this simulation N = 10, but you can change it.
+
+.. |3| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/ArmNavigation/two_joint_arm_to_point_control/animation.gif
+.. |4| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/ArmNavigation/n_joint_arm_to_point_control/animation.gif
diff --git a/docs/modules/path_planning.rst b/docs/modules/path_planning.rst
new file mode 100644
index 00000000..dcec3e39
--- /dev/null
+++ b/docs/modules/path_planning.rst
@@ -0,0 +1,456 @@
+.. _path_planning:
+
+Path Planning
+=============
+
+Dynamic Window Approach
+-----------------------
+
+This is a 2D navigation sample code with the Dynamic Window Approach.
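+
+As a minimal sketch of the technique (a simplified variant, not this
+sample's implementation: the cost weights, velocity limits and helper
+names below are assumptions), the planner samples velocity pairs inside
+the dynamic window, rolls each pair forward with a unicycle model, and
+keeps the lowest-cost rollout:
+
+.. code-block:: python
+
+    import numpy as np
+
+    def simulate(x, v, w, dt, steps):
+        # unicycle rollout, x = [px, py, yaw]
+        traj = [x]
+        for _ in range(steps):
+            x = np.array([x[0] + v * np.cos(x[2]) * dt,
+                          x[1] + v * np.sin(x[2]) * dt,
+                          x[2] + w * dt])
+            traj.append(x)
+        return np.array(traj)
+
+    def dwa_control(x, v, w, goal, obstacles, dt=0.1, max_v=1.0, max_w=1.0,
+                    max_acc=0.5, max_dw=1.0, predict_time=2.0, robot_radius=0.3):
+        # dynamic window: velocities reachable within one control period
+        vs = np.arange(max(0.0, v - max_acc * dt), min(max_v, v + max_acc * dt) + 1e-9, 0.05)
+        ws = np.arange(max(-max_w, w - max_dw * dt), min(max_w, w + max_dw * dt) + 1e-9, 0.1)
+        best_u, best_cost = (0.0, 0.0), float("inf")
+        for vc in vs:
+            for wc in ws:
+                traj = simulate(x, vc, wc, dt, int(predict_time / dt))
+                # clearance to the closest obstacle along the rollout
+                clearance = min(np.hypot(traj[:, 0] - ox, traj[:, 1] - oy).min()
+                                for ox, oy in obstacles)
+                if clearance <= robot_radius:
+                    continue  # this rollout collides, discard it
+                goal_cost = np.hypot(goal[0] - traj[-1, 0], goal[1] - traj[-1, 1])
+                cost = goal_cost + 1.0 / clearance + (max_v - vc)
+                if cost < best_cost:
+                    best_cost, best_u = cost, (vc, wc)
+        return best_u
+
+    v, w = dwa_control(np.array([0.0, 0.0, 0.0]), 0.0, 0.0,
+                       goal=(5.0, 3.0), obstacles=[(2.0, 1.0), (3.0, 2.5)])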
+
+Ref:
+
+- `The Dynamic Window Approach to Collision
+  Avoidance `__
+
+|DWA|
+
+Grid based search
+-----------------
+
+Dijkstra algorithm
+~~~~~~~~~~~~~~~~~~
+
+This is 2D grid based shortest path planning with Dijkstra's
+algorithm.
+
+|Dijkstra|
+
+In the animation, cyan points are searched nodes.
+
+.. _a*-algorithm:
+
+A\* algorithm
+~~~~~~~~~~~~~
+
+This is 2D grid based shortest path planning with the A\* algorithm.
+
+|astar|
+
+In the animation, cyan points are searched nodes.
+
+Its heuristic is the 2D Euclidean distance.
+
+Potential Field algorithm
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This is 2D grid based path planning with the Potential Field algorithm.
+
+|PotentialField|
+
+In the animation, the blue heat map shows the potential value of each
+grid cell.
+
+Ref:
+
+- `Robotic Motion Planning: Potential
+  Functions `__
+
+Model Predictive Trajectory Generator
+-------------------------------------
+
+This is a path optimization sample with the model predictive trajectory
+generator.
+
+This algorithm is used for the state lattice planner.
+
+Path optimization sample
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+|4|
+
+Lookup table generation sample
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+|5|
+
+Ref:
+
+- `Optimal rough terrain trajectory generation for wheeled mobile
+  robots `__
+
+State Lattice Planning
+----------------------
+
+This script is a path planning code with state lattice planning.
+
+This code uses the model predictive trajectory generator to solve the
+boundary problem.
+
+Ref:
+
+- `Optimal rough terrain trajectory generation for wheeled mobile
+  robots `__
+
+- `State Space Sampling of Feasible Motions for High-Performance Mobile
+  Robot Navigation in Complex
+  Environments `__
+
+Uniform polar sampling
+~~~~~~~~~~~~~~~~~~~~~~
+
+|6|
+
+Biased polar sampling
+~~~~~~~~~~~~~~~~~~~~~
+
+|7|
+
+Lane sampling
+~~~~~~~~~~~~~
+
+|8|
+
+.. _probabilistic-road-map-(prm)-planning:
+
+Probabilistic Road-Map (PRM) planning
+-------------------------------------
+
+|PRM|
+
+This PRM planner uses the Dijkstra method for graph search.
+
+In the animation, the blue points are sampled points,
+
+the cyan crosses are points searched with the Dijkstra method,
+
+and the red line is the final path of PRM.
+
+Ref:
+
+- `Probabilistic roadmap -
+  Wikipedia `__
+
+Voronoi Road-Map planning
+-------------------------
+
+|VRM|
+
+This Voronoi road-map planner uses the Dijkstra method for graph search.
+
+In the animation, the blue points are Voronoi points,
+
+the cyan crosses are points searched with the Dijkstra method,
+
+and the red line is the final path of the Voronoi Road-Map.
+
+Ref:
+
+- `Robotic Motion
+  Planning `__
+
+.. _rapidly-exploring-random-trees-(rrt):
+
+Rapidly-Exploring Random Trees (RRT)
+------------------------------------
+
+Basic RRT
+~~~~~~~~~
+
+|9|
+
+This is a simple path planning code with Rapidly-Exploring Random Trees
+(RRT).
+
+Black circles are obstacles, the green lines form the searched tree, and
+the red crosses are the start and goal positions.
+
+.. _rrt*:
+
+RRT\*
+~~~~~
+
+|10|
+
+This is a path planning code with RRT\*.
+
+Black circles are obstacles, the green lines form the searched tree, and
+the red crosses are the start and goal positions.
+
+Ref:
+
+- `Incremental Sampling-based Algorithms for Optimal Motion
+  Planning `__
+
+- `Sampling-based Algorithms for Optimal Motion
+  Planning `__
+
+RRT with Dubins path
+~~~~~~~~~~~~~~~~~~~~
+
+|PythonRobotics|
+
+Path planning for a car robot with RRT and a Dubins path planner.
+.. _rrt*-with-dubins-path:
+
+RRT\* with Dubins path
+~~~~~~~~~~~~~~~~~~~~~~
+
+|AtsushiSakai/PythonRobotics|
+
+Path planning for a car robot with RRT\* and a Dubins path planner.
+
+.. _rrt*-with-reeds-sheep-path:
+
+RRT\* with Reeds-Shepp path
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+|11|
+
+Path planning for a car robot with RRT\* and a Reeds-Shepp path planner.
+
+.. _informed-rrt*:
+
+Informed RRT\*
+~~~~~~~~~~~~~~
+
+|irrt|
+
+This is a path planning code with Informed RRT*.
+
+The cyan ellipse is the heuristic sampling domain of Informed RRT*.
+
+Ref:
+
+- `Informed RRT\*: Optimal Sampling-based Path Planning Focused via
+  Direct Sampling of an Admissible Ellipsoidal
+  Heuristic `__
+
+.. _batch-informed-rrt*:
+
+Batch Informed RRT\*
+~~~~~~~~~~~~~~~~~~~~
+
+|irrt2|
+
+This is a path planning code with Batch Informed RRT*.
+
+Ref:
+
+- `Batch Informed Trees (BIT*): Sampling-based Optimal Planning via the
+  Heuristically Guided Search of Implicit Random Geometric
+  Graphs `__
+
+.. _closed-loop-rrt*:
+
+Closed Loop RRT\*
+~~~~~~~~~~~~~~~~~
+
+This is vehicle model based path planning with Closed Loop RRT*.
+
+|CLRRT|
+
+In this code, the pure pursuit algorithm is used for steering control,
+
+and PID is used for speed control.
+
+Ref:
+
+- `Motion Planning in Complex Environments using Closed-loop
+  Prediction `__
+
+- `Real-time Motion Planning with Applications to Autonomous Urban
+  Driving `__
+
+- `[1601.06326] Sampling-based Algorithms for Optimal Motion Planning
+  Using Closed-loop Prediction `__
+
+.. _lqr-rrt*:
+
+LQR-RRT\*
+~~~~~~~~~
+
+This is a path planning simulation with LQR-RRT*.
+
+A double integrator motion model is used for the LQR local planner.
+
+|LQRRRT|
+
+Ref:
+
+- `LQR-RRT\*: Optimal Sampling-Based Motion Planning with Automatically
+  Derived Extension
+  Heuristics `__
+
+- `MahanFathi/LQR-RRTstar: LQR-RRT\* method is used for random motion
+  planning of a simple pendulum in its phase
+  plot `__
+
+Cubic spline planning
+---------------------
+
+A sample code for cubic spline path planning.
+
+This code generates a curvature-continuous path from x-y waypoints
+using a cubic spline.
+
+The heading angle at each point can also be calculated analytically.
+
+|12|
+|13|
+|14|
+
+B-Spline planning
+-----------------
+
+|B-Spline|
+
+This is path planning with a B-Spline curve.
+
+If you input waypoints, it generates a smooth path with a B-Spline
+curve.
+
+The final course passes through the first and last waypoints.
+
+Ref:
+
+- `B-spline - Wikipedia `__
+
+.. _eta^3-spline-path-planning:
+
+Eta^3 Spline path planning
+--------------------------
+
+|eta3|
+
+This is path planning with an Eta^3 spline.
+
+Ref:
+
+- `\\eta^3-Splines for the Smooth Path Generation of Wheeled Mobile
+  Robots `__
+
+Bezier path planning
+--------------------
+
+A sample code for Bezier path planning.
+
+It is based on a Bezier path with 4 control points.
+
+|Bezier1|
+
+If you change the offset distance from the start and end points,
+
+you can get a different Bezier course:
+
+|Bezier2|
+
+Ref:
+
+- `Continuous Curvature Path Generation Based on Bezier Curves for
+  Autonomous
+  Vehicles `__
+
+Quintic polynomials planning
+----------------------------
+
+Motion planning with quintic polynomials.
+
+|2|
+
+It can calculate a 2D path, velocity, and acceleration profile based on
+quintic polynomials.
+
+Ref:
+
+- `Local Path Planning And Motion Control For Agv In
+  Positioning `__
+
+Dubins path planning
+--------------------
+
+A sample code for Dubins path planning.
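+
+As a sketch of the underlying math (only one of the six Dubins words is
+shown; a full planner also evaluates RSR, LSR, RSL, RLR and LRL and keeps
+the shortest, and this is not the exact code of the sample): with the
+start at the origin with heading ``alpha``, the goal at ``(d, 0)`` with
+heading ``beta``, and a unit turn radius, the LSL segment lengths are:
+
+.. code-block:: python
+
+    from math import atan2, cos, sin, sqrt, pi
+
+    def mod2pi(theta):
+        return theta % (2.0 * pi)
+
+    def dubins_lsl(alpha, beta, d):
+        # Left-Straight-Left word: returns the segment lengths (t, p, q)
+        # for a unit turn radius, or None when the word is infeasible
+        p_sq = (2.0 + d ** 2 - 2.0 * cos(alpha - beta)
+                + 2.0 * d * (sin(alpha) - sin(beta)))
+        if p_sq < 0.0:
+            return None
+        ang = atan2(cos(beta) - cos(alpha), d + sin(alpha) - sin(beta))
+        t = mod2pi(-alpha + ang)
+        p = sqrt(p_sq)
+        q = mod2pi(beta - ang)
+        return t, p, q  # total path length is (t + p + q) * turn_radius
+
+    print(dubins_lsl(alpha=0.0, beta=pi / 2.0, d=4.0))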
+ +|dubins| + +Ref: + +- `Dubins path - + Wikipedia `__ + +Reeds Shepp planning +-------------------- + +A sample code with Reeds Shepp path planning. + +|RSPlanning| + +Ref: + +- `15.3.2 Reeds-Shepp + Curves `__ + +- `optimal paths for a car that goes both forwards and + backwards `__ + +- `ghliu/pyReedsShepp: Implementation of Reeds Shepp + curve. `__ + +LQR based path planning +----------------------- + +A sample code using LQR based path planning for double integrator model. + +|RSPlanning2| + +Optimal Trajectory in a Frenet Frame +------------------------------------ + +|15| + +This is optimal trajectory generation in a Frenet Frame. + +The cyan line is the target course and black crosses are obstacles. + +The red line is predicted path. + +Ref: + +- `Optimal Trajectory Generation for Dynamic Street Scenarios in a + Frenet + Frame `__ + +- `Optimal trajectory generation for dynamic street scenarios in a + Frenet Frame `__ + +.. |DWA| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/DynamicWindowApproach/animation.gif +.. |Dijkstra| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/Dijkstra/animation.gif +.. |astar| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/AStar/animation.gif +.. |PotentialField| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/PotentialFieldPlanning/animation.gif +.. |4| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ModelPredictiveTrajectoryGenerator/kn05animation.gif +.. |5| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ModelPredictiveTrajectoryGenerator/lookuptable.png?raw=True +.. |6| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/StateLatticePlanner/UniformPolarSampling.gif +.. |7| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/StateLatticePlanner/BiasedPolarSampling.gif +.. |8| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/StateLatticePlanner/LaneSampling.gif +.. |PRM| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ProbabilisticRoadMap/animation.gif +.. |VRM| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/VoronoiRoadMap/animation.gif +.. |9| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRT/animation.gif +.. |10| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRTstar/animation.gif +.. |PythonRobotics| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRTDubins/animation.gif +.. |AtsushiSakai/PythonRobotics| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRTStarDubins/animation.gif +.. |11| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRTStarReedsShepp/animation.gif +.. |irrt| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/InformedRRTStar/animation.gif +.. |irrt2| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/BatchInformedRRTStar/animation.gif +.. |CLRRT| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ClosedLoopRRTStar/animation.gif +.. |LQRRRT| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/LQRRRTStar/animation.gif +.. 
|12| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/CubicSpline/Figure_1.png?raw=True +.. |13| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/CubicSpline/Figure_2.png?raw=True +.. |14| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/CubicSpline/Figure_3.png?raw=True +.. |B-Spline| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/BSplinePath/Figure_1.png?raw=True +.. |eta3| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/Eta3SplinePath/animation.gif?raw=True +.. |Bezier1| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/BezierPath/Figure_1.png?raw=True +.. |Bezier2| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/BezierPath/Figure_2.png?raw=True +.. |2| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/QuinticPolynomialsPlanner/animation.gif +.. |dubins| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/DubinsPath/animation.gif?raw=True +.. |RSPlanning| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ReedsSheppPath/animation.gif?raw=true +.. |RSPlanning2| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/LQRPlanner/animation.gif?raw=true +.. |15| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/FrenetOptimalTrajectory/animation.gif diff --git a/docs/modules/path_tracking.rst b/docs/modules/path_tracking.rst new file mode 100644 index 00000000..24e6c9c5 --- /dev/null +++ b/docs/modules/path_tracking.rst @@ -0,0 +1,112 @@ +.. _path_tracking: + +Path tracking +============= + +move to a pose control +---------------------- + +This is a simulation of moving to a pose control + +|2| + +Ref: + +- `P. I. Corke, "Robotics, Vision and Control" \| SpringerLink + p102 `__ + +Pure pursuit tracking +--------------------- + +Path tracking simulation with pure pursuit steering control and PID +speed control. + +|3| + +The red line is a target course, the green cross means the target point +for pure pursuit control, the blue line is the tracking. + +Ref: + +- `A Survey of Motion Planning and Control Techniques for Self-driving + Urban Vehicles `__ + +Stanley control +--------------- + +Path tracking simulation with Stanley steering control and PID speed +control. + +|4| + +Ref: + +- `Stanley: The robot that won the DARPA grand + challenge `__ + +- `Automatic Steering Methods for Autonomous Automobile Path + Tracking `__ + +Rear wheel feedback control +--------------------------- + +Path tracking simulation with rear wheel feedback steering control and +PID speed control. + +|5| + +Ref: + +- `A Survey of Motion Planning and Control Techniques for Self-driving + Urban Vehicles `__ + +.. _linearquadratic-regulator-(lqr)-steering-control: + +Linear–quadratic regulator (LQR) steering control +------------------------------------------------- + +Path tracking simulation with LQR steering control and PID speed +control. + +|6| + +Ref: + +- `ApolloAuto/apollo: An open autonomous driving + platform `__ + +.. _linearquadratic-regulator-(lqr)-speed-and-steering-control: + +Linear–quadratic regulator (LQR) speed and steering control +----------------------------------------------------------- + +Path tracking simulation with LQR speed and steering control. 
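+
+Both LQR trackers boil down to a discrete-time LQR gain. A minimal sketch
+of how such a gain can be computed (the Riccati iteration below and the
+toy double-integrator model are illustrative assumptions, not the exact
+model used in this simulation):
+
+.. code-block:: python
+
+    import numpy as np
+
+    def dlqr(A, B, Q, R, eps=1e-3, max_iter=200):
+        # iterate the discrete-time algebraic Riccati equation to convergence,
+        # then return the state feedback gain K for u = -K @ x
+        P = Q
+        for _ in range(max_iter):
+            Pn = (A.T @ P @ A
+                  - A.T @ P @ B @ np.linalg.inv(R + B.T @ P @ B) @ B.T @ P @ A
+                  + Q)
+            if np.abs(Pn - P).max() < eps:
+                P = Pn
+                break
+            P = Pn
+        return np.linalg.inv(B.T @ P @ B + R) @ (B.T @ P @ A)
+
+    dt = 0.1
+    A = np.array([[1.0, dt], [0.0, 1.0]])  # toy error model: [error, error rate]
+    B = np.array([[0.0], [dt]])            # control input: acceleration
+    K = dlqr(A, B, Q=np.eye(2), R=np.eye(1))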
+ +|7| + +Ref: + +- `Towards fully autonomous driving: Systems and algorithms - IEEE + Conference + Publication `__ + +Model predictive speed and steering control +------------------------------------------- + +Path tracking simulation with iterative linear model predictive speed +and steering control. + +.. raw:: html + + + +Ref: + +- `notebook `__ + +.. |2| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/move_to_pose/animation.gif +.. |3| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/pure_pursuit/animation.gif +.. |4| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/stanley_controller/animation.gif +.. |5| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/rear_wheel_feedback/animation.gif +.. |6| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/lqr_steer_control/animation.gif +.. |7| image:: https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/lqr_speed_steer_control/animation.gif From 5f8a2edce22f02e0a786bd0fa79c9082a3cd2d91 Mon Sep 17 00:00:00 2001 From: Antonin RAFFIN Date: Sat, 15 Sep 2018 15:33:13 +0200 Subject: [PATCH 3/3] Clean up README --- README.md | 734 +----------------------------------------------------- 1 file changed, 8 insertions(+), 726 deletions(-) diff --git a/README.md b/README.md index c942dc1b..0642ee73 100644 --- a/README.md +++ b/README.md @@ -10,69 +10,7 @@ Python codes for robotics algorithm. * [What is this?](#what-is-this) * [Requirements](#requirements) * [How to use](#how-to-use) - * [Localization](#localization) - * [Extended Kalman Filter localization](#extended-kalman-filter-localization) - * [Unscented Kalman Filter localization](#unscented-kalman-filter-localization) - * [Particle filter localization](#particle-filter-localization) - * [Histogram filter localization](#histogram-filter-localization) - * [Mapping](#mapping) - * [Gaussian grid map](#gaussian-grid-map) - * [Ray casting grid map](#ray-casting-grid-map) - * [k-means object clustering](#k-means-object-clustering) - * [Object shape recognition using circle fitting](#object-shape-recognition-using-circle-fitting) - * [SLAM](#slam) - * [Iterative Closest Point (ICP) Matching](#iterative-closest-point-icp-matching) - * [EKF SLAM](#ekf-slam) - * [FastSLAM 1.0](#fastslam-10) - * [FastSLAM 2.0](#fastslam-20) - * [Graph based SLAM](#graph-based-slam) - * [Path Planning](#path-planning) - * [Dynamic Window Approach](#dynamic-window-approach) - * [Grid based search](#grid-based-search) - * [Dijkstra algorithm](#dijkstra-algorithm) - * [A* algorithm](#a-algorithm) - * [Potential Field algorithm](#potential-field-algorithm) - * [Model Predictive Trajectory Generator](#model-predictive-trajectory-generator) - * [Path optimization sample](#path-optimization-sample) - * [Lookup table generation sample](#lookup-table-generation-sample) - * [State Lattice Planning](#state-lattice-planning) - * [Uniform polar sampling](#uniform-polar-sampling) - * [Biased polar sampling](#biased-polar-sampling) - * [Lane sampling](#lane-sampling) - * [Probabilistic Road-Map (PRM) planning](#probabilistic-road-map-prm-planning) - * [Voronoi Road-Map planning](#voronoi-road-map-planning) - * [Rapidly-Exploring Random Trees (RRT)](#rapidly-exploring-random-trees-rrt) - * [Basic RRT](#basic-rrt) - * [RRT*](#rrt) - * [RRT with dubins path](#rrt-with-dubins-path) - * [RRT* with dubins path](#rrt-with-dubins-path-1) - * [RRT* with reeds-sheep 
path](#rrt-with-reeds-sheep-path) - * [Informed RRT*](#informed-rrt) - * [Batch Informed RRT*](#batch-informed-rrt) - * [Closed Loop RRT*](#closed-loop-rrt) - * [LQR-RRT*](#lqr-rrt) - * [Cubic spline planning](#cubic-spline-planning) - * [B-Spline planning](#b-spline-planning) - * [Eta^3 Spline path planning](#eta3-spline-path-planning) - * [Bezier path planning](#bezier-path-planning) - * [Quintic polynomials planning](#quintic-polynomials-planning) - * [Dubins path planning](#dubins-path-planning) - * [Reeds Shepp planning](#reeds-shepp-planning) - * [LQR based path planning](#lqr-based-path-planning) - * [Optimal Trajectory in a Frenet Frame](#optimal-trajectory-in-a-frenet-frame) - * [Path tracking](#path-tracking) - * [move to a pose control](#move-to-a-pose-control) - * [Pure pursuit tracking](#pure-pursuit-tracking) - * [Stanley control](#stanley-control) - * [Rear wheel feedback control](#rear-wheel-feedback-control) - * [Linear–quadratic regulator (LQR) steering control](#linearquadratic-regulator-lqr-steering-control) - * [Linear–quadratic regulator (LQR) speed and steering control](#linearquadratic-regulator-lqr-speed-and-steering-control) - * [Model predictive speed and steering control](#model-predictive-speed-and-steering-control) - * [Arm Navigation](#arm-navigation) - * [Two joint arm to point control](#two-joint-arm-to-point-control) - * [N joint arm to point control](#n-joint-arm-to-point-control) - * [Aerial Navigation](#aerial-navigation) - * [drone 3d trajectory following](#drone-3d-trajectory-following) + * [Documentation](#documentation) * [License](#license) * [Contribution](#contribution) * [Support](#support) @@ -94,20 +32,18 @@ See this paper for more details: - [\[1808\.10703\] PythonRobotics: a Python code collection of robotics algorithms](https://arxiv.org/abs/1808.10703) ([BibTeX](https://github.com/AtsushiSakai/PythonRoboticsPaper/blob/master/python_robotics.bib)) +# Documentation + +Documentation is available online: [https://pythonrobotics.readthedocs.io/](https://pythonrobotics.readthedocs.io/) # Requirements - Python 3.6.x - - numpy - - scipy - - matplotlib - - pandas - -- [cvxpy](http://www.cvxpy.org/en/latest/) +- [cvxpy](http://www.cvxpy.org/en/latest/) # How to use @@ -117,661 +53,10 @@ See this paper for more details: 3. Execute python script in each directory. -4. Add star to this repo if you like it :smiley:. +4. Add star to this repo if you like it :smiley:. -# Localization -## Extended Kalman Filter localization - - - -This is a sensor fusion localization with Extended Kalman Filter(EKF). - -The blue line is true trajectory, the black line is dead reckoning trajectory, - -the green point is positioning observation (ex. GPS), and the red line is estimated trajectory with EKF. - -The red ellipse is estimated covariance ellipse with EKF. - -Ref: - -- [PROBABILISTIC ROBOTICS](http://www.probabilistic-robotics.org/) - -## Unscented Kalman Filter localization - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/Localization/unscented_kalman_filter/animation.gif) - -This is a sensor fusion localization with Unscented Kalman Filter(UKF). - -The lines and points are same meaning of the EKF simulation. 
- -Ref: - -- [Discriminatively Trained Unscented Kalman Filter for Mobile Robot Localization](https://www.researchgate.net/publication/267963417_Discriminatively_Trained_Unscented_Kalman_Filter_for_Mobile_Robot_Localization) - - -## Particle filter localization - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/Localization/particle_filter/animation.gif) - -This is a sensor fusion localization with Particle Filter(PF). - -The blue line is true trajectory, the black line is dead reckoning trajectory, - -and the red line is estimated trajectory with PF. - -It is assumed that the robot can measure a distance from landmarks (RFID). - -This measurements are used for PF localization. - -Ref: - -- [PROBABILISTIC ROBOTICS](http://www.probabilistic-robotics.org/) - - -## Histogram filter localization - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/Localization/histogram_filter/animation.gif) - -This is a 2D localization example with Histogram filter. - -The red cross is true position, black points are RFID positions. - -The blue grid shows a position probability of histogram filter. - -In this simulation, x,y are unknown, yaw is known. - -The filter integrates speed input and range observations from RFID for localization. - -Initial position is not needed. - -Ref: - -- [PROBABILISTIC ROBOTICS](http://www.probabilistic-robotics.org/) - -# Mapping - -## Gaussian grid map - -This is a 2D Gaussian grid mapping example. - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/Mapping/gaussian_grid_map/animation.gif) - -## Ray casting grid map - -This is a 2D ray casting grid mapping example. - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/Mapping/raycasting_grid_map/animation.gif) - -## k-means object clustering - -This is a 2D object clustering with k-means algorithm. - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/Mapping/kmeans_clustering/animation.gif) - -## Object shape recognition using circle fitting - -This is an object shape recognition using circle fitting. - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/Mapping/circle_fitting/animation.gif) - -The blue circle is the true object shape. - -The red crosses are observations from a ranging sensor. - -The red circle is the estimated object shape using circle fitting. - -# SLAM - -Simultaneous Localization and Mapping(SLAM) examples - -## Iterative Closest Point (ICP) Matching - -This is a 2D ICP matching example with singular value decomposition. - -It can calculate a rotation matrix and a translation vector between points to points. - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/iterative_closest_point/animation.gif) - -Ref: - -- [Introduction to Mobile Robotics: Iterative Closest Point Algorithm](https://cs.gmu.edu/~kosecka/cs685/cs685-icp.pdf) - - -## EKF SLAM - -This is an Extended Kalman Filter based SLAM example. - -The blue line is ground truth, the black line is dead reckoning, the red line is the estimated trajectory with EKF SLAM. - -The green crosses are estimated landmarks. - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/EKFSLAM/animation.gif) - -Ref: - -- [PROBABILISTIC ROBOTICS](http://www.probabilistic-robotics.org/) - -## FastSLAM 1.0 - -This is a feature based SLAM example using FastSLAM 1.0. - -The blue line is ground truth, the black line is dead reckoning, the red line is the estimated trajectory with FastSLAM. - -The red points are particles of FastSLAM. 
- -Black points are landmarks, blue crosses are estimated landmark positions by FastSLAM. - - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/FastSLAM1/animation.gif) - - -Ref: - -- [PROBABILISTIC ROBOTICS](http://www.probabilistic-robotics.org/) - -- [SLAM simulations by Tim Bailey](http://www-personal.acfr.usyd.edu.au/tbailey/software/slam_simulations.htm) - -## FastSLAM 2.0 - -This is a feature based SLAM example using FastSLAM 2.0. - -The animation has the same meanings as one of FastSLAM 1.0. - - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/FastSLAM2/animation.gif) - - -Ref: - -- [PROBABILISTIC ROBOTICS](http://www.probabilistic-robotics.org/) - -- [SLAM simulations by Tim Bailey](http://www-personal.acfr.usyd.edu.au/tbailey/software/slam_simulations.htm) - - -## Graph based SLAM - -This is a graph based SLAM example. - -The blue line is ground truth. - -The black line is dead reckoning. - -The red line is the estimated trajectory with Graph based SLAM. - -The black stars are landmarks for graph edge generation. - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/SLAM/GraphBasedSLAM/animation.gif) - -Ref: - -- [A Tutorial on Graph-Based SLAM](http://www2.informatik.uni-freiburg.de/~stachnis/pdf/grisetti10titsmag.pdf) - - -# Path Planning - -## Dynamic Window Approach - -This is a 2D navigation sample code with Dynamic Window Approach. - -- [The Dynamic Window Approach to Collision Avoidance](https://www.ri.cmu.edu/pub_files/pub1/fox_dieter_1997_1/fox_dieter_1997_1.pdf) - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/DynamicWindowApproach/animation.gif) - - -## Grid based search - -### Dijkstra algorithm - -This is a 2D grid based shortest path planning with Dijkstra's algorithm. - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/Dijkstra/animation.gif) - -In the animation, cyan points are searched nodes. - -### A\* algorithm - -This is a 2D grid based shortest path planning with A star algorithm. - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/AStar/animation.gif) - -In the animation, cyan points are searched nodes. - -Its heuristic is 2D Euclid distance. - -### Potential Field algorithm - -This is a 2D grid based path planning with Potential Field algorithm. - -![PotentialField](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/PotentialFieldPlanning/animation.gif) - -In the animation, the blue heat map shows potential value on each grid. - -Ref: - -- [Robotic Motion Planning:Potential Functions](https://www.cs.cmu.edu/~motionplanning/lecture/Chap4-Potential-Field_howie.pdf) - - -## Model Predictive Trajectory Generator - -This is a path optimization sample on model predictive trajectory generator. - -This algorithm is used for state lattice planner. 
- -### Path optimization sample - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ModelPredictiveTrajectoryGenerator/kn05animation.gif) - -### Lookup table generation sample - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ModelPredictiveTrajectoryGenerator/lookuptable.png?raw=True) - -Ref: - -- [Optimal rough terrain trajectory generation for wheeled mobile robots](http://journals.sagepub.com/doi/pdf/10.1177/0278364906075328) - - -## State Lattice Planning - -This script is a path planning code with state lattice planning. - -This code uses the model predictive trajectory generator to solve boundary problem. - -Ref: - -- [Optimal rough terrain trajectory generation for wheeled mobile robots](http://journals.sagepub.com/doi/pdf/10.1177/0278364906075328) - -- [State Space Sampling of Feasible Motions for High-Performance Mobile Robot Navigation in Complex Environments](http://www.frc.ri.cmu.edu/~alonzo/pubs/papers/JFR_08_SS_Sampling.pdf) - - -### Uniform polar sampling - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/StateLatticePlanner/UniformPolarSampling.gif) - - -### Biased polar sampling - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/StateLatticePlanner/BiasedPolarSampling.gif) - - -### Lane sampling - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/StateLatticePlanner/LaneSampling.gif) - -## Probabilistic Road-Map (PRM) planning - -![PRM](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ProbabilisticRoadMap/animation.gif) - -This PRM planner uses Dijkstra method for graph search. - -In the animation, blue points are sampled points, - -Cyan crosses means searched points with Dijkstra method, - -The red line is the final path of PRM. - -Ref: - -- [Probabilistic roadmap \- Wikipedia](https://en.wikipedia.org/wiki/Probabilistic_roadmap) - -   -## Voronoi Road-Map planning - -![VRM](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/VoronoiRoadMap/animation.gif) - -This Voronoi road-map planner uses Dijkstra method for graph search. - -In the animation, blue points are Voronoi points, - -Cyan crosses mean searched points with Dijkstra method, - -The red line is the final path of Vornoi Road-Map. - -Ref: - -- [Robotic Motion Planning](https://www.cs.cmu.edu/~motionplanning/lecture/Chap5-RoadMap-Methods_howie.pdf) - - -## Rapidly-Exploring Random Trees (RRT) - -### Basic RRT - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRT/animation.gif) - -This is a simple path planning code with Rapidly-Exploring Random Trees (RRT) - -Black circles are obstacles, green line is a searched tree, red crosses are start and goal positions. - -### RRT\* - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRTstar/animation.gif) - -This is a path planning code with RRT\* - -Black circles are obstacles, green line is a searched tree, red crosses are start and goal positions. 
- -Ref: - -- [Incremental Sampling-based Algorithms for Optimal Motion Planning](https://arxiv.org/abs/1005.0416) - -- [Sampling-based Algorithms for Optimal Motion Planning](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.419.5503&rep=rep1&type=pdf) - - -### RRT with dubins path - -![PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRTDubins/animation.gif) - -Path planning for a car robot with RRT and dubins path planner. - - -### RRT\* with dubins path - -![AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRTStarDubins/animation.gif) - -Path planning for a car robot with RRT\* and dubins path planner. - - -### RRT\* with reeds-sheep path - -![Robotics/animation.gif at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/RRTStarReedsShepp/animation.gif)) - -Path planning for a car robot with RRT\* and reeds sheep path planner. - -### Informed RRT\* - -![irrt](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/InformedRRTStar/animation.gif)) - -This is a path planning code with Informed RRT\*. - -The cyan ellipse is the heuristic sampling domain of Informed RRT\*. - -Ref: - -- [Informed RRT\*: Optimal Sampling-based Path Planning Focused via Direct Sampling of an Admissible Ellipsoidal Heuristic](https://arxiv.org/pdf/1404.2334.pdf) - -### Batch Informed RRT\* - -![irrt](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/BatchInformedRRTStar/animation.gif)) - -This is a path planning code with Batch Informed RRT\*. - -Ref: - -- [Batch Informed Trees \(BIT\*\): Sampling\-based Optimal Planning via the Heuristically Guided Search of Implicit Random Geometric Graphs](https://arxiv.org/abs/1405.5848) - -### Closed Loop RRT\* - -A vehicle model based path planning with closed loop RRT\*. - -![CLRRT](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ClosedLoopRRTStar/animation.gif) - -In this code, pure-pursuit algorithm is used for steering control, - -PID is used for speed control. - -Ref: - -- [Motion Planning in Complex Environments -using Closed-loop Prediction](http://acl.mit.edu/papers/KuwataGNC08.pdf) - -- [Real-time Motion Planning with Applications to -Autonomous Urban Driving](http://acl.mit.edu/papers/KuwataTCST09.pdf) - -- [[1601.06326] Sampling-based Algorithms for Optimal Motion Planning Using Closed-loop Prediction](https://arxiv.org/abs/1601.06326) - -### LQR-RRT\* - -This is a path planning simulation with LQR-RRT\*. - -A double integrator motion model is used for LQR local planner. - -![LQRRRT](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/LQRRRTStar/animation.gif) - -Ref: - -- [LQR\-RRT\*: Optimal Sampling\-Based Motion Planning with Automatically Derived Extension Heuristics](http://lis.csail.mit.edu/pubs/perez-icra12.pdf) - -- [MahanFathi/LQR\-RRTstar: LQR\-RRT\* method is used for random motion planning of a simple pendulum in its phase plot](https://github.com/MahanFathi/LQR-RRTstar) - - -## Cubic spline planning - -A sample code for cubic path planning. - -This code generates a curvature continuous path based on x-y waypoints with cubic spline. - -Heading angle of each point can be also calculated analytically. 
- -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/CubicSpline/Figure_1.png?raw=True) -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/CubicSpline/Figure_2.png?raw=True) -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/CubicSpline/Figure_3.png?raw=True) - - -## B-Spline planning - -![B-Spline](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/BSplinePath/Figure_1.png?raw=True) - -This is a path planning with B-Spline curse. - -If you input waypoints, it generates a smooth path with B-Spline curve. - -The final course should be on the first and last waypoints. - -Ref: - -- [B\-spline \- Wikipedia](https://en.wikipedia.org/wiki/B-spline) - -## Eta^3 Spline path planning - -![eta3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/Eta3SplinePath/animation.gif?raw=True) - -This is a path planning with Eta^3 spline. - -Ref: - -- [\eta^3-Splines for the Smooth Path Generation of Wheeled Mobile Robots](https://ieeexplore.ieee.org/document/4339545/) - - - -## Bezier path planning - -A sample code of Bezier path planning. - -It is based on 4 control points Beier path. - -![Bezier1](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/BezierPath/Figure_1.png?raw=True) - -If you change the offset distance from start and end point, - -You can get different Beizer course: - -![Bezier2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/BezierPath/Figure_2.png?raw=True) - - -Ref: - -- [Continuous Curvature Path Generation Based on Bezier Curves for Autonomous Vehicles](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.294.6438&rep=rep1&type=pdf) - -## Quintic polynomials planning - -Motion planning with quintic polynomials. - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/QuinticPolynomialsPlanner/animation.gif) - -It can calculate 2D path, velocity, and acceleration profile based on quintic polynomials. - -Ref: - -- [Local Path Planning And Motion Control For Agv In Positioning](http://ieeexplore.ieee.org/document/637936/) - - -## Dubins path planning - -A sample code for Dubins path planning. - -![dubins](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/DubinsPath/animation.gif?raw=True) - -Ref: - -- [Dubins path - Wikipedia](https://en.wikipedia.org/wiki/Dubins_path) - -## Reeds Shepp planning - -A sample code with Reeds Shepp path planning. - -![RSPlanning](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/ReedsSheppPath/animation.gif?raw=true) - -Ref: - -- [15.3.2 Reeds\-Shepp Curves](http://planning.cs.uiuc.edu/node822.html) - -- [optimal paths for a car that goes both forwards and backwards](https://pdfs.semanticscholar.org/932e/c495b1d0018fd59dee12a0bf74434fac7af4.pdf) - -- [ghliu/pyReedsShepp: Implementation of Reeds Shepp curve\.](https://github.com/ghliu/pyReedsShepp) - - -## LQR based path planning - -A sample code using LQR based path planning for double integrator model. 
- -![RSPlanning](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/LQRPlanner/animation.gif?raw=true) - - -## Optimal Trajectory in a Frenet Frame - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathPlanning/FrenetOptimalTrajectory/animation.gif) - -This is optimal trajectory generation in a Frenet Frame. - -The cyan line is the target course and black crosses are obstacles. - -The red line is predicted path. - -Ref: - -- [Optimal Trajectory Generation for Dynamic Street Scenarios in a Frenet Frame](https://www.researchgate.net/profile/Moritz_Werling/publication/224156269_Optimal_Trajectory_Generation_for_Dynamic_Street_Scenarios_in_a_Frenet_Frame/links/54f749df0cf210398e9277af.pdf) - -- [Optimal trajectory generation for dynamic street scenarios in a Frenet Frame](https://www.youtube.com/watch?v=Cj6tAQe7UCY) - - -# Path tracking - -## move to a pose control - -This is a simulation of moving to a pose control - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/move_to_pose/animation.gif) - -Ref: - -- [P. I. Corke, "Robotics, Vision and Control" \| SpringerLink p102](https://link.springer.com/book/10.1007/978-3-642-20144-8) - - -## Pure pursuit tracking - -Path tracking simulation with pure pursuit steering control and PID speed control. - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/pure_pursuit/animation.gif) - -The red line is a target course, the green cross means the target point for pure pursuit control, the blue line is the tracking. - -Ref: - -- [A Survey of Motion Planning and Control Techniques for Self-driving Urban Vehicles](https://arxiv.org/abs/1604.07446) - -## Stanley control - -Path tracking simulation with Stanley steering control and PID speed control. - -![2](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/stanley_controller/animation.gif) - -Ref: - -- [Stanley: The robot that won the DARPA grand challenge](http://robots.stanford.edu/papers/thrun.stanley05.pdf) - -- [Automatic Steering Methods for Autonomous Automobile Path Tracking](https://www.ri.cmu.edu/pub_files/2009/2/Automatic_Steering_Methods_for_Autonomous_Automobile_Path_Tracking.pdf) - - - -## Rear wheel feedback control - -Path tracking simulation with rear wheel feedback steering control and PID speed control. - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/rear_wheel_feedback/animation.gif) - -Ref: - -- [A Survey of Motion Planning and Control Techniques for Self-driving Urban Vehicles](https://arxiv.org/abs/1604.07446) - - -## Linear–quadratic regulator (LQR) steering control - -Path tracking simulation with LQR steering control and PID speed control. - -![PythonRobotics/figure_1.png at master · AtsushiSakai/PythonRobotics](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/lqr_steer_control/animation.gif) - -Ref: - -- [ApolloAuto/apollo: An open autonomous driving platform](https://github.com/ApolloAuto/apollo) - - -## Linear–quadratic regulator (LQR) speed and steering control - -Path tracking simulation with LQR speed and steering control. 
- -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/PathTracking/lqr_speed_steer_control/animation.gif) - -Ref: - -- [Towards fully autonomous driving: Systems and algorithms \- IEEE Conference Publication](http://ieeexplore.ieee.org/document/5940562/) - - -## Model predictive speed and steering control - -Path tracking simulation with iterative linear model predictive speed and steering control. - - - -Ref: - -- [notebook](https://github.com/AtsushiSakai/PythonRobotics/blob/master/PathTracking/model_predictive_speed_and_steer_control/notebook.ipynb) - - -# Arm Navigation - -## Two joint arm to point control - -Two joint arm to a point control simulation. - -This is a interactive simulation. - -You can set the goal position of the end effector with left-click on the ploting area. - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/ArmNavigation/two_joint_arm_to_point_control/animation.gif) - - -## N joint arm to point control - -N joint arm to a point control simulation. - -This is a interactive simulation. - -You can set the goal position of the end effector with left-click on the ploting area. - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/ArmNavigation/n_joint_arm_to_point_control/animation.gif) - -In this simulation N = 10, however, you can change it. - -# Aerial Navigation - -## drone 3d trajectory following - -This is a 3d trajectory following simulation for a quadrotor. - -![3](https://github.com/AtsushiSakai/PythonRobotics/raw/master/AerialNavigation/drone_3d_trajectory_following/animation.gif) - - -# License +# License MIT @@ -795,7 +80,7 @@ E-mail consultant is also available.   -Your comment using [![Say Thanks!](https://img.shields.io/badge/Say%20Thanks-!-1EAEDB.svg)](https://saythanks.io/to/AtsushiSakai) is also welcome. +Your comment using [![Say Thanks!](https://img.shields.io/badge/Say%20Thanks-!-1EAEDB.svg)](https://saythanks.io/to/AtsushiSakai) is also welcome. This is a list: [Users comments](https://github.com/AtsushiSakai/PythonRobotics/blob/master/users_comments.md) @@ -812,6 +97,3 @@ This is a list: [Users comments](https://github.com/AtsushiSakai/PythonRobotics/ - [Antonin RAFFIN](https://github.com/araffin) - [Alexis Paques](https://github.com/AlexisTM) - - -