From 583232259162205512630091f36cfd702bb593f9 Mon Sep 17 00:00:00 2001
From: Atsushi Sakai
Date: Sat, 17 Nov 2018 08:34:10 +0900
Subject: [PATCH] update docs

---
 Localization/Kalmanfilter_basics.ipynb   |  4 +--
 Localization/Kalmanfilter_basics_2.ipynb |  8 +++---
 docs/jupyternotebook2rst.py              | 27 ++++++++++++++++---
 docs/modules/Kalmanfilter_basics.rst     | 34 +++++++++++++-----------
 docs/modules/Kalmanfilter_basics_2.rst   | 22 +++++++++------
 docs/modules/Planar_Two_Link_IK.rst      | 12 ++++-----
 6 files changed, 67 insertions(+), 40 deletions(-)

diff --git a/Localization/Kalmanfilter_basics.ipynb b/Localization/Kalmanfilter_basics.ipynb
index 1c4e8d97..37428791 100644
--- a/Localization/Kalmanfilter_basics.ipynb
+++ b/Localization/Kalmanfilter_basics.ipynb
@@ -32,7 +32,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 2,
+    "execution_count": 1,
     "metadata": {},
     "outputs": [
     {
@@ -288,7 +288,7 @@
     "$$ \n",
     "f(x, \\mu, \\sigma) = \\frac{1}{\\sigma\\sqrt{2\\pi}} \\exp\\big [{-\\frac{(x-\\mu)^2}{2\\sigma^2} }\\big ]\n",
     "$$\n",
-    "Range is [$-\\inf,\\inf $]\n",
+    "Range is $$[-\\infty,\\infty]$$\n",
     "\n",
     "\n",
     "This is just a function of the mean ($\\mu$) and standard deviation ($\\sigma$), and is what gives the normal distribution the characteristic **bell curve**. "
diff --git a/Localization/Kalmanfilter_basics_2.ipynb b/Localization/Kalmanfilter_basics_2.ipynb
index 2501ac81..813d240e 100644
--- a/Localization/Kalmanfilter_basics_2.ipynb
+++ b/Localization/Kalmanfilter_basics_2.ipynb
@@ -13,7 +13,7 @@
    "source": [
     " ### Probabilistic Generative Laws\n",
     " \n",
-    "**1st Law**:\n",
+    "#### 1st Law:\n",
     "The belief representing the state $x_{t}$ is conditioned on all past states, measurements and controls. This can be shown mathematically by the conditional probability below:\n",
     "\n",
     "$$p(x_{t} | x_{0:t-1},z_{1:t-1},u_{1:t})$$\n",
@@ -31,7 +31,7 @@
     "\n",
     "$$p(x_{t} | x_{0:t-1},z_{1:t-1},u_{1:t})=p(x_{t} | x_{t-1},u_{t})$$\n",
     "\n",
-    "**2nd Law**:\n",
+    "#### 2nd Law:\n",
     "\n",
     "If $x_{t}$ is complete, then:\n",
     "\n",
@@ -84,7 +84,7 @@
     "### Bayes Rule:\n",
     "\n",
     "\n",
-    "Posterior = $\\frac{Likelihood*Prior}{Marginal} $\n",
+    "Posterior = $$\\frac{Likelihood*Prior}{Marginal} $$\n",
     "\n",
     "Here,\n",
     "\n",
@@ -274,7 +274,7 @@
     "\\end{aligned}$$\n",
     "\n",
     "\n",
-    "**$K$ is the *Kalman gain*. It's the crux of the Kalman filter. It is a scaling term that chooses a value partway between $\\mu_z$ and $\\bar\\mu$.**"
+    "$K$ is the *Kalman gain*. It's the crux of the Kalman filter. It is a scaling term that chooses a value partway between $\\mu_z$ and $\\bar\\mu$."
diff --git a/docs/jupyternotebook2rst.py b/docs/jupyternotebook2rst.py
index 12879cfb..f496971d 100644
--- a/docs/jupyternotebook2rst.py
+++ b/docs/jupyternotebook2rst.py
@@ -1,6 +1,6 @@
-""" 
+"""
 
-Jupyter notebook converter to rst file 
+Jupyter notebook converter to rst file
 
 author: Atsushi Sakai
@@ -19,8 +19,22 @@ def get_notebook_path_list(ndir):
     return path
 
 
+def convert_rst(rstpath):
+    with open(rstpath, "r") as file:
+        filedata = file.read()
+
+    # convert the "code" directive to "code-block"
+    # so that Sphinx renders the code properly
+    before = ".. code:: ipython3"
+    after = ".. code-block:: ipython3"
+    filedata = filedata.replace(before, after)
+
+    with open(rstpath, "w") as file:
+        file.write(filedata)
+
+
 def generate_rst(npath):
-    # print(npath)
+    print("====Start generating rst======")
 
     # generate dir
     dirpath = os.path.dirname(npath)
@@ -36,13 +50,18 @@ def generate_rst(npath):
     print(cmd)
     subprocess.call(cmd, shell=True)
 
+    rstpath = dirpath + "/" + basename
+    convert_rst(rstpath)
+
+    # clean up old files
     cmd = "rm -rf "
     cmd += "./modules/"
     cmd += basename[:-4]
     cmd += "*"
-    print(cmd)
+    # print(cmd)
     subprocess.call(cmd, shell=True)
 
+    # move files to module dir
     cmd = "mv "
     cmd += dirpath
     cmd += "/*.rst ./modules/"
diff --git a/docs/modules/Kalmanfilter_basics.rst b/docs/modules/Kalmanfilter_basics.rst
index 1e879218..ce84fe0d 100644
--- a/docs/modules/Kalmanfilter_basics.rst
+++ b/docs/modules/Kalmanfilter_basics.rst
@@ -27,7 +27,7 @@ In the continuous form,
 
 .. math:: \mathbb E[X] = \int_{-\infty}^\infty x\, f(x) \,dx
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     import numpy as np
     import random
@@ -61,7 +61,7 @@ data meaning the spread of the data.
 
 .. math:: \mathit{VAR}(X) = \frac{1}{n}\sum_{i=1}^n (x_i - \mu)^2
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     x=np.random.randn(10)
     np.var(x)
@@ -123,7 +123,7 @@ normal distribution:
 
    \begin{aligned}VAR(X) = \sigma_x^2 &= \frac{1}{n}\sum_{i=1}^n(X - \mu)^2\\
    COV(X, Y) = \sigma_{xy} &= \frac{1}{n}\sum_{i=1}^n\big[(X-\mu_x)(Y-\mu_y)\big]\end{aligned}
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     x=np.random.random((3,3))
     np.cov(x)
@@ -141,7 +141,7 @@ normal distribution:
 
 Covariance taking the data as **sample** with :math:`\frac{1}{N-1}`
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     x_cor=np.random.rand(1,10)
     y_cor=np.random.rand(1,10)
@@ -159,7 +159,7 @@ Covariance taking the data as **sample** with :math:`\frac{1}{N-1}`
 
 Covariance taking the data as **population** with :math:`\frac{1}{N}`
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     np.cov(x_cor,y_cor,bias=1)
@@ -183,7 +183,7 @@ According to this theorem, the average of n samples of random and
 independent variables tends to follow a normal distribution as we
 increase the sample size (generally, for n >= 30).
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     import matplotlib.pyplot as plt
     import random
@@ -222,13 +222,15 @@ described with two parameters, the mean (:math:`\mu`) and the variance
 
    f(x, \mu, \sigma) = \frac{1}{\sigma\sqrt{2\pi}} \exp\big [{-\frac{(x-\mu)^2}{2\sigma^2} }\big ]
 
-   Range is [$-:raw-latex:`\inf`,:raw-latex:`\inf `$]
+   Range is
+
+.. math:: [-\infty,\infty]
 
 
 This is just a function of the mean (\ :math:`\mu`) and standard
 deviation (:math:`\sigma`), and is what gives the normal distribution
 the characteristic **bell curve**.
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     import matplotlib.mlab as mlab
     import math
@@ -284,7 +286,7 @@ New mean is
 
    \sigma_\mathtt{new} = \frac{\sigma_z^2\bar\sigma^2}{\bar\sigma^2+\sigma_z^2}
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     import matplotlib.mlab as mlab
     import math
@@ -336,7 +338,7 @@ of the two.
 
    \begin{gathered}\mu_x = \mu_p + \mu_z \\
    \sigma_x^2 = \sigma_z^2+\sigma_p^2\, \square\end{gathered}
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     import matplotlib.mlab as mlab
     import math
@@ -375,7 +377,7 @@ of the two.
 
 .. image:: Kalmanfilter_basics_files/Kalmanfilter_basics_21_1.png
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     #Example from:
     #https://scipython.com/blog/visualizing-the-bivariate-gaussian-distribution/
@@ -448,7 +450,7 @@ a given (X,Y) value.
 
 \*\* numpy einsum examples \*\*
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     a = np.arange(25).reshape(5,5)
    b = np.arange(5)
@@ -471,7 +473,7 @@ a given (X,Y) value.
     [3 4 5]]
 
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     #this is the diagonal sum, i repeated means the diagonal
     np.einsum('ij', a)
@@ -490,7 +492,7 @@ a given (X,Y) value.
 
 
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     A = np.arange(3).reshape(3,1)
     B = np.array([[ 0, 1, 2, 3],
@@ -508,7 +510,7 @@ a given (X,Y) value.
 
 
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     D = np.array([0,1,2])
     E = np.array([[ 0, 1, 2, 3],
@@ -526,7 +528,7 @@ a given (X,Y) value.
 
 
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     from scipy.stats import multivariate_normal
     x, y = np.mgrid[-5:5:.1, -5:5:.1]
diff --git a/docs/modules/Kalmanfilter_basics_2.rst b/docs/modules/Kalmanfilter_basics_2.rst
index 2556061d..e6cfafc6 100644
--- a/docs/modules/Kalmanfilter_basics_2.rst
+++ b/docs/modules/Kalmanfilter_basics_2.rst
@@ -4,9 +4,12 @@ KF Basics - Part 2
 
 ### Probabilistic Generative Laws
 
-**1st Law**: The belief representing the state :math:`x_{t}`, is
-conditioned on all past states, measurements and controls. This can be
-shown mathematically by the conditional probability shown below:
+1st Law:
+^^^^^^^^
+
+The belief representing the state :math:`x_{t}` is conditioned on all
+past states, measurements and controls. This can be shown mathematically
+by the conditional probability below:
 
 .. math:: p(x_{t} | x_{0:t-1},z_{1:t-1},u_{1:t})
 
@@ -27,7 +30,8 @@ Therefore the law now holds as:
 
 .. math:: p(x_{t} | x_{0:t-1},z_{1:t-1},u_{1:t})=p(x_{t} | x_{t-1},u_{t})
 
-**2nd Law**:
+2nd Law:
+^^^^^^^^
 
 If :math:`x_{t}` is complete, then:
 
@@ -84,7 +88,9 @@ hand, given C (Coin 1 is selected), A and B are independent.
 Bayes Rule:
 ~~~~~~~~~~~
 
-Posterior = $:raw-latex:`\frac{Likelihood*Prior}{Marginal}` $
+Posterior =
+
+.. math:: \frac{Likelihood*Prior}{Marginal}
 
 Here,
 
@@ -158,7 +164,7 @@ and the resultant covariance is smaller.
 Bayes filter localization example:
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     from IPython.display import Image
     Image(filename="bayes_filter.png",width=400)
@@ -281,9 +287,9 @@ The variance in terms of the Kalman gain:
     &= (1-K)\bar\sigma^2
 \end{aligned}
 
-**:math:`K` is the Kalman gain. It’s the crux of the Kalman filter. It
-is a scaling term that chooses a value partway between :math:`\mu_z` and
-:math:`\bar\mu`.**
+:math:`K` is the *Kalman gain*. It’s the crux of the Kalman filter. It
+is a scaling term that chooses a value partway between :math:`\mu_z` and
+:math:`\bar\mu`.
 
 Kalman Filter - Univariate and Multivariate
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/docs/modules/Planar_Two_Link_IK.rst b/docs/modules/Planar_Two_Link_IK.rst
index 6dc826c8..83a72fc1 100644
--- a/docs/modules/Planar_Two_Link_IK.rst
+++ b/docs/modules/Planar_Two_Link_IK.rst
@@ -31,7 +31,7 @@ https://robotacademy.net.au/lesson/inverse-kinematics-for-a-2-joint-robot-arm-us
 
 First, let’s define a class to make plotting our arm easier.
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     %matplotlib inline
     from math import cos, sin
@@ -70,7 +70,7 @@ First, let’s define a class to make plotting our arm easier.
 Let’s also define a function to make it easier to draw an angle on our
 diagram.
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     from math import sqrt
 
@@ -103,7 +103,7 @@ called forward_kinematics - forward kinematics specifies the end-effector
 position given the joint angles and link lengths. Forward kinematics is
 easier than inverse kinematics.
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     arm = TwoLinkArm()
 
@@ -172,7 +172,7 @@ kinematics for :math:`\theta_0` and :math:`\theta_1`, but that would be
 the wrong move. An easier path involves going back to the geometry of
 the arm.
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     from math import pi
 
@@ -245,7 +245,7 @@ to the “arm-down” configuration of the arm, which is what’s shown in
 the diagram. Now we’ll derive an equation for :math:`\theta_0` that
 depends on this value of :math:`\theta_1`.
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     from math import atan2
 
@@ -294,7 +294,7 @@ We now have an expression for this angle :math:`\beta` in terms of one
 of our arm’s joint angles. Now, can we relate :math:`\beta` to
 :math:`\theta_0`? Yes!
 
-.. code:: ipython3
+.. code-block:: ipython3
 
     arm.plot()
     label_diagram()
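
For reference, a minimal standalone sketch of the post-processing step this patch adds. The new `convert_rst()` in `docs/jupyternotebook2rst.py` performs a plain string replacement, swapping the `.. code:: ipython3` directive that nbconvert emits for the `.. code-block:: ipython3` directive so Sphinx renders the code properly. The sample file and its contents below are hypothetical, for illustration only.

.. code-block:: python

    # Standalone demo of the convert_rst() replacement added in
    # docs/jupyternotebook2rst.py. The sample rst content is made up.
    import os
    import tempfile


    def convert_rst(rstpath):
        # Read the whole file, swap the directive, write it back in place.
        with open(rstpath, "r") as file:
            filedata = file.read()

        filedata = filedata.replace(".. code:: ipython3",
                                    ".. code-block:: ipython3")

        with open(rstpath, "w") as file:
            file.write(filedata)


    # Usage with a throwaway rst file.
    sample = ".. code:: ipython3\n\n    import numpy as np\n"
    with tempfile.NamedTemporaryFile("w", suffix=".rst", delete=False) as f:
        f.write(sample)

    convert_rst(f.name)

    with open(f.name) as file:
        print(file.read())
    # prints:
    # .. code-block:: ipython3
    #
    #     import numpy as np

    os.remove(f.name)

A regex anchored to the start of a line would be stricter, but the plain `str.replace` used in the patch is sufficient here, since the directive text is unlikely to occur in ordinary prose.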