mirror of
https://github.com/davidfraser/pyan.git
synced 2026-01-08 23:17:58 -05:00
Merge pull request #11 from Technologicat/master
Update stable repo to Pyan3 v1.2.0
This commit is contained in:
164
.gitignore
vendored
Normal file
164
.gitignore
vendored
Normal file
@@ -0,0 +1,164 @@
|
||||
# based on https://github.com/github/gitignore/blob/master/Python.gitignore
|
||||
*.csv
|
||||
*.pkl
|
||||
*.joblib
|
||||
*.msgpack
|
||||
.DS_Store
|
||||
.ipynb_checkpoints
|
||||
.venv/
|
||||
Endpoint_test/
|
||||
run_simulator.py
|
||||
__pycache__/
|
||||
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
docs/source/api
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
|
||||
# others
|
||||
VERSION
|
||||
coverage.xml
|
||||
junit.xml
|
||||
htmlcov
|
||||
|
||||
# editors
|
||||
.idea/
|
||||
.history/
|
||||
.vscode/
|
||||
20
.pre-commit-config.yaml
Normal file
20
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,20 @@
|
||||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v3.3.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
- repo: https://gitlab.com/pycqa/flake8
|
||||
rev: ""
|
||||
hooks:
|
||||
- id: flake8
|
||||
- repo: https://github.com/pre-commit/mirrors-isort
|
||||
rev: v5.6.4
|
||||
hooks:
|
||||
- id: isort
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 20.8b1
|
||||
hooks:
|
||||
- id: black
|
||||
19
AUTHORS.md
Normal file
19
AUTHORS.md
Normal file
@@ -0,0 +1,19 @@
|
||||
Original [pyan.py](https://github.com/ejrh/ejrh/blob/master/utils/pyan.py) for Python 2 by Edmund Horner, 2012. [Original blog post with explanation](http://ejrh.wordpress.com/2012/01/31/call-graphs-in-python-part-2/).
|
||||
|
||||
[Coloring and grouping](https://ejrh.wordpress.com/2012/08/18/coloured-call-graphs/) for GraphViz output by Juha Jeronen.
|
||||
|
||||
[Git repository cleanup](https://github.com/davidfraser/pyan/) and maintenance by David Fraser.
|
||||
|
||||
[yEd GraphML output, and framework for easily adding new output formats](https://github.com/davidfraser/pyan/pull/1) by Patrick Massot.
|
||||
|
||||
A bugfix [[2]](https://github.com/davidfraser/pyan/pull/2) and the option `--dot-rankdir` [[3]](https://github.com/davidfraser/pyan/pull/3) contributed by GitHub user ch41rmn.
|
||||
|
||||
A bug in `.tgf` output [[4]](https://github.com/davidfraser/pyan/pull/4) pointed out and fix suggested by Adam Eijdenberg.
|
||||
|
||||
This Python 3 port, analyzer expansion, and additional refactoring by Juha Jeronen.
|
||||
|
||||
HTML and SVG export by Jan Beitner.
|
||||
|
||||
Support for relative imports by Jan Beitner and Rakan Alanazi.
|
||||
|
||||
Further contributions by Ioannis Filippidis, Jan Malek, José Eduardo Montenegro Cavalcanti de Oliveira, Mantas Zimnickas, Sam Basak, Brady Deetz, and GitHub user dmfreemon.
|
||||
20
LICENSE.md
20
LICENSE.md
@@ -2,7 +2,7 @@
|
||||
|
||||
Version 2, June 1991
|
||||
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc.
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc.
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
@@ -96,17 +96,17 @@ portion of it, thus forming a work based on the Program, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
|
||||
|
||||
**a)** You must cause the modified files to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
|
||||
|
||||
**b)** You must cause any work that you distribute or publish, that in
|
||||
whole or in part contains or is derived from the Program or any part
|
||||
thereof, to be licensed as a whole at no charge to all third parties
|
||||
under the terms of this License.
|
||||
|
||||
|
||||
|
||||
**c)** If the modified program normally reads commands interactively
|
||||
when run, you must cause it, when started running for such interactive
|
||||
use in the most ordinary way, to print or display an announcement
|
||||
@@ -143,12 +143,12 @@ the scope of this License.
|
||||
under Section 2) in object code or executable form under the terms of
|
||||
Sections 1 and 2 above provided that you also do one of the following:
|
||||
|
||||
|
||||
|
||||
**a)** Accompany it with the complete corresponding machine-readable
|
||||
source code, which must be distributed under the terms of Sections 1
|
||||
and 2 above on a medium customarily used for software interchange; or,
|
||||
|
||||
|
||||
|
||||
**b)** Accompany it with a written offer, valid for at least three
|
||||
years, to give any third party, for a charge no more than your cost of
|
||||
physically performing source distribution, a complete machine-readable
|
||||
@@ -156,7 +156,7 @@ copy of the corresponding source code, to be distributed under the
|
||||
terms of Sections 1 and 2 above on a medium customarily used for
|
||||
software interchange; or,
|
||||
|
||||
|
||||
|
||||
**c)** Accompany it with the information you received as to the offer
|
||||
to distribute corresponding source code. (This alternative is allowed
|
||||
only for noncommercial distribution and only if you received the
|
||||
@@ -331,7 +331,7 @@ when it starts in an interactive mode:
|
||||
Gnomovision version 69, Copyright (C) year name of author
|
||||
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
|
||||
type `show w'. This is free software, and you are welcome
|
||||
to redistribute it under certain conditions; type `show c'
|
||||
to redistribute it under certain conditions; type `show c'
|
||||
for details.
|
||||
|
||||
The hypothetical commands \`show w' and \`show c' should show the
|
||||
@@ -346,7 +346,7 @@ if necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright
|
||||
interest in the program `Gnomovision'
|
||||
(which makes passes at compilers) written
|
||||
(which makes passes at compilers) written
|
||||
by James Hacker.
|
||||
|
||||
signature of Ty Coon, 1 April 1989
|
||||
@@ -358,4 +358,4 @@ you may consider it more useful to permit linking proprietary
|
||||
applications with the library. If this is what you want to do, use the
|
||||
[GNU Lesser General Public
|
||||
License](http://www.gnu.org/licenses/lgpl.html) instead of this
|
||||
License.
|
||||
License.
|
||||
|
||||
238
README.md
238
README.md
@@ -1,26 +1,36 @@
|
||||
# Pyan3: Offline call graph generator for Python 3
|
||||
# Pyan3
|
||||
|
||||
Generate approximate call graphs for Python programs.
|
||||
Offline call graph generator for Python 3
|
||||
|
||||
[](https://travis-ci.com/edumco/pyan)
|
||||
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fedumco%2Fpyan?ref=badge_shield)
|
||||
[](https://www.codacy.com/manual/edumco/pyan?utm_source=github.com&utm_medium=referral&utm_content=edumco/pyan&utm_campaign=Badge_Grade)
|
||||

|
||||
|
||||
Pyan takes one or more Python source files, performs a (rather superficial) static analysis, and constructs a directed graph of the objects in the combined source, and how they define or use each other. The graph can be output for rendering by GraphViz or yEd.
|
||||
|
||||
*And now it is available for Python 3!*
|
||||
This project has 2 official repositories:
|
||||
|
||||
Note: The previous Python 2-compatible version is tagged as `pre-python3`
|
||||
- The original stable [davidfraser/pyan](https://github.com/davidfraser/pyan).
|
||||
- The development repository [Technologicat/pyan](https://github.com/Technologicat/pyan)
|
||||
|
||||
> The PyPI package [pyan3](https://pypi.org/project/pyan3/) is built from development
|
||||
|
||||
## About
|
||||
|
||||
[")](graph0.svg)
|
||||
|
||||
**Defines** relations are drawn with *dotted gray arrows*.
|
||||
**Defines** relations are drawn with _dotted gray arrows_.
|
||||
|
||||
**Uses** relations are drawn with *black solid arrows*. Recursion is indicated by an arrow from a node to itself. [Mutual recursion](https://en.wikipedia.org/wiki/Mutual_recursion#Basic_examples) between nodes X and Y is indicated by a pair of arrows, one pointing from X to Y, and the other from Y to X.
|
||||
**Uses** relations are drawn with _black solid arrows_. Recursion is indicated by an arrow from a node to itself. [Mutual recursion](https://en.wikipedia.org/wiki/Mutual_recursion#Basic_examples) between nodes X and Y is indicated by a pair of arrows, one pointing from X to Y, and the other from Y to X.
|
||||
|
||||
**Nodes** are always filled, and made translucent to clearly show any arrows passing underneath them. This is especially useful for large graphs with GraphViz's `fdp` filter. If colored output is not enabled, the fill is white.
|
||||
|
||||
In **node coloring**, the [HSL](https://en.wikipedia.org/wiki/HSL_and_HSV) color model is used. The **hue** is determined by the *filename* the node comes from. The **lightness** is determined by *depth of namespace nesting*, with darker meaning more deeply nested. Saturation is constant. The spacing between different hues depends on the number of files analyzed; better results are obtained for fewer files.
|
||||
In **node coloring**, the [HSL](https://en.wikipedia.org/wiki/HSL_and_HSV) color model is used. The **hue** is determined by the _filename_ the node comes from. The **lightness** is determined by _depth of namespace nesting_, with darker meaning more deeply nested. Saturation is constant. The spacing between different hues depends on the number of files analyzed; better results are obtained for fewer files.
|
||||
|
||||
**Groups** are filled with translucent gray to avoid clashes with any node color.
|
||||
|
||||
The nodes can be **annotated** by *filename and source line number* information.
|
||||
The nodes can be **annotated** by _filename and source line number_ information.
|
||||
|
||||
## Note
|
||||
|
||||
@@ -28,10 +38,13 @@ The static analysis approach Pyan takes is different from running the code and s
|
||||
|
||||
In Pyan3, the analyzer was ported from `compiler` ([good riddance](https://stackoverflow.com/a/909172)) to a combination of `ast` and `symtable`, and slightly extended.
|
||||
|
||||
# Install
|
||||
|
||||
pip install pyan3
|
||||
|
||||
# Usage
|
||||
|
||||
See `pyan --help`.
|
||||
See `pyan3 --help`.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -41,9 +54,66 @@ Then render using your favorite GraphViz filter, mainly `dot` or `fdp`:
|
||||
|
||||
`dot -Tsvg myuses.dot >myuses.svg`
|
||||
|
||||
Or use directly
|
||||
|
||||
`pyan *.py --uses --no-defines --colored --grouped --annotated --svg >myuses.svg`
|
||||
|
||||
You can also export as an interactive HTML
|
||||
|
||||
`pyan *.py --uses --no-defines --colored --grouped --annotated --html > myuses.html`
|
||||
|
||||
Alternatively, you can call `pyan` from a script
|
||||
|
||||
```shell script
|
||||
import pyan
|
||||
from IPython.display import HTML
|
||||
HTML(pyan.create_callgraph(filenames="**/*.py", format="html"))
|
||||
```
|
||||
|
||||
#### Sphinx integration
|
||||
|
||||
You can integrate callgraphs into Sphinx.
|
||||
Install graphviz (e.g. via `sudo apt-get install graphviz`) and modify `source/conf.py` so that
|
||||
|
||||
```
|
||||
# modify extensions
|
||||
extensions = [
|
||||
...
|
||||
"sphinx.ext.graphviz"
|
||||
"pyan.sphinx",
|
||||
]
|
||||
|
||||
# add graphviz options
|
||||
graphviz_output_format = "svg"
|
||||
```
|
||||
|
||||
Now, there is a callgraph directive which has all the options of the [graphviz directive](https://www.sphinx-doc.org/en/master/usage/extensions/graphviz.html)
|
||||
and in addition:
|
||||
|
||||
- **:no-groups:** (boolean flag): do not group
|
||||
- **:no-defines:** (boolean flag): if to not draw edges that show which functions, methods and classes are defined by a class or module
|
||||
- **:no-uses:** (boolean flag): if to not draw edges that show how a function uses other functions
|
||||
- **:no-colors:** (boolean flag): if to not color in callgraph (default is coloring)
|
||||
- **:nested-grops:** (boolean flag): if to group by modules and submodules
|
||||
- **:annotated:** (boolean flag): annotate callgraph with file names
|
||||
- **:direction:** (string): "horizontal" or "vertical" callgraph
|
||||
- **:toctree:** (string): path to toctree (as used with autosummary) to link elements of callgraph to documentation (makes all nodes clickable)
|
||||
- **:zoomable:** (boolean flag): enables users to zoom and pan callgraph
|
||||
|
||||
Example to create a callgraph for the function `pyan.create_callgraph` that is
|
||||
zoomable, is defined from left to right and links each node to the API documentation that
|
||||
was created at the toctree path `api`.
|
||||
|
||||
```
|
||||
.. callgraph:: pyan.create_callgraph
|
||||
:toctree: api
|
||||
:zoomable:
|
||||
:direction: horizontal
|
||||
```
|
||||
|
||||
#### Troubleshooting
|
||||
|
||||
If GraphViz says *trouble in init_rank*, try adding `-Gnewrank=true`, as in:
|
||||
If GraphViz says _trouble in init_rank_, try adding `-Gnewrank=true`, as in:
|
||||
|
||||
`dot -Gnewrank=true -Tsvg myuses.dot >myuses.svg`
|
||||
|
||||
@@ -55,86 +125,85 @@ If the graph is visually unreadable due to too much detail, consider visualizing
|
||||
|
||||
Currently Pyan always operates at the level of individual functions and methods; an option to visualize only relations between namespaces may (or may not) be added in a future version.
|
||||
|
||||
|
||||
# Features
|
||||
|
||||
*Items tagged with ☆ are new in Pyan3.*
|
||||
_Items tagged with ☆ are new in Pyan3._
|
||||
|
||||
**Graph creation**:
|
||||
|
||||
- Nodes for functions and classes
|
||||
- Edges for defines
|
||||
- Edges for uses
|
||||
- This includes recursive calls ☆
|
||||
- Grouping to represent defines, with or without nesting
|
||||
- Coloring of nodes by filename
|
||||
- Unlimited number of hues ☆
|
||||
- Nodes for functions and classes
|
||||
- Edges for defines
|
||||
- Edges for uses
|
||||
- This includes recursive calls ☆
|
||||
- Grouping to represent defines, with or without nesting
|
||||
- Coloring of nodes by filename
|
||||
- Unlimited number of hues ☆
|
||||
|
||||
**Analysis**:
|
||||
|
||||
- Name lookup across the given set of files
|
||||
- Nested function definitions
|
||||
- Nested class definitions ☆
|
||||
- Nested attribute accesses like `self.a.b` ☆
|
||||
- Inherited attributes ☆
|
||||
- Pyan3 looks up also in base classes when resolving attributes. In the old Pyan, calls to inherited methods used to be picked up by `contract_nonexistents()` followed by `expand_unknowns()`, but that often generated spurious uses edges (because the wildcard to `*.name` expands to `X.name` *for all* `X` that have an attribute called `name`.).
|
||||
- Resolution of `super()` based on the static type at the call site ☆
|
||||
- MRO is (statically) respected in looking up inherited attributes and `super()` ☆
|
||||
- Assignment tracking with lexical scoping
|
||||
- E.g. if `self.a = MyFancyClass()`, the analyzer knows that any references to `self.a` point to `MyFancyClass`
|
||||
- All binding forms are supported (assign, augassign, for, comprehensions, generator expressions, with) ☆
|
||||
- Name clashes between `for` loop counter variables and functions or classes defined elsewhere no longer confuse Pyan.
|
||||
- `self` is defined by capturing the name of the first argument of a method definition, like Python does. ☆
|
||||
- Simple item-by-item tuple assignments like `x,y,z = a,b,c` ☆
|
||||
- Chained assignments `a = b = c` ☆
|
||||
- Local scope for lambda, listcomp, setcomp, dictcomp, genexpr ☆
|
||||
- Keep in mind that list comprehensions gained a local scope (being treated like a function) only in Python 3. Thus, Pyan3, when applied to legacy Python 2 code, will give subtly wrong results if the code uses list comprehensions.
|
||||
- Source filename and line number annotation ☆
|
||||
- The annotation is appended to the node label. If grouping is off, namespace is included in the annotation. If grouping is on, only source filename and line number information is included, because the group title already shows the namespace.
|
||||
- Name lookup across the given set of files
|
||||
- Nested function definitions
|
||||
- Nested class definitions ☆
|
||||
- Nested attribute accesses like `self.a.b` ☆
|
||||
- Inherited attributes ☆
|
||||
- Pyan3 looks up also in base classes when resolving attributes. In the old Pyan, calls to inherited methods used to be picked up by `contract_nonexistents()` followed by `expand_unknowns()`, but that often generated spurious uses edges (because the wildcard to `*.name` expands to `X.name` _for all_ `X` that have an attribute called `name`.).
|
||||
- Resolution of `super()` based on the static type at the call site ☆
|
||||
- MRO is (statically) respected in looking up inherited attributes and `super()` ☆
|
||||
- Assignment tracking with lexical scoping
|
||||
- E.g. if `self.a = MyFancyClass()`, the analyzer knows that any references to `self.a` point to `MyFancyClass`
|
||||
- All binding forms are supported (assign, augassign, for, comprehensions, generator expressions, with) ☆
|
||||
- Name clashes between `for` loop counter variables and functions or classes defined elsewhere no longer confuse Pyan.
|
||||
- `self` is defined by capturing the name of the first argument of a method definition, like Python does. ☆
|
||||
- Simple item-by-item tuple assignments like `x,y,z = a,b,c` ☆
|
||||
- Chained assignments `a = b = c` ☆
|
||||
- Local scope for lambda, listcomp, setcomp, dictcomp, genexpr ☆
|
||||
- Keep in mind that list comprehensions gained a local scope (being treated like a function) only in Python 3. Thus, Pyan3, when applied to legacy Python 2 code, will give subtly wrong results if the code uses list comprehensions.
|
||||
- Source filename and line number annotation ☆
|
||||
- The annotation is appended to the node label. If grouping is off, namespace is included in the annotation. If grouping is on, only source filename and line number information is included, because the group title already shows the namespace.
|
||||
|
||||
## TODO
|
||||
|
||||
- Determine confidence of detected edges (probability that the edge is correct). Start with a binary system, with only values 1.0 and 0.0.
|
||||
- A fully resolved reference to a name, based on lexical scoping, has confidence 1.0.
|
||||
- A reference to an unknown name has confidence 0.0.
|
||||
- Attributes:
|
||||
- A fully resolved reference to a known attribute of a known object has confidence 1.0.
|
||||
- A reference to an unknown attribute of a known object has confidence 1.0. These are mainly generated by imports, when the imported file is not in the analyzed set. (Does this need a third value, such as 0.5?)
|
||||
- A reference to an attribute of an unknown object has confidence 0.0.
|
||||
- A wildcard and its expansions have confidence 0.0.
|
||||
- Effects of binding analysis? The system should not claim full confidence in a bound value, unless it fully understands both the binding syntax and the value. (Note that this is very restrictive. A function call or a list in the expression for the value will currently spoil the full analysis.)
|
||||
- Confidence values may need updating in pass 2.
|
||||
- Make the analyzer understand `del name` (probably seen as `isinstance(node.ctx, ast.Del)` in `visit_Name()`, `visit_Attribute()`)
|
||||
- Prefix methods by class name in the graph; create a legend for annotations. See the discussion [here](https://github.com/johnyf/pyan/issues/4).
|
||||
- Improve the wildcard resolution mechanism, see discussion [here](https://github.com/johnyf/pyan/issues/5).
|
||||
- Could record the namespace of the use site upon creating the wildcard, and check any possible resolutions against that (requiring that the resolved name is in scope at the use site)?
|
||||
- Add an option to visualize relations only between namespaces, useful for large projects.
|
||||
- Scan the nodes and edges, basically generate a new graph and visualize that.
|
||||
- Publish test cases.
|
||||
- Get rid of `self.last_value`?
|
||||
- Consider each specific kind of expression or statement being handled; get the relevant info directly (or by a more controlled kind of recursion) instead of `self.visit()`.
|
||||
- At some point, may need a second visitor class that is just a catch-all that extracts names, which is then applied to only relevant branches of the AST.
|
||||
- On the other hand, maybe `self.last_value` is the simplest implementation that extracts a value from an expression, and it only needs to be used in a controlled manner (as `analyze_binding()` currently does); i.e. reset before visiting, and reset immediately when done.
|
||||
- Determine confidence of detected edges (probability that the edge is correct). Start with a binary system, with only values 1.0 and 0.0.
|
||||
- A fully resolved reference to a name, based on lexical scoping, has confidence 1.0.
|
||||
- A reference to an unknown name has confidence 0.0.
|
||||
- Attributes:
|
||||
- A fully resolved reference to a known attribute of a known object has confidence 1.0.
|
||||
- A reference to an unknown attribute of a known object has confidence 1.0. These are mainly generated by imports, when the imported file is not in the analyzed set. (Does this need a third value, such as 0.5?)
|
||||
- A reference to an attribute of an unknown object has confidence 0.0.
|
||||
- A wildcard and its expansions have confidence 0.0.
|
||||
- Effects of binding analysis? The system should not claim full confidence in a bound value, unless it fully understands both the binding syntax and the value. (Note that this is very restrictive. A function call or a list in the expression for the value will currently spoil the full analysis.)
|
||||
- Confidence values may need updating in pass 2.
|
||||
- Make the analyzer understand `del name` (probably seen as `isinstance(node.ctx, ast.Del)` in `visit_Name()`, `visit_Attribute()`)
|
||||
- Prefix methods by class name in the graph; create a legend for annotations. See the discussion [here](https://github.com/johnyf/pyan/issues/4).
|
||||
- Improve the wildcard resolution mechanism, see discussion [here](https://github.com/johnyf/pyan/issues/5).
|
||||
- Could record the namespace of the use site upon creating the wildcard, and check any possible resolutions against that (requiring that the resolved name is in scope at the use site)?
|
||||
- Add an option to visualize relations only between namespaces, useful for large projects.
|
||||
- Scan the nodes and edges, basically generate a new graph and visualize that.
|
||||
- Publish test cases.
|
||||
- Get rid of `self.last_value`?
|
||||
- Consider each specific kind of expression or statement being handled; get the relevant info directly (or by a more controlled kind of recursion) instead of `self.visit()`.
|
||||
- At some point, may need a second visitor class that is just a catch-all that extracts names, which is then applied to only relevant branches of the AST.
|
||||
- On the other hand, maybe `self.last_value` is the simplest implementation that extracts a value from an expression, and it only needs to be used in a controlled manner (as `analyze_binding()` currently does); i.e. reset before visiting, and reset immediately when done.
|
||||
|
||||
The analyzer **does not currently support**:
|
||||
|
||||
- Tuples/lists as first-class values (currently ignores any assignment of a tuple/list to a single name).
|
||||
- Support empty lists, too (for resolving method calls to `.append()` and similar).
|
||||
- Starred assignment `a,*b,c = d,e,f,g,h`
|
||||
- Slicing and indexing in assignment (`ast.Subscript`)
|
||||
- Additional unpacking generalizations ([PEP 448](https://www.python.org/dev/peps/pep-0448/), Python 3.5+).
|
||||
- Any **uses** on the RHS *at the binding site* in all of the above are already detected by the name and attribute analyzers, but the binding information from assignments of these forms will not be recorded (at least not correctly).
|
||||
- Enums; need to mark the use of any of their attributes as use of the Enum. Need to detect `Enum` in `bases` during analysis of ClassDef; then tag the class as an enum and handle differently.
|
||||
- Resolving results of function calls, except for a very limited special case for `super()`.
|
||||
- Any binding of a name to a result of a function (or method) call - provided that the binding itself is understood by Pyan - will instead show in the output as binding the name to that function (or method). (This may generate some unintuitive uses edges in the graph.)
|
||||
- Distinguishing between different Lambdas in the same namespace (to report uses of a particular `lambda` that has been stored in `self.something`).
|
||||
- Type hints ([PEP 484](https://www.python.org/dev/peps/pep-0484/), Python 3.5+).
|
||||
- Type inference for function arguments
|
||||
- Either of these two could be used to bind function argument names to the appropriate object types, avoiding the need for wildcard references (especially for attribute accesses on objects passed in as function arguments).
|
||||
- Type inference could run as pass 3, using additional information from the state of the graph after pass 2 to connect call sites to function definitions. Alternatively, no additional pass; store the AST nodes in the earlier pass. Type inference would allow resolving some wildcards by finding the method of the actual object instance passed in.
|
||||
- Must understand, at the call site, whether the first positional argument in the function def is handled implicitly or not. This is found by looking at the flavor of the Node representing the call target.
|
||||
- Async definitions are detected, but passed through to the corresponding non-async analyzers; could be annotated.
|
||||
- Cython; could strip or comment out Cython-specific code as a preprocess step, then treat as Python (will need to be careful to get line numbers right).
|
||||
- Tuples/lists as first-class values (currently ignores any assignment of a tuple/list to a single name).
|
||||
- Support empty lists, too (for resolving method calls to `.append()` and similar).
|
||||
- Starred assignment `a,*b,c = d,e,f,g,h`
|
||||
- Slicing and indexing in assignment (`ast.Subscript`)
|
||||
- Additional unpacking generalizations ([PEP 448](https://www.python.org/dev/peps/pep-0448/), Python 3.5+).
|
||||
- Any **uses** on the RHS _at the binding site_ in all of the above are already detected by the name and attribute analyzers, but the binding information from assignments of these forms will not be recorded (at least not correctly).
|
||||
- Enums; need to mark the use of any of their attributes as use of the Enum. Need to detect `Enum` in `bases` during analysis of ClassDef; then tag the class as an enum and handle differently.
|
||||
- Resolving results of function calls, except for a very limited special case for `super()`.
|
||||
- Any binding of a name to a result of a function (or method) call - provided that the binding itself is understood by Pyan - will instead show in the output as binding the name to that function (or method). (This may generate some unintuitive uses edges in the graph.)
|
||||
- Distinguishing between different Lambdas in the same namespace (to report uses of a particular `lambda` that has been stored in `self.something`).
|
||||
- Type hints ([PEP 484](https://www.python.org/dev/peps/pep-0484/), Python 3.5+).
|
||||
- Type inference for function arguments
|
||||
- Either of these two could be used to bind function argument names to the appropriate object types, avoiding the need for wildcard references (especially for attribute accesses on objects passed in as function arguments).
|
||||
- Type inference could run as pass 3, using additional information from the state of the graph after pass 2 to connect call sites to function definitions. Alternatively, no additional pass; store the AST nodes in the earlier pass. Type inference would allow resolving some wildcards by finding the method of the actual object instance passed in.
|
||||
- Must understand, at the call site, whether the first positional argument in the function def is handled implicitly or not. This is found by looking at the flavor of the Node representing the call target.
|
||||
- Async definitions are detected, but passed through to the corresponding non-async analyzers; could be annotated.
|
||||
- Cython; could strip or comment out Cython-specific code as a preprocess step, then treat as Python (will need to be careful to get line numbers right).
|
||||
|
||||
# How it works
|
||||
|
||||
@@ -143,7 +212,7 @@ From the viewpoint of graphing the defines and uses relations, the interesting p
|
||||
Bindings are tracked, with lexical scoping, to determine which type of object, or which function, each name points to at any given point in the source code being analyzed. This allows tracking things like:
|
||||
|
||||
```python
|
||||
def some_func()
|
||||
def some_func():
|
||||
pass
|
||||
|
||||
class MyClass:
|
||||
@@ -164,21 +233,8 @@ When a binding statement is encountered, the current namespace determines in whi
|
||||
|
||||
# Authors
|
||||
|
||||
Original [pyan.py](https://github.com/ejrh/ejrh/blob/master/utils/pyan.py) by Edmund Horner. [Original post with explanation](http://ejrh.wordpress.com/2012/01/31/call-graphs-in-python-part-2/).
|
||||
|
||||
[Coloring and grouping](https://ejrh.wordpress.com/2012/08/18/coloured-call-graphs/) for GraphViz output by Juha Jeronen.
|
||||
|
||||
[Git repository cleanup](https://github.com/davidfraser/pyan/) and maintenance by David Fraser.
|
||||
|
||||
[yEd GraphML output, and framework for easily adding new output formats](https://github.com/davidfraser/pyan/pull/1) by Patrick Massot.
|
||||
|
||||
A bugfix [[2]](https://github.com/davidfraser/pyan/pull/2) and the option `--dot-rankdir` [[3]](https://github.com/davidfraser/pyan/pull/3) contributed by GitHub user ch41rmn.
|
||||
|
||||
A bug in `.tgf` output [[4]](https://github.com/davidfraser/pyan/pull/4) pointed out and fix suggested by Adam Eijdenberg.
|
||||
|
||||
This Python 3 port, analyzer expansion, and additional refactoring by Juha Jeronen.
|
||||
See [AUTHORS.md](AUTHORS.md).
|
||||
|
||||
# License
|
||||
|
||||
[GPL v2](LICENSE.md), as per [comments here](https://ejrh.wordpress.com/2012/08/18/coloured-call-graphs/).
|
||||
|
||||
|
||||
2
makedist.sh
Executable file
2
makedist.sh
Executable file
@@ -0,0 +1,2 @@
|
||||
#!/bin/bash
|
||||
python3 setup.py sdist bdist_wheel
|
||||
421
modvis.py
Normal file
421
modvis.py
Normal file
@@ -0,0 +1,421 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8; -*-
|
||||
"""A simple import analyzer. Visualize dependencies between modules."""
|
||||
|
||||
import ast
|
||||
from glob import glob
|
||||
import logging
|
||||
from optparse import OptionParser # TODO: migrate to argparse
|
||||
import os
|
||||
|
||||
import pyan.node
|
||||
import pyan.visgraph
|
||||
import pyan.writers
|
||||
|
||||
# from pyan.anutils import get_module_name
|
||||
|
||||
|
||||
def filename_to_module_name(fullpath): # we need to see __init__, hence we don't use anutils.get_module_name.
|
||||
"""'some/path/module.py' -> 'some.path.module'"""
|
||||
if not fullpath.endswith(".py"):
|
||||
raise ValueError("Expected a .py filename, got '{}'".format(fullpath))
|
||||
rel = ".{}".format(os.path.sep) # ./
|
||||
if fullpath.startswith(rel):
|
||||
fullpath = fullpath[len(rel) :]
|
||||
fullpath = fullpath[:-3] # remove .py
|
||||
return fullpath.replace(os.path.sep, ".")
|
||||
|
||||
|
||||
def split_module_name(m):
|
||||
"""'fully.qualified.name' -> ('fully.qualified', 'name')"""
|
||||
k = m.rfind(".")
|
||||
if k == -1:
|
||||
return ("", m)
|
||||
return (m[:k], m[(k + 1) :])
|
||||
|
||||
|
||||
# blacklist = (".git", "build", "dist", "test")
|
||||
# def find_py_files(basedir):
|
||||
# py_files = []
|
||||
# for root, dirs, files in os.walk(basedir):
|
||||
# for x in blacklist: # don't visit blacklisted dirs
|
||||
# if x in dirs:
|
||||
# dirs.remove(x)
|
||||
# for filename in files:
|
||||
# if filename.endswith(".py"):
|
||||
# fullpath = os.path.join(root, filename)
|
||||
# py_files.append(fullpath)
|
||||
# return py_files
|
||||
|
||||
|
||||
def resolve(current_module, target_module, level):
|
||||
"""Return fully qualified name of the target_module in an import.
|
||||
|
||||
If level == 0, the import is absolute, hence target_module is already the
|
||||
fully qualified name (and will be returned as-is).
|
||||
|
||||
Relative imports (level > 0) are resolved using current_module as the
|
||||
starting point. Usually this is good enough (especially if you analyze your
|
||||
project by invoking modvis in its top-level directory).
|
||||
|
||||
For the exact implications, see the section "Import sibling packages" in:
|
||||
https://alex.dzyoba.com/blog/python-import/
|
||||
and this SO discussion:
|
||||
https://stackoverflow.com/questions/14132789/relative-imports-for-the-billionth-time
|
||||
"""
|
||||
if level < 0:
|
||||
raise ValueError("Relative import level must be >= 0, got {}".format(level))
|
||||
if level == 0: # absolute import
|
||||
return target_module
|
||||
# level > 0 (let's have some simplistic support for relative imports)
|
||||
if level > current_module.count(".") + 1: # foo.bar.baz -> max level 3, pointing to top level
|
||||
raise ValueError("Relative import level {} too large for module name {}".format(level, current_module))
|
||||
base = current_module
|
||||
for _ in range(level):
|
||||
k = base.rfind(".")
|
||||
if k == -1:
|
||||
base = ""
|
||||
break
|
||||
base = base[:k]
|
||||
return ".".join((base, target_module))
|
||||
|
||||
|
||||
class ImportVisitor(ast.NodeVisitor):
|
||||
def __init__(self, filenames, logger):
|
||||
self.modules = {} # modname: {dep0, dep1, ...}
|
||||
self.fullpaths = {} # modname: fullpath
|
||||
self.logger = logger
|
||||
self.analyze(filenames)
|
||||
|
||||
def analyze(self, filenames):
|
||||
for fullpath in filenames:
|
||||
with open(fullpath, "rt", encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
m = filename_to_module_name(fullpath)
|
||||
self.current_module = m
|
||||
self.fullpaths[m] = fullpath
|
||||
self.visit(ast.parse(content, fullpath))
|
||||
|
||||
def add_dependency(self, target_module): # source module is always self.current_module
|
||||
m = self.current_module
|
||||
if m not in self.modules:
|
||||
self.modules[m] = set()
|
||||
self.modules[m].add(target_module)
|
||||
# Just in case the target (or one or more of its parents) is a package
|
||||
# (we don't know that), add a dependency on the relevant __init__ module.
|
||||
#
|
||||
# If there's no matching __init__ (either no __init__.py provided, or
|
||||
# the target is just a module), this is harmless - we just generate a
|
||||
# spurious dependency on a module that doesn't even exist.
|
||||
#
|
||||
# Since nonexistent modules are not in the analyzed set (i.e. do not
|
||||
# appear as keys of self.modules), prepare_graph will ignore them.
|
||||
#
|
||||
# TODO: This would be a problem for a simple plain-text output that doesn't use the graph.
|
||||
modpath = target_module.split(".")
|
||||
for k in range(1, len(modpath) + 1):
|
||||
base = ".".join(modpath[:k])
|
||||
possible_init = base + ".__init__"
|
||||
if possible_init != m: # will happen when current_module is somepackage.__init__ itself
|
||||
self.modules[m].add(possible_init)
|
||||
self.logger.debug(" added possible implicit use of '{}'".format(possible_init))
|
||||
|
||||
def visit_Import(self, node):
|
||||
self.logger.debug(
|
||||
"{}:{}: Import {}".format(self.current_module, node.lineno, [alias.name for alias in node.names])
|
||||
)
|
||||
for alias in node.names:
|
||||
self.add_dependency(alias.name) # alias.asname not relevant for our purposes
|
||||
|
||||
def visit_ImportFrom(self, node):
|
||||
# from foo import some_symbol
|
||||
if node.module:
|
||||
self.logger.debug(
|
||||
"{}:{}: ImportFrom '{}', relative import level {}".format(
|
||||
self.current_module, node.lineno, node.module, node.level
|
||||
)
|
||||
)
|
||||
absname = resolve(self.current_module, node.module, node.level)
|
||||
if node.level > 0:
|
||||
self.logger.debug(" resolved relative import to '{}'".format(absname))
|
||||
self.add_dependency(absname)
|
||||
|
||||
# from . import foo --> module = None; now the **names** refer to modules
|
||||
else:
|
||||
for alias in node.names:
|
||||
self.logger.debug(
|
||||
"{}:{}: ImportFrom '{}', target module '{}', relative import level {}".format(
|
||||
self.current_module, node.lineno, "." * node.level, alias.name, node.level
|
||||
)
|
||||
)
|
||||
absname = resolve(self.current_module, alias.name, node.level)
|
||||
if node.level > 0:
|
||||
self.logger.debug(" resolved relative import to '{}'".format(absname))
|
||||
self.add_dependency(absname)
|
||||
|
||||
# --------------------------------------------------------------------------------
|
||||
|
||||
def detect_cycles(self):
|
||||
"""Postprocessing. Detect import cycles.
|
||||
|
||||
Return format is `[(prefix, cycle), ...]` where `prefix` is the
|
||||
non-cyclic prefix of the import chain, and `cycle` contains only
|
||||
the cyclic part (where the first and last elements are the same).
|
||||
"""
|
||||
cycles = []
|
||||
|
||||
def walk(m, seen=None, trace=None):
|
||||
trace = (trace or []) + [m]
|
||||
seen = seen or set()
|
||||
if m in seen:
|
||||
cycles.append(trace)
|
||||
return
|
||||
seen = seen | {m}
|
||||
deps = self.modules[m]
|
||||
for d in sorted(deps):
|
||||
if d in self.modules:
|
||||
walk(d, seen, trace)
|
||||
|
||||
for root in sorted(self.modules):
|
||||
walk(root)
|
||||
|
||||
# For each detected cycle, report the non-cyclic prefix and the cycle separately
|
||||
out = []
|
||||
for cycle in cycles:
|
||||
offender = cycle[-1]
|
||||
k = cycle.index(offender)
|
||||
out.append((cycle[:k], cycle[k:]))
|
||||
return out
|
||||
|
||||
def prepare_graph(self): # same format as in pyan.analyzer
|
||||
"""Postprocessing. Prepare data for pyan.visgraph for graph file generation."""
|
||||
self.nodes = {} # Node name: list of Node objects (in possibly different namespaces)
|
||||
self.uses_edges = {}
|
||||
# we have no defines_edges, which doesn't matter as long as we don't enable that option in visgraph.
|
||||
|
||||
# TODO: Right now we care only about modules whose files we read.
|
||||
# TODO: If we want to include in the graph also targets that are not in the analyzed set,
|
||||
# TODO: then we could create nodes also for the modules listed in the *values* of self.modules.
|
||||
for m in self.modules:
|
||||
ns, mod = split_module_name(m)
|
||||
package = os.path.dirname(self.fullpaths[m])
|
||||
# print("{}: ns={}, mod={}, fn={}".format(m, ns, mod, fn))
|
||||
# HACK: The `filename` attribute of the node determines the visual color.
|
||||
# HACK: We are visualizing at module level, so color by package.
|
||||
# TODO: If we are analyzing files from several projects in the same run,
|
||||
# TODO: it could be useful to decide the hue by the top-level directory name
|
||||
# TODO: (after the './' if any), and lightness by the depth in each tree.
|
||||
# TODO: This would be most similar to how Pyan does it for functions/classes.
|
||||
n = pyan.node.Node(namespace=ns, name=mod, ast_node=None, filename=package, flavor=pyan.node.Flavor.MODULE)
|
||||
n.defined = True
|
||||
# Pyan's analyzer.py allows several nodes to share the same short name,
|
||||
# which is used as the key to self.nodes; but we use the fully qualified
|
||||
# name as the key. Nevertheless, visgraph expects a format where the
|
||||
# values in the visitor's `nodes` attribute are lists.
|
||||
self.nodes[m] = [n]
|
||||
|
||||
def add_uses_edge(from_node, to_node):
|
||||
if from_node not in self.uses_edges:
|
||||
self.uses_edges[from_node] = set()
|
||||
self.uses_edges[from_node].add(to_node)
|
||||
|
||||
for m, deps in self.modules.items():
|
||||
for d in deps:
|
||||
n_from = self.nodes.get(m)
|
||||
n_to = self.nodes.get(d)
|
||||
if n_from and n_to:
|
||||
add_uses_edge(n_from[0], n_to[0])
|
||||
|
||||
# sanity check output
|
||||
for m, deps in self.uses_edges.items():
|
||||
assert m.get_name() in self.nodes
|
||||
for d in deps:
|
||||
assert d.get_name() in self.nodes
|
||||
|
||||
|
||||
def main():
|
||||
usage = """usage: %prog FILENAME... [--dot|--tgf|--yed]"""
|
||||
desc = "Analyse one or more Python source files and generate an approximate module dependency graph."
|
||||
parser = OptionParser(usage=usage, description=desc)
|
||||
parser.add_option("--dot", action="store_true", default=False, help="output in GraphViz dot format")
|
||||
parser.add_option("--tgf", action="store_true", default=False, help="output in Trivial Graph Format")
|
||||
parser.add_option("--yed", action="store_true", default=False, help="output in yEd GraphML Format")
|
||||
parser.add_option("-f", "--file", dest="filename", help="write graph to FILE", metavar="FILE", default=None)
|
||||
parser.add_option("-l", "--log", dest="logname", help="write log to LOG", metavar="LOG")
|
||||
parser.add_option("-v", "--verbose", action="store_true", default=False, dest="verbose", help="verbose output")
|
||||
parser.add_option(
|
||||
"-V",
|
||||
"--very-verbose",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="very_verbose",
|
||||
help="even more verbose output (mainly for debug)",
|
||||
)
|
||||
parser.add_option(
|
||||
"-c",
|
||||
"--colored",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="colored",
|
||||
help="color nodes according to namespace [dot only]",
|
||||
)
|
||||
parser.add_option(
|
||||
"-g",
|
||||
"--grouped",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="grouped",
|
||||
help="group nodes (create subgraphs) according to namespace [dot only]",
|
||||
)
|
||||
parser.add_option(
|
||||
"-e",
|
||||
"--nested-groups",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="nested_groups",
|
||||
help="create nested groups (subgraphs) for nested namespaces (implies -g) [dot only]",
|
||||
)
|
||||
parser.add_option(
|
||||
"-C",
|
||||
"--cycles",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="cycles",
|
||||
help="detect import cycles and print report to stdout",
|
||||
)
|
||||
parser.add_option(
|
||||
"--dot-rankdir",
|
||||
default="TB",
|
||||
dest="rankdir",
|
||||
help=(
|
||||
"specifies the dot graph 'rankdir' property for "
|
||||
"controlling the direction of the graph. "
|
||||
"Allowed values: ['TB', 'LR', 'BT', 'RL']. "
|
||||
"[dot only]"
|
||||
),
|
||||
)
|
||||
parser.add_option(
|
||||
"-a", "--annotated", action="store_true", default=False, dest="annotated", help="annotate with module location"
|
||||
)
|
||||
|
||||
options, args = parser.parse_args()
|
||||
filenames = [fn2 for fn in args for fn2 in glob(fn, recursive=True)]
|
||||
if len(args) == 0:
|
||||
parser.error("Need one or more filenames to process")
|
||||
|
||||
if options.nested_groups:
|
||||
options.grouped = True
|
||||
|
||||
graph_options = {
|
||||
"draw_defines": False, # we have no defines edges
|
||||
"draw_uses": True,
|
||||
"colored": options.colored,
|
||||
"grouped_alt": False,
|
||||
"grouped": options.grouped,
|
||||
"nested_groups": options.nested_groups,
|
||||
"annotated": options.annotated,
|
||||
}
|
||||
|
||||
# TODO: use an int argument for verbosity
|
||||
logger = logging.getLogger(__name__)
|
||||
if options.very_verbose:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
elif options.verbose:
|
||||
logger.setLevel(logging.INFO)
|
||||
else:
|
||||
logger.setLevel(logging.WARN)
|
||||
logger.addHandler(logging.StreamHandler())
|
||||
if options.logname:
|
||||
handler = logging.FileHandler(options.logname)
|
||||
logger.addHandler(handler)
|
||||
|
||||
# run the analysis
|
||||
v = ImportVisitor(filenames, logger)
|
||||
|
||||
# Postprocessing: detect import cycles
|
||||
#
|
||||
# NOTE: Because this is a static analysis, it doesn't care about the order
|
||||
# the code runs in any particular invocation of the software. Every
|
||||
# analyzed module is considered as a possible entry point to the program,
|
||||
# and all cycles (considering *all* possible branches *at any step* of
|
||||
# *each* import chain) will be mapped recursively.
|
||||
#
|
||||
# Obviously, this easily leads to a combinatoric explosion. In a mid-size
|
||||
# project (~20k SLOC), the analysis may find thousands of unique import
|
||||
# cycles, most of which are harmless.
|
||||
#
|
||||
# Many cycles appear due to package A importing something from package B
|
||||
# (possibly from one of its submodules) and vice versa, when both packages
|
||||
# have an __init__ module. If they don't actually try to import any names
|
||||
# that only become defined after the init has finished running, it's
|
||||
# usually fine.
|
||||
#
|
||||
# (Init modules often import names from their submodules to the package's
|
||||
# top-level namespace; those names can be reliably accessed only after the
|
||||
# init module has finished running. But importing names directly from the
|
||||
# submodule where they are defined is fine also during the init.)
|
||||
#
|
||||
# But if your program is crashing due to a cyclic import, you already know
|
||||
# in any case *which* import cycle is causing it, just by looking at the
|
||||
# stack trace. So this analysis is just extra information that says what
|
||||
# other cycles exist, if any.
|
||||
if options.cycles:
|
||||
cycles = v.detect_cycles()
|
||||
if not cycles:
|
||||
print("No import cycles detected.")
|
||||
else:
|
||||
unique_cycles = set()
|
||||
for prefix, cycle in cycles:
|
||||
unique_cycles.add(tuple(cycle))
|
||||
print("Detected the following import cycles (n_results={}).".format(len(unique_cycles)))
|
||||
|
||||
def stats():
|
||||
lengths = [len(x) - 1 for x in unique_cycles] # number of modules in the cycle
|
||||
|
||||
def mean(lst):
|
||||
return sum(lst) / len(lst)
|
||||
|
||||
def median(lst):
|
||||
tmp = list(sorted(lst))
|
||||
n = len(lst)
|
||||
if n % 2 == 1:
|
||||
return tmp[n // 2] # e.g. tmp[5] if n = 11
|
||||
else:
|
||||
return (tmp[n // 2 - 1] + tmp[n // 2]) / 2 # e.g. avg of tmp[4] and tmp[5] if n = 10
|
||||
|
||||
return min(lengths), mean(lengths), median(lengths), max(lengths)
|
||||
|
||||
print(
|
||||
"Number of modules in a cycle: min = {}, average = {:0.2g}, median = {:0.2g}, max = {}".format(*stats())
|
||||
)
|
||||
for c in sorted(unique_cycles):
|
||||
print(" {}".format(c))
|
||||
|
||||
# # we could generate a plaintext report like this (with caveats; see TODO above)
|
||||
# ms = v.modules
|
||||
# for m in sorted(ms):
|
||||
# print(m)
|
||||
# for d in sorted(ms[m]):
|
||||
# print(" {}".format(d))
|
||||
|
||||
# Postprocessing: format graph report
|
||||
make_graph = options.dot or options.tgf or options.yed
|
||||
if make_graph:
|
||||
v.prepare_graph()
|
||||
# print(v.nodes, v.uses_edges)
|
||||
graph = pyan.visgraph.VisualGraph.from_visitor(v, options=graph_options, logger=logger)
|
||||
|
||||
if options.dot:
|
||||
writer = pyan.writers.DotWriter(
|
||||
graph, options=["rankdir=" + options.rankdir], output=options.filename, logger=logger
|
||||
)
|
||||
if options.tgf:
|
||||
writer = pyan.writers.TgfWriter(graph, output=options.filename, logger=logger)
|
||||
if options.yed:
|
||||
writer = pyan.writers.YedWriter(graph, output=options.filename, logger=logger)
|
||||
if make_graph:
|
||||
writer.run()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
11
pyan.py
11
pyan.py
@@ -1,11 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import re
|
||||
import sys
|
||||
|
||||
from pyan import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
100
pyan/__init__.py
100
pyan/__init__.py
@@ -1,6 +1,102 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from .main import main
|
||||
from glob import glob
|
||||
import io
|
||||
from typing import List, Union
|
||||
|
||||
__version__ = "1.0.2"
|
||||
from .analyzer import CallGraphVisitor
|
||||
from .main import main # noqa: F401, for export only.
|
||||
from .visgraph import VisualGraph
|
||||
from .writers import DotWriter, HTMLWriter, SVGWriter
|
||||
|
||||
__version__ = "1.2.1"
|
||||
|
||||
|
||||
# TODO: fix code duplication with main.py, should have just one implementation.
|
||||
def create_callgraph(
|
||||
filenames: Union[List[str], str] = "**/*.py",
|
||||
root: str = None,
|
||||
function: Union[str, None] = None,
|
||||
namespace: Union[str, None] = None,
|
||||
format: str = "dot",
|
||||
rankdir: str = "LR",
|
||||
nested_groups: bool = True,
|
||||
draw_defines: bool = True,
|
||||
draw_uses: bool = True,
|
||||
colored: bool = True,
|
||||
grouped_alt: bool = False,
|
||||
annotated: bool = False,
|
||||
grouped: bool = True,
|
||||
max_iter: int = 1000,
|
||||
) -> str:
|
||||
"""
|
||||
create callgraph based on static code analysis
|
||||
|
||||
Args:
|
||||
filenames: glob pattern or list of glob patterns
|
||||
to identify filenames to parse (`**` for multiple directories)
|
||||
example: **/*.py for all python files
|
||||
root: path to known root directory at which package root sits. Defaults to None, i.e. it will be inferred.
|
||||
function: if defined, function name to filter for, e.g. "my_module.my_function"
|
||||
to only include calls that are related to `my_function`
|
||||
namespace: if defined, namespace to filter for, e.g. "my_module", it is highly
|
||||
recommended to define this filter
|
||||
format: format to write callgraph to, of of "dot", "svg", "html". you need to have graphviz
|
||||
installed for svg or html output
|
||||
rankdir: direction of graph, e.g. "LR" for horizontal or "TB" for vertical
|
||||
nested_groups: if to group by modules and submodules
|
||||
draw_defines: if to draw defines edges (functions that are defines)
|
||||
draw_uses: if to draw uses edges (functions that are used)
|
||||
colored: if to color graph
|
||||
grouped_alt: if to use alternative grouping
|
||||
annotated: if to annotate graph with filenames
|
||||
grouped: if to group by modules
|
||||
max_iter: maximum number of iterations for filtering. Defaults to 1000.
|
||||
|
||||
Returns:
|
||||
str: callgraph
|
||||
"""
|
||||
if isinstance(filenames, str):
|
||||
filenames = [filenames]
|
||||
filenames = [fn2 for fn in filenames for fn2 in glob(fn, recursive=True)]
|
||||
|
||||
if nested_groups:
|
||||
grouped = True
|
||||
graph_options = {
|
||||
"draw_defines": draw_defines,
|
||||
"draw_uses": draw_uses,
|
||||
"colored": colored,
|
||||
"grouped_alt": grouped_alt,
|
||||
"grouped": grouped,
|
||||
"nested_groups": nested_groups,
|
||||
"annotated": annotated,
|
||||
}
|
||||
|
||||
v = CallGraphVisitor(filenames, root=root)
|
||||
if function or namespace:
|
||||
if function:
|
||||
function_name = function.split(".")[-1]
|
||||
function_namespace = ".".join(function.split(".")[:-1])
|
||||
node = v.get_node(function_namespace, function_name)
|
||||
else:
|
||||
node = None
|
||||
v.filter(node=node, namespace=namespace, max_iter=max_iter)
|
||||
graph = VisualGraph.from_visitor(v, options=graph_options)
|
||||
|
||||
stream = io.StringIO()
|
||||
if format == "dot":
|
||||
writer = DotWriter(graph, options=["rankdir=" + rankdir], output=stream)
|
||||
writer.run()
|
||||
|
||||
elif format == "html":
|
||||
writer = HTMLWriter(graph, options=["rankdir=" + rankdir], output=stream)
|
||||
writer.run()
|
||||
|
||||
elif format == "svg":
|
||||
writer = SVGWriter(graph, options=["rankdir=" + rankdir], output=stream)
|
||||
writer.run()
|
||||
else:
|
||||
raise ValueError(f"format {format} is unknown")
|
||||
|
||||
return stream.getvalue()
|
||||
|
||||
6
pyan/__main__.py
Normal file
6
pyan/__main__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pyan
|
||||
|
||||
if __name__ == "__main__":
|
||||
pyan.main()
|
||||
744
pyan/analyzer.py
744
pyan/analyzer.py
File diff suppressed because it is too large
Load Diff
@@ -2,37 +2,56 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Utilities for analyzer."""
|
||||
|
||||
import os.path
|
||||
import ast
|
||||
import os.path
|
||||
|
||||
from .node import Flavor
|
||||
|
||||
|
||||
def head(lst):
|
||||
if len(lst):
|
||||
return lst[0]
|
||||
|
||||
|
||||
def tail(lst):
|
||||
if len(lst) > 1:
|
||||
return lst[1:]
|
||||
else:
|
||||
return []
|
||||
|
||||
def get_module_name(filename):
|
||||
|
||||
def get_module_name(filename, root: str = None):
|
||||
"""Try to determine the full module name of a source file, by figuring out
|
||||
if its directory looks like a package (i.e. has an __init__.py file)."""
|
||||
if its directory looks like a package (i.e. has an __init__.py file or
|
||||
there is a .py file in it )."""
|
||||
|
||||
if os.path.basename(filename) == '__init__.py':
|
||||
return get_module_name(os.path.dirname(filename))
|
||||
if os.path.basename(filename) == "__init__.py":
|
||||
# init file means module name is directory name
|
||||
module_path = os.path.dirname(filename)
|
||||
else:
|
||||
# otherwise it is the filename without extension
|
||||
module_path = filename.replace(".py", "")
|
||||
|
||||
init_path = os.path.join(os.path.dirname(filename), '__init__.py')
|
||||
mod_name = os.path.basename(filename).replace('.py', '')
|
||||
# find the module root - walk up the tree and check if it contains .py files - if yes. it is the new root
|
||||
directories = [(module_path, True)]
|
||||
if root is None:
|
||||
while directories[0][0] != os.path.dirname(directories[0][0]):
|
||||
potential_root = os.path.dirname(directories[0][0])
|
||||
is_root = any([f == "__init__.py" for f in os.listdir(potential_root)])
|
||||
directories.insert(0, (potential_root, is_root))
|
||||
|
||||
if not os.path.exists(init_path):
|
||||
return mod_name
|
||||
# drop leading (outermost) directories that are not package roots
|
||||
while not directories[0][1]:
|
||||
directories.pop(0)
|
||||
|
||||
if not os.path.dirname(filename):
|
||||
return mod_name
|
||||
else: # root is already known - just walk up until it is matched
|
||||
while directories[0][0] != root:
|
||||
potential_root = os.path.dirname(directories[0][0])
|
||||
directories.insert(0, (potential_root, True))
|
||||
|
||||
mod_name = ".".join([os.path.basename(f[0]) for f in directories])
|
||||
return mod_name
|
||||
|
||||
return get_module_name(os.path.dirname(filename)) + '.' + mod_name
|
||||
|
||||
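As a rough, self-contained sketch of what the new root detection is meant to produce (assuming the helper is importable as pyan.anutils.get_module_name, as in upstream pyan; the layout below is created on the fly):

    import os
    import tempfile

    from pyan.anutils import get_module_name  # assumed module location

    with tempfile.TemporaryDirectory() as repo:
        pkg = os.path.join(repo, "mypkg", "sub")
        os.makedirs(pkg)
        for d in (os.path.join(repo, "mypkg"), pkg):  # mark both levels as packages
            open(os.path.join(d, "__init__.py"), "w").close()
        target = os.path.join(pkg, "module.py")
        open(target, "w").close()

        print(get_module_name(target))                                    # mypkg.sub.module
        print(get_module_name(target, root=os.path.join(repo, "mypkg")))  # mypkg.sub.module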
def format_alias(x):
|
||||
"""Return human-readable description of an ast.alias (used in Import and ImportFrom nodes)."""
|
||||
@@ -44,6 +63,7 @@ def format_alias(x):
|
||||
else:
|
||||
return "%s" % (x.name)
|
||||
|
||||
|
||||
def get_ast_node_name(x):
|
||||
"""Return human-readable name of ast.Attribute or ast.Name. Pass through anything else."""
|
||||
if isinstance(x, ast.Attribute):
|
||||
@@ -54,19 +74,23 @@ def get_ast_node_name(x):
|
||||
else:
|
||||
return x
|
||||
|
||||
|
||||
# Helper for handling binding forms.
|
||||
def sanitize_exprs(exprs):
|
||||
"""Convert ast.Tuples in exprs to Python tuples; wrap result in a Python tuple."""
|
||||
|
||||
def process(expr):
|
||||
if isinstance(expr, (ast.Tuple, ast.List)):
|
||||
return expr.elts # .elts is a Python tuple
|
||||
else:
|
||||
return [expr]
|
||||
|
||||
if isinstance(exprs, (tuple, list)):
|
||||
return [process(expr) for expr in exprs]
|
||||
else:
|
||||
return process(exprs)
|
||||
|
||||
|
||||
def resolve_method_resolution_order(class_base_nodes, logger):
|
||||
"""Compute the method resolution order (MRO) for each of the analyzed classes.
|
||||
|
||||
@@ -81,17 +105,21 @@ def resolve_method_resolution_order(class_base_nodes, logger):
|
||||
|
||||
from functools import reduce
|
||||
from operator import add
|
||||
|
||||
def C3_find_good_head(heads, tails): # find an element of heads which is not in any of the tails
|
||||
flat_tails = reduce(add, tails, []) # flatten the outer level
|
||||
for hd in heads:
|
||||
if hd not in flat_tails:
|
||||
break
|
||||
else: # no break only if there are cyclic dependencies.
|
||||
raise LinearizationImpossible("MRO linearization impossible; cyclic dependency detected. heads: %s, tails: %s" % (heads, tails))
|
||||
raise LinearizationImpossible(
|
||||
"MRO linearization impossible; cyclic dependency detected. heads: %s, tails: %s" % (heads, tails)
|
||||
)
|
||||
return hd
|
||||
|
||||
def remove_all(elt, lst): # remove all occurrences of elt from lst, return a copy
|
||||
return [x for x in lst if x != elt]
|
||||
|
||||
def remove_all_in(elt, lists): # remove elt from all lists, return a copy
|
||||
return [remove_all(elt, lst) for lst in lists]
|
||||
|
||||
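The C3 rule used here (repeatedly pick a head class that appears in no remaining tail) is the same linearization Python itself applies, so its behaviour can be sanity-checked against a plain class hierarchy:

    class A: pass
    class B(A): pass
    class C(A): pass
    class D(B, C): pass

    # D first, its bases in declaration order, then the shared base, then object.
    print([cls.__name__ for cls in D.__mro__])  # ['D', 'B', 'C', 'A', 'object']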
@@ -113,6 +141,7 @@ def resolve_method_resolution_order(class_base_nodes, logger):
|
||||
mro = {} # result
|
||||
try:
|
||||
memo = {} # caching/memoization
|
||||
|
||||
def C3_linearize(node):
|
||||
logger.debug("MRO: C3 linearizing %s" % (node))
|
||||
seen.add(node)
|
||||
@@ -133,6 +162,7 @@ def resolve_method_resolution_order(class_base_nodes, logger):
|
||||
memo[node] = [node] + C3_merge(lists)
|
||||
logger.debug("MRO: C3 linearized %s, result %s" % (node, memo[node]))
|
||||
return memo[node]
|
||||
|
||||
for node in class_base_nodes:
|
||||
logger.debug("MRO: analyzing class %s" % (node))
|
||||
seen = set() # break cycles (separately for each class we start from)
|
||||
@@ -146,6 +176,7 @@ def resolve_method_resolution_order(class_base_nodes, logger):
|
||||
# analyzed is so badly formed that the MRO algorithm fails)
|
||||
|
||||
memo = {} # caching/memoization
|
||||
|
||||
def lookup_bases_recursive(node):
|
||||
seen.add(node)
|
||||
if node not in memo:
|
||||
@@ -166,10 +197,13 @@ def resolve_method_resolution_order(class_base_nodes, logger):
|
||||
|
||||
return mro
|
||||
|
||||
|
||||
class UnresolvedSuperCallError(Exception):
|
||||
"""For specifically signaling an unresolved super()."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class Scope:
|
||||
"""Adaptor that makes scopes look somewhat like those from the Python 2
|
||||
compiler module, as far as Pyan's CallGraphVisitor is concerned."""
|
||||
@@ -177,15 +211,16 @@ class Scope:
|
||||
def __init__(self, table):
|
||||
"""table: SymTable instance from symtable.symtable()"""
|
||||
name = table.get_name()
|
||||
if name == 'top':
|
||||
name = '' # Pyan defines the top level as anonymous
|
||||
if name == "top":
|
||||
name = "" # Pyan defines the top level as anonymous
|
||||
self.name = name
|
||||
self.type = table.get_type() # useful for __repr__()
|
||||
self.defs = {iden:None for iden in table.get_identifiers()} # name:assigned_value
|
||||
self.defs = {iden: None for iden in table.get_identifiers()} # name:assigned_value
|
||||
|
||||
def __repr__(self):
|
||||
return "<Scope: %s %s>" % (self.type, self.name)
|
||||
|
||||
|
||||
# A context manager, sort of a friend of CallGraphVisitor (depends on implementation details)
|
||||
class ExecuteInInnerScope:
|
||||
"""Execute a code block with the scope stack augmented with an inner scope.
|
||||
|
||||
72
pyan/callgraph.html
Normal file
File diff suppressed because one or more lines are too long
300
pyan/main.py
@@ -9,124 +9,236 @@
|
||||
for rendering by e.g. GraphViz or yEd.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from argparse import ArgumentParser
|
||||
from glob import glob
|
||||
from optparse import OptionParser # TODO: migrate to argparse
|
||||
import logging
|
||||
import os
|
||||
|
||||
from .analyzer import CallGraphVisitor
|
||||
from .visgraph import VisualGraph
|
||||
from .writers import TgfWriter, DotWriter, YedWriter
|
||||
from .writers import DotWriter, HTMLWriter, SVGWriter, TgfWriter, YedWriter
|
||||
|
||||
def main():
|
||||
usage = """usage: %prog FILENAME... [--dot|--tgf|--yed]"""
|
||||
desc = ('Analyse one or more Python source files and generate an'
|
||||
'approximate call graph of the modules, classes and functions'
|
||||
' within them.')
|
||||
parser = OptionParser(usage=usage, description=desc)
|
||||
parser.add_option("--dot",
|
||||
action="store_true", default=False,
|
||||
help="output in GraphViz dot format")
|
||||
parser.add_option("--tgf",
|
||||
action="store_true", default=False,
|
||||
help="output in Trivial Graph Format")
|
||||
parser.add_option("--yed",
|
||||
action="store_true", default=False,
|
||||
help="output in yEd GraphML Format")
|
||||
parser.add_option("-f", "--file", dest="filename",
|
||||
help="write graph to FILE", metavar="FILE", default=None)
|
||||
parser.add_option("-l", "--log", dest="logname",
|
||||
help="write log to LOG", metavar="LOG")
|
||||
parser.add_option("-v", "--verbose",
|
||||
action="store_true", default=False, dest="verbose",
|
||||
help="verbose output")
|
||||
parser.add_option("-V", "--very-verbose",
|
||||
action="store_true", default=False, dest="very_verbose",
|
||||
help="even more verbose output (mainly for debug)")
|
||||
parser.add_option("-d", "--defines",
|
||||
action="store_true", default=True, dest="draw_defines",
|
||||
help="add edges for 'defines' relationships [default]")
|
||||
parser.add_option("-n", "--no-defines",
|
||||
action="store_false", default=True, dest="draw_defines",
|
||||
help="do not add edges for 'defines' relationships")
|
||||
parser.add_option("-u", "--uses",
|
||||
action="store_true", default=True, dest="draw_uses",
|
||||
help="add edges for 'uses' relationships [default]")
|
||||
parser.add_option("-N", "--no-uses",
|
||||
action="store_false", default=True, dest="draw_uses",
|
||||
help="do not add edges for 'uses' relationships")
|
||||
parser.add_option("-c", "--colored",
|
||||
action="store_true", default=False, dest="colored",
|
||||
help="color nodes according to namespace [dot only]")
|
||||
parser.add_option("-G", "--grouped-alt",
|
||||
action="store_true", default=False, dest="grouped_alt",
|
||||
help="suggest grouping by adding invisible defines edges [only useful with --no-defines]")
|
||||
parser.add_option("-g", "--grouped",
|
||||
action="store_true", default=False, dest="grouped",
|
||||
help="group nodes (create subgraphs) according to namespace [dot only]")
|
||||
parser.add_option("-e", "--nested-groups",
|
||||
action="store_true", default=False, dest="nested_groups",
|
||||
help="create nested groups (subgraphs) for nested namespaces (implies -g) [dot only]")
|
||||
parser.add_option("--dot-rankdir", default="TB", dest="rankdir",
|
||||
help=(
|
||||
"specifies the dot graph 'rankdir' property for "
|
||||
"controlling the direction of the graph. "
|
||||
"Allowed values: ['TB', 'LR', 'BT', 'RL']. "
|
||||
"[dot only]"))
|
||||
parser.add_option("-a", "--annotated",
|
||||
action="store_true", default=False, dest="annotated",
|
||||
help="annotate with module and source line number")
|
||||
|
||||
options, args = parser.parse_args()
|
||||
filenames = [fn2 for fn in args for fn2 in glob(fn)]
|
||||
if len(args) == 0:
|
||||
parser.error('Need one or more filenames to process')
|
||||
def main(cli_args=None):
|
||||
usage = """%(prog)s FILENAME... [--dot|--tgf|--yed|--svg|--html]"""
|
||||
desc = (
|
||||
"Analyse one or more Python source files and generate an"
|
||||
"approximate call graph of the modules, classes and functions"
|
||||
" within them."
|
||||
)
|
||||
|
||||
if options.nested_groups:
|
||||
options.grouped = True
|
||||
parser = ArgumentParser(usage=usage, description=desc)
|
||||
|
||||
parser.add_argument("--dot", action="store_true", default=False, help="output in GraphViz dot format")
|
||||
|
||||
parser.add_argument("--tgf", action="store_true", default=False, help="output in Trivial Graph Format")
|
||||
|
||||
parser.add_argument("--svg", action="store_true", default=False, help="output in SVG Format")
|
||||
|
||||
parser.add_argument("--html", action="store_true", default=False, help="output in HTML Format")
|
||||
|
||||
parser.add_argument("--yed", action="store_true", default=False, help="output in yEd GraphML Format")
|
||||
|
||||
parser.add_argument("--file", dest="filename", help="write graph to FILE", metavar="FILE", default=None)
|
||||
|
||||
parser.add_argument("--namespace", dest="namespace", help="filter for NAMESPACE", metavar="NAMESPACE", default=None)
|
||||
|
||||
parser.add_argument("--function", dest="function", help="filter for FUNCTION", metavar="FUNCTION", default=None)
|
||||
|
||||
parser.add_argument("-l", "--log", dest="logname", help="write log to LOG", metavar="LOG")
|
||||
|
||||
parser.add_argument("-v", "--verbose", action="store_true", default=False, dest="verbose", help="verbose output")
|
||||
|
||||
parser.add_argument(
|
||||
"-V",
|
||||
"--very-verbose",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="very_verbose",
|
||||
help="even more verbose output (mainly for debug)",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--defines",
|
||||
action="store_true",
|
||||
dest="draw_defines",
|
||||
help="add edges for 'defines' relationships [default]",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--no-defines",
|
||||
action="store_false",
|
||||
default=True,
|
||||
dest="draw_defines",
|
||||
help="do not add edges for 'defines' relationships",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-u",
|
||||
"--uses",
|
||||
action="store_true",
|
||||
default=True,
|
||||
dest="draw_uses",
|
||||
help="add edges for 'uses' relationships [default]",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-N",
|
||||
"--no-uses",
|
||||
action="store_false",
|
||||
default=True,
|
||||
dest="draw_uses",
|
||||
help="do not add edges for 'uses' relationships",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-c",
|
||||
"--colored",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="colored",
|
||||
help="color nodes according to namespace [dot only]",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-G",
|
||||
"--grouped-alt",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="grouped_alt",
|
||||
help="suggest grouping by adding invisible defines edges [only useful with --no-defines]",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-g",
|
||||
"--grouped",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="grouped",
|
||||
help="group nodes (create subgraphs) according to namespace [dot only]",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-e",
|
||||
"--nested-groups",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="nested_groups",
|
||||
help="create nested groups (subgraphs) for nested namespaces (implies -g) [dot only]",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--dot-rankdir",
|
||||
default="TB",
|
||||
dest="rankdir",
|
||||
help=(
|
||||
"specifies the dot graph 'rankdir' property for "
|
||||
"controlling the direction of the graph. "
|
||||
"Allowed values: ['TB', 'LR', 'BT', 'RL']. "
|
||||
"[dot only]"
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-a",
|
||||
"--annotated",
|
||||
action="store_true",
|
||||
default=False,
|
||||
dest="annotated",
|
||||
help="annotate with module and source line number",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--root",
|
||||
default=None,
|
||||
dest="root",
|
||||
help="Package root directory. Is inferred by default.",
|
||||
)
|
||||
|
||||
known_args, unknown_args = parser.parse_known_args(cli_args)
|
||||
|
||||
filenames = [fn2 for fn in unknown_args for fn2 in glob(fn, recursive=True)]
|
||||
|
||||
# determine root
|
||||
if known_args.root is not None:
|
||||
root = os.path.abspath(known_args.root)
|
||||
else:
|
||||
root = None
|
||||
|
||||
if len(unknown_args) == 0:
|
||||
parser.error("Need one or more filenames to process")
|
||||
elif len(filenames) == 0:
|
||||
parser.error("No files found matching given glob: %s" % " ".join(unknown_args))
|
||||
|
||||
if known_args.nested_groups:
|
||||
known_args.grouped = True
|
||||
|
||||
graph_options = {
|
||||
'draw_defines': options.draw_defines,
|
||||
'draw_uses': options.draw_uses,
|
||||
'colored': options.colored,
|
||||
'grouped_alt' : options.grouped_alt,
|
||||
'grouped': options.grouped,
|
||||
'nested_groups': options.nested_groups,
|
||||
'annotated': options.annotated}
|
||||
"draw_defines": known_args.draw_defines,
|
||||
"draw_uses": known_args.draw_uses,
|
||||
"colored": known_args.colored,
|
||||
"grouped_alt": known_args.grouped_alt,
|
||||
"grouped": known_args.grouped,
|
||||
"nested_groups": known_args.nested_groups,
|
||||
"annotated": known_args.annotated,
|
||||
}
|
||||
|
||||
# TODO: use an int argument for verbosity
|
||||
logger = logging.getLogger(__name__)
|
||||
if options.very_verbose:
|
||||
|
||||
if known_args.very_verbose:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
elif options.verbose:
|
||||
|
||||
elif known_args.verbose:
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
else:
|
||||
logger.setLevel(logging.WARN)
|
||||
|
||||
logger.addHandler(logging.StreamHandler())
|
||||
if options.logname:
|
||||
handler = logging.FileHandler(options.logname)
|
||||
|
||||
if known_args.logname:
|
||||
handler = logging.FileHandler(known_args.logname)
|
||||
logger.addHandler(handler)
|
||||
|
||||
v = CallGraphVisitor(filenames, logger)
|
||||
v = CallGraphVisitor(filenames, logger, root=root)
|
||||
|
||||
if known_args.function or known_args.namespace:
|
||||
|
||||
if known_args.function:
|
||||
function_name = known_args.function.split(".")[-1]
|
||||
namespace = ".".join(known_args.function.split(".")[:-1])
|
||||
node = v.get_node(namespace, function_name)
|
||||
|
||||
else:
|
||||
node = None
|
||||
|
||||
v.filter(node=node, namespace=known_args.namespace)
|
||||
|
||||
graph = VisualGraph.from_visitor(v, options=graph_options, logger=logger)
|
||||
|
||||
if options.dot:
|
||||
writer = DotWriter(
|
||||
graph,
|
||||
options=['rankdir='+options.rankdir],
|
||||
output=options.filename,
|
||||
logger=logger)
|
||||
writer.run()
|
||||
writer = None
|
||||
|
||||
if options.tgf:
|
||||
writer = TgfWriter(
|
||||
graph, output=options.filename, logger=logger)
|
||||
writer.run()
|
||||
if known_args.dot:
|
||||
writer = DotWriter(graph, options=["rankdir=" + known_args.rankdir], output=known_args.filename, logger=logger)
|
||||
|
||||
if options.yed:
|
||||
writer = YedWriter(
|
||||
graph, output=options.filename, logger=logger)
|
||||
if known_args.html:
|
||||
writer = HTMLWriter(graph, options=["rankdir=" + known_args.rankdir], output=known_args.filename, logger=logger)
|
||||
|
||||
if known_args.svg:
|
||||
writer = SVGWriter(graph, options=["rankdir=" + known_args.rankdir], output=known_args.filename, logger=logger)
|
||||
|
||||
if known_args.tgf:
|
||||
writer = TgfWriter(graph, output=known_args.filename, logger=logger)
|
||||
|
||||
if known_args.yed:
|
||||
writer = YedWriter(graph, output=known_args.filename, logger=logger)
|
||||
|
||||
if writer:
|
||||
writer.run()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
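Putting the new argparse front end together: the console script installed by setup.py is `pyan3`, and the same entry point can also be driven programmatically through the new cli_args parameter (the file names below are placeholders; output goes to stdout when --file is omitted):

    from pyan import main  # same entry point that pyan/__main__.py and the pyan3 script use

    main(cli_args=[
        "mypkg/module_a.py", "mypkg/module_b.py",  # placeholder sources
        "--uses", "--no-defines", "--colored", "--grouped", "--annotated",
        "--dot", "--file", "callgraph.dot",
    ])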
57
pyan/node.py
@@ -1,34 +1,38 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Abstract node representing data gathered from the analysis."""
|
||||
|
||||
from enum import Enum
|
||||
|
||||
|
||||
def make_safe_label(label):
|
||||
"""Avoid name clashes with GraphViz reserved words such as 'graph'."""
|
||||
unsafe_words = ("digraph", "graph", "cluster", "subgraph")
|
||||
unsafe_words = ("digraph", "graph", "cluster", "subgraph", "node")
|
||||
out = label
|
||||
for word in unsafe_words:
|
||||
out = out.replace(word, "%sX" % word)
|
||||
return out.replace('.', '__').replace('*', '')
|
||||
return out.replace(".", "__").replace("*", "")
|
||||
|
||||
|
||||
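For example, dots become double underscores, the wildcard is dropped, and reserved words such as 'node' now get an X suffix:

    make_safe_label("pyan.node.Node")  # -> "pyan__nodeX__Node"
    make_safe_label("*.main")          # -> "__main"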
class Flavor(Enum):
|
||||
"""Flavor describes the kind of object a node represents."""
|
||||
UNSPECIFIED = "---" # as it says on the tin
|
||||
UNKNOWN = "???" # not determined by analysis (wildcard)
|
||||
|
||||
NAMESPACE = "namespace" # node representing a namespace
|
||||
ATTRIBUTE = "attribute" # attr of something, but not known if class or func.
|
||||
UNSPECIFIED = "---" # as it says on the tin
|
||||
UNKNOWN = "???" # not determined by analysis (wildcard)
|
||||
|
||||
IMPORTEDITEM = "import" # imported item of unanalyzed type
|
||||
NAMESPACE = "namespace" # node representing a namespace
|
||||
ATTRIBUTE = "attribute" # attr of something, but not known if class or func.
|
||||
|
||||
MODULE = "module"
|
||||
CLASS = "class"
|
||||
FUNCTION = "function"
|
||||
METHOD = "method" # instance method
|
||||
IMPORTEDITEM = "import" # imported item of unanalyzed type
|
||||
|
||||
MODULE = "module"
|
||||
CLASS = "class"
|
||||
FUNCTION = "function"
|
||||
METHOD = "method" # instance method
|
||||
STATICMETHOD = "staticmethod"
|
||||
CLASSMETHOD = "classmethod"
|
||||
NAME = "name" # Python name (e.g. "x" in "x = 42")
|
||||
CLASSMETHOD = "classmethod"
|
||||
NAME = "name" # Python name (e.g. "x" in "x = 42")
|
||||
|
||||
# Flavors have a partial ordering in specificness of the information.
|
||||
#
|
||||
@@ -50,6 +54,7 @@ class Flavor(Enum):
|
||||
def __repr__(self):
|
||||
return self.value
|
||||
|
||||
|
||||
class Node:
|
||||
"""A node is an object in the call graph.
|
||||
|
||||
@@ -96,7 +101,7 @@ class Node:
|
||||
Names of unknown nodes will include the *. prefix."""
|
||||
|
||||
if self.namespace is None:
|
||||
return '*.' + self.name
|
||||
return "*." + self.name
|
||||
else:
|
||||
return self.name
|
||||
|
||||
@@ -104,7 +109,7 @@ class Node:
|
||||
"""Return the short name, plus module and line number of definition site, if available.
|
||||
Names of unknown nodes will include the *. prefix."""
|
||||
if self.namespace is None:
|
||||
return '*.' + self.name
|
||||
return "*." + self.name
|
||||
else:
|
||||
if self.get_level() >= 1 and self.ast_node is not None:
|
||||
return "%s\\n(%s:%d)" % (self.name, self.filename, self.ast_node.lineno)
|
||||
@@ -115,11 +120,17 @@ class Node:
|
||||
"""Return the short name, plus namespace, and module and line number of definition site, if available.
|
||||
Names of unknown nodes will include the *. prefix."""
|
||||
if self.namespace is None:
|
||||
return '*.' + self.name
|
||||
return "*." + self.name
|
||||
else:
|
||||
if self.get_level() >= 1:
|
||||
if self.ast_node is not None:
|
||||
return "%s\\n\\n(%s:%d,\\n%s in %s)" % (self.name, self.filename, self.ast_node.lineno, repr(self.flavor), self.namespace)
|
||||
return "%s\\n\\n(%s:%d,\\n%s in %s)" % (
|
||||
self.name,
|
||||
self.filename,
|
||||
self.ast_node.lineno,
|
||||
repr(self.flavor),
|
||||
self.namespace,
|
||||
)
|
||||
else:
|
||||
return "%s\\n\\n(%s in %s)" % (self.name, repr(self.flavor), self.namespace)
|
||||
else:
|
||||
@@ -128,12 +139,12 @@ class Node:
|
||||
def get_name(self):
|
||||
"""Return the full name of this node."""
|
||||
|
||||
if self.namespace == '':
|
||||
if self.namespace == "":
|
||||
return self.name
|
||||
elif self.namespace is None:
|
||||
return '*.' + self.name
|
||||
return "*." + self.name
|
||||
else:
|
||||
return self.namespace + '.' + self.name
|
||||
return self.namespace + "." + self.name
|
||||
|
||||
def get_level(self):
|
||||
"""Return the level of this node (in terms of nested namespaces).
|
||||
@@ -145,7 +156,7 @@ class Node:
|
||||
if self.namespace == "":
|
||||
return 0
|
||||
else:
|
||||
return 1 + self.namespace.count('.')
|
||||
return 1 + self.namespace.count(".")
|
||||
|
||||
def get_toplevel_namespace(self):
|
||||
"""Return the name of the top-level namespace of this node, or "" if none."""
|
||||
@@ -154,7 +165,7 @@ class Node:
|
||||
if self.namespace is None: # group all unknowns in one namespace, "*"
|
||||
return "*"
|
||||
|
||||
idx = self.namespace.find('.')
|
||||
idx = self.namespace.find(".")
|
||||
if idx > -1:
|
||||
return self.namespace[0:idx]
|
||||
else:
|
||||
@@ -175,4 +186,4 @@ class Node:
|
||||
return make_safe_label(self.namespace)
|
||||
|
||||
def __repr__(self):
|
||||
return '<Node %s:%s>' % (repr(self.flavor), self.get_name())
|
||||
return "<Node %s:%s>" % (repr(self.flavor), self.get_name())
|
||||
|
||||
171
pyan/sphinx.py
Normal file
@@ -0,0 +1,171 @@
|
||||
"""
|
||||
Simple sphinx extension that allows including callgraphs in documentation.
|
||||
|
||||
Example usage:
|
||||
|
||||
```
|
||||
.. callgraph:: <function_name>
|
||||
|
||||
|
||||
Options are
|
||||
|
||||
- **:no-groups:** (boolean flag): do not group
- **:no-defines:** (boolean flag): do not draw the edges that show which
  functions, methods and classes are defined by a class or module
- **:no-uses:** (boolean flag): do not draw the edges that show how a function
  uses other functions
- **:no-colors:** (boolean flag): do not color the callgraph (default is
  colored)
- **:nested-groups:** (boolean flag): group by modules and submodules
- **:annotated:** (boolean flag): annotate callgraph with file names
- **:direction:** (string): "horizontal" or "vertical" callgraph
- **:toctree:** (string): path to toctree (as used with autosummary) to link
  elements of the callgraph to documentation (makes all nodes clickable)
- **:zoomable:** (boolean flag): enables users to zoom and pan the callgraph
```
|
||||
"""
|
||||
import re
|
||||
from typing import Any
|
||||
|
||||
from docutils.parsers.rst import directives
|
||||
from sphinx.ext.graphviz import align_spec, figure_wrapper, graphviz
|
||||
from sphinx.util.docutils import SphinxDirective
|
||||
|
||||
from pyan import create_callgraph
|
||||
|
||||
|
||||
def direction_spec(argument: Any) -> str:
|
||||
return directives.choice(argument, ("vertical", "horizontal"))
|
||||
|
||||
|
||||
class CallgraphDirective(SphinxDirective):
|
||||
|
||||
# this enables content in the directive
|
||||
has_content = True
|
||||
|
||||
option_spec = {
|
||||
# graphviz
|
||||
"alt": directives.unchanged,
|
||||
"align": align_spec,
|
||||
"caption": directives.unchanged,
|
||||
"name": directives.unchanged,
|
||||
"class": directives.class_option,
|
||||
# pyan
|
||||
"no-groups": directives.unchanged,
|
||||
"no-defines": directives.unchanged,
|
||||
"no-uses": directives.unchanged,
|
||||
"no-colors": directives.unchanged,
|
||||
"nested-groups": directives.unchanged,
|
||||
"annotated": directives.unchanged,
|
||||
"direction": direction_spec,
|
||||
"toctree": directives.unchanged,
|
||||
"zoomable": directives.unchanged,
|
||||
}
|
||||
|
||||
def run(self):
|
||||
func_name = self.content[0]
|
||||
base_name = func_name.split(".")[0]
|
||||
if len(func_name.split(".")) == 1:
|
||||
func_name = None
|
||||
base_path = __import__(base_name).__path__[0]
|
||||
|
||||
direction = "vertical"
|
||||
if "direction" in self.options:
|
||||
direction = self.options["direction"]
|
||||
dotcode = create_callgraph(
|
||||
filenames=f"{base_path}/**/*.py",
|
||||
root=base_path,
|
||||
function=func_name,
|
||||
namespace=base_name,
|
||||
format="dot",
|
||||
grouped="no-groups" not in self.options,
|
||||
draw_uses="no-uses" not in self.options,
|
||||
draw_defines="no-defines" not in self.options,
|
||||
nested_groups="nested-groups" in self.options,
|
||||
colored="no-colors" not in self.options,
|
||||
annotated="annotated" in self.options,
|
||||
rankdir={"horizontal": "LR", "vertical": "TB"}[direction],
|
||||
)
|
||||
node = graphviz()
|
||||
|
||||
# insert link targets into groups: first insert link, then reformat link
|
||||
if "toctree" in self.options:
|
||||
path = self.options["toctree"].strip("/")
|
||||
# create raw link
|
||||
dotcode = re.sub(
|
||||
r'([\w\d]+)(\s.+), (style="filled")',
|
||||
r'\1\2, href="../' + path + r'/\1.html", target="_blank", \3',
|
||||
dotcode,
|
||||
)
|
||||
|
||||
def create_link(dot_name):
|
||||
raw_link = re.sub(r"__(\w)", r".\1", dot_name)
|
||||
# determine whether this name refers to a class by checking if its first letter is capitalized
|
||||
# (heuristic but should work almost always)
|
||||
splits = raw_link.rsplit(".", 2)
|
||||
if len(splits) > 1 and splits[-2][0].capitalize() == splits[-2][0]:
|
||||
# is class
|
||||
link = ".".join(splits[:-1]) + ".html#" + raw_link + '"'
|
||||
else:
|
||||
link = raw_link + '.html"'
|
||||
return link
|
||||
|
||||
dotcode = re.sub(
|
||||
r'(href="../' + path + r'/)(\w+)(\.html")',
|
||||
lambda m: m.groups()[0] + create_link(m.groups()[1]),
|
||||
dotcode,
|
||||
)
|
||||
|
||||
node["code"] = dotcode
|
||||
node["options"] = {"docname": self.env.docname}
|
||||
if "graphviz_dot" in self.options:
|
||||
node["options"]["graphviz_dot"] = self.options["graphviz_dot"]
|
||||
if "layout" in self.options:
|
||||
node["options"]["graphviz_dot"] = self.options["layout"]
|
||||
if "alt" in self.options:
|
||||
node["alt"] = self.options["alt"]
|
||||
if "align" in self.options:
|
||||
node["align"] = self.options["align"]
|
||||
|
||||
if "class" in self.options:
|
||||
classes = self.options["class"]
|
||||
else:
|
||||
classes = []
|
||||
if "zoomable" in self.options:
|
||||
if len(classes) == 0:
|
||||
classes = ["zoomable-callgraph"]
|
||||
else:
|
||||
classes.append("zoomable-callgraph")
|
||||
if len(classes) > 0:
|
||||
node["classes"] = classes
|
||||
|
||||
if "caption" not in self.options:
|
||||
self.add_name(node)
|
||||
return [node]
|
||||
else:
|
||||
figure = figure_wrapper(self, node, self.options["caption"])
|
||||
self.add_name(figure)
|
||||
return [figure]
|
||||
|
||||
|
||||
def setup(app):
|
||||
|
||||
app.add_directive("callgraph", CallgraphDirective)
|
||||
app.add_js_file("https://cdn.jsdelivr.net/npm/svg-pan-zoom@3.6.1/dist/svg-pan-zoom.min.js")
|
||||
|
||||
# script to find zoomable svgs
|
||||
script = """
|
||||
window.addEventListener('load', () => {
|
||||
Array.from(document.getElementsByClassName('zoomable-callgraph')).forEach(function(element) {
|
||||
svgPanZoom(element);
|
||||
});
|
||||
})
|
||||
"""
|
||||
|
||||
app.add_js_file(None, body=script)
|
||||
|
||||
return {
|
||||
"version": "0.1",
|
||||
"parallel_read_safe": True,
|
||||
"parallel_write_safe": True,
|
||||
}
|
||||
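To use the extension, a Sphinx project lists it in conf.py; a minimal sketch (assuming pyan is installed so that pyan.sphinx is importable, with sphinx.ext.graphviz enabled so the emitted graphviz node is rendered). The `.. callgraph::` directive documented in the module docstring above is then available in .rst files:

    # conf.py (sketch)
    extensions = [
        "sphinx.ext.graphviz",
        "pyan.sphinx",
    ]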
154
pyan/visgraph.py
@@ -2,24 +2,28 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""Format-agnostic representation of the output graph."""
|
||||
|
||||
import re
|
||||
import logging
|
||||
import colorsys
|
||||
import logging
|
||||
import re
|
||||
|
||||
|
||||
# Set node color by filename.
|
||||
#
|
||||
# HSL: hue = top-level namespace, lightness = nesting level, saturation constant.
|
||||
#
|
||||
# The "" namespace (for *.py files) gets the first color. Since its
|
||||
# level is 0, its lightness will be 1.0, i.e. pure white regardless
|
||||
# of the hue.
|
||||
#
|
||||
class Colorizer:
|
||||
"""Output graph color manager.
|
||||
|
||||
We set node color by filename.
|
||||
|
||||
HSL: hue = top-level namespace, lightness = nesting level, saturation constant.
|
||||
|
||||
The "" namespace (for *.py files) gets the first color. Since its
|
||||
level is 0, its lightness will be 1.0, i.e. pure white regardless
|
||||
of the hue.
|
||||
"""
|
||||
|
||||
def __init__(self, num_colors, colored=True, logger=None):
|
||||
self.logger = logger or logging.getLogger(__name__)
|
||||
self.colored = colored
|
||||
|
||||
self._hues = [j/num_colors for j in range(num_colors)]
|
||||
self._hues = [j / num_colors for j in range(num_colors)]
|
||||
self._idx_of = {} # top-level namespace: hue index
|
||||
self._idx = 0
|
||||
|
||||
@@ -40,42 +44,41 @@ class Colorizer:
|
||||
|
||||
def get(self, node): # return (group number, hue index)
|
||||
idx = self._node_to_idx(node)
|
||||
return (idx,self._hues[idx])
|
||||
return (idx, self._hues[idx])
|
||||
|
||||
def make_colors(self, node): # return (group number, fill color, text color)
|
||||
if self.colored:
|
||||
idx,H = self.get(node)
|
||||
L = max( [1.0 - 0.1*node.get_level(), 0.1] )
|
||||
idx, H = self.get(node)
|
||||
L = max([1.0 - 0.1 * node.get_level(), 0.1])
|
||||
S = 1.0
|
||||
A = 0.7 # make nodes translucent (to handle possible overlaps)
|
||||
fill_RGBA = self.htmlize_rgb(*colorsys.hls_to_rgb(H,L,S), A=A)
|
||||
fill_RGBA = self.htmlize_rgb(*colorsys.hls_to_rgb(H, L, S), A=A)
|
||||
|
||||
# black text on light nodes, white text on (very) dark nodes.
|
||||
text_RGB = "#000000" if L >= 0.5 else "#ffffff"
|
||||
else:
|
||||
idx,_ = self.get(node)
|
||||
idx, _ = self.get(node)
|
||||
fill_RGBA = self.htmlize_rgb(1.0, 1.0, 1.0, 0.7)
|
||||
text_RGB = "#000000"
|
||||
return idx, fill_RGBA, text_RGB
|
||||
|
||||
@staticmethod
|
||||
def htmlize_rgb(R,G,B,A=None):
|
||||
def htmlize_rgb(R, G, B, A=None):
|
||||
if A is not None:
|
||||
R,G,B,A = [int(255.0*x) for x in (R,G,B,A)]
|
||||
return "#%02x%02x%02x%02x" % (R,G,B,A)
|
||||
R, G, B, A = [int(255.0 * x) for x in (R, G, B, A)]
|
||||
return "#%02x%02x%02x%02x" % (R, G, B, A)
|
||||
else:
|
||||
R,G,B = [int(255.0*x) for x in (R,G,B)]
|
||||
return "#%02x%02x%02x" % (R,G,B)
|
||||
R, G, B = [int(255.0 * x) for x in (R, G, B)]
|
||||
return "#%02x%02x%02x" % (R, G, B)
|
||||
|
||||
|
||||
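For example, the static helper maps unit-interval RGB (plus optional alpha) floats to HTML hex strings:

    Colorizer.htmlize_rgb(1.0, 0.5, 0.0)         # "#ff7f00"
    Colorizer.htmlize_rgb(1.0, 0.5, 0.0, A=0.7)  # "#ff7f00b2"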
class VisualNode(object):
|
||||
"""
|
||||
A node in the output graph: colors, internal ID, human-readable label, ...
|
||||
"""
|
||||
def __init__(
|
||||
self, id, label='', flavor='',
|
||||
fill_color='', text_color='', group=''):
|
||||
self.id = id # graphing software friendly label (no special chars)
|
||||
|
||||
def __init__(self, id, label="", flavor="", fill_color="", text_color="", group=""):
|
||||
self.id = id # graphing software friendly label (no special chars)
|
||||
self.label = label # human-friendly label
|
||||
self.flavor = flavor
|
||||
self.fill_color = fill_color
|
||||
@@ -83,15 +86,11 @@ class VisualNode(object):
|
||||
self.group = group
|
||||
|
||||
def __repr__(self):
|
||||
optionals = [
|
||||
repr(s) for s in [
|
||||
self.label, self.flavor,
|
||||
self.fill_color, self.text_color, self.group] if s]
|
||||
optionals = [repr(s) for s in [self.label, self.flavor, self.fill_color, self.text_color, self.group] if s]
|
||||
if optionals:
|
||||
return ('VisualNode(' + repr(self.id) +
|
||||
', ' + ', '.join(optionals)+')')
|
||||
return "VisualNode(" + repr(self.id) + ", " + ", ".join(optionals) + ")"
|
||||
else:
|
||||
return 'VisualNode(' + repr(self.id) + ')'
|
||||
return "VisualNode(" + repr(self.id) + ")"
|
||||
|
||||
|
||||
class VisualEdge(object):
|
||||
@@ -100,22 +99,19 @@ class VisualEdge(object):
|
||||
|
||||
flavor is meant to be 'uses' or 'defines'
|
||||
"""
|
||||
|
||||
def __init__(self, source, target, flavor, color):
|
||||
self.source = source
|
||||
self.target = target
|
||||
self.flavor = flavor
|
||||
self.color = color
|
||||
self.color = color
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
'Edge('+self.source.label+' '+self.flavor+' ' +
|
||||
self.target.label+')')
|
||||
return "Edge(" + self.source.label + " " + self.flavor + " " + self.target.label + ")"
|
||||
|
||||
|
||||
class VisualGraph(object):
|
||||
def __init__(
|
||||
self, id, label, nodes=None, edges=None, subgraphs=None,
|
||||
grouped=False):
|
||||
def __init__(self, id, label, nodes=None, edges=None, subgraphs=None, grouped=False):
|
||||
self.id = id
|
||||
self.label = label
|
||||
self.nodes = nodes or []
|
||||
@@ -125,13 +121,13 @@ class VisualGraph(object):
|
||||
|
||||
@classmethod
|
||||
def from_visitor(cls, visitor, options=None, logger=None):
|
||||
colored = options.get('colored', False)
|
||||
nested = options.get('nested_groups', False)
|
||||
grouped_alt = options.get('grouped_alt', False)
|
||||
grouped = nested or options.get('grouped', False) # nested -> grouped
|
||||
annotated = options.get('annotated', False)
|
||||
draw_defines = options.get('draw_defines', False)
|
||||
draw_uses = options.get('draw_uses', False)
|
||||
colored = options.get("colored", False)
|
||||
nested = options.get("nested_groups", False)
|
||||
grouped_alt = options.get("grouped_alt", False)
|
||||
grouped = nested or options.get("grouped", False) # nested -> grouped
|
||||
annotated = options.get("annotated", False)
|
||||
draw_defines = options.get("draw_defines", False)
|
||||
draw_uses = options.get("draw_uses", False)
|
||||
|
||||
# Terminology:
|
||||
# - what Node calls "label" is a computer-friendly unique identifier
|
||||
@@ -143,12 +139,18 @@ class VisualGraph(object):
|
||||
if annotated:
|
||||
if grouped:
|
||||
# group label includes namespace already
|
||||
labeler = lambda n: n.get_annotated_name()
|
||||
def labeler(n):
|
||||
return n.get_annotated_name()
|
||||
|
||||
else:
|
||||
# the node label is the only place to put the namespace info
|
||||
labeler = lambda n: n.get_long_annotated_name()
|
||||
def labeler(n):
|
||||
return n.get_long_annotated_name()
|
||||
|
||||
else:
|
||||
labeler = lambda n: n.get_short_name()
|
||||
|
||||
def labeler(n):
|
||||
return n.get_short_name()
|
||||
|
||||
logger = logger or logging.getLogger(__name__)
|
||||
|
||||
@@ -165,33 +167,36 @@ class VisualGraph(object):
|
||||
for node in visited_nodes:
|
||||
filenames.add(node.filename)
|
||||
return filenames
|
||||
colorizer = Colorizer(num_colors=len(find_filenames())+1,
|
||||
colored=colored, logger=logger)
|
||||
|
||||
colorizer = Colorizer(num_colors=len(find_filenames()) + 1, colored=colored, logger=logger)
|
||||
|
||||
nodes_dict = dict()
|
||||
root_graph = cls('G', label='', grouped=grouped)
|
||||
root_graph = cls("G", label="", grouped=grouped)
|
||||
subgraph = root_graph
|
||||
namespace_stack = []
|
||||
prev_namespace = '' # The namespace '' is first in visited_nodes.
|
||||
prev_namespace = "" # The namespace '' is first in visited_nodes.
|
||||
for node in visited_nodes:
|
||||
logger.info('Looking at %s' % node.name)
|
||||
logger.info("Looking at %s" % node.name)
|
||||
|
||||
# Create the node itself and add it to nodes_dict
|
||||
idx, fill_RGBA, text_RGB = colorizer.make_colors(node)
|
||||
visual_node = VisualNode(
|
||||
id=node.get_label(),
|
||||
label=labeler(node),
|
||||
flavor=repr(node.flavor),
|
||||
fill_color=fill_RGBA,
|
||||
text_color=text_RGB,
|
||||
group=idx)
|
||||
id=node.get_label(),
|
||||
label=labeler(node),
|
||||
flavor=repr(node.flavor),
|
||||
fill_color=fill_RGBA,
|
||||
text_color=text_RGB,
|
||||
group=idx,
|
||||
)
|
||||
nodes_dict[node] = visual_node
|
||||
|
||||
# next namespace?
|
||||
if grouped and node.namespace != prev_namespace:
|
||||
logger.info(
|
||||
'New namespace %s, old was %s'
|
||||
% (node.namespace, prev_namespace))
|
||||
if not prev_namespace:
|
||||
logger.info("New namespace %s" % (node.namespace))
|
||||
else:
|
||||
logger.info("New namespace %s, old was %s" % (node.namespace, prev_namespace))
|
||||
prev_namespace = node.namespace
|
||||
|
||||
label = node.get_namespace_label()
|
||||
subgraph = cls(label, node.namespace)
|
||||
@@ -204,14 +209,11 @@ class VisualGraph(object):
|
||||
m = re.match(namespace_stack[-1].label, node.namespace)
|
||||
# The '.' check catches siblings in cases like
|
||||
# MeshGenerator vs. Mesh.
|
||||
while (m is None or
|
||||
m.end() == len(node.namespace) or
|
||||
node.namespace[m.end()] != '.'):
|
||||
while m is None or m.end() == len(node.namespace) or node.namespace[m.end()] != ".":
|
||||
namespace_stack.pop()
|
||||
if not len(namespace_stack):
|
||||
break
|
||||
m = re.match(
|
||||
namespace_stack[-1].label, node.namespace)
|
||||
m = re.match(namespace_stack[-1].label, node.namespace)
|
||||
parentgraph = namespace_stack[-1] if len(namespace_stack) else root_graph
|
||||
parentgraph.subgraphs.append(subgraph)
|
||||
|
||||
@@ -231,17 +233,12 @@ class VisualGraph(object):
|
||||
# place closer together those nodes that are linked by a
|
||||
# defines relationship.
|
||||
#
|
||||
color = "#838b8b" if draw_defines else '#ffffff00'
|
||||
color = "#838b8b" if draw_defines else "#ffffff00"
|
||||
for n in visitor.defines_edges:
|
||||
if n.defined:
|
||||
for n2 in visitor.defines_edges[n]:
|
||||
if n2.defined:
|
||||
root_graph.edges.append(
|
||||
VisualEdge(
|
||||
nodes_dict[n],
|
||||
nodes_dict[n2],
|
||||
'defines',
|
||||
color))
|
||||
root_graph.edges.append(VisualEdge(nodes_dict[n], nodes_dict[n2], "defines", color))
|
||||
|
||||
if draw_uses:
|
||||
color = "#000000"
|
||||
@@ -249,11 +246,6 @@ class VisualGraph(object):
|
||||
if n.defined:
|
||||
for n2 in visitor.uses_edges[n]:
|
||||
if n2.defined:
|
||||
root_graph.edges.append(
|
||||
VisualEdge(
|
||||
nodes_dict[n],
|
||||
nodes_dict[n2],
|
||||
'uses',
|
||||
color))
|
||||
root_graph.edges.append(VisualEdge(nodes_dict[n], nodes_dict[n2], "uses", color))
|
||||
|
||||
return root_graph
|
||||
|
||||
245
pyan/writers.py
@@ -1,9 +1,15 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Graph markup writers."""
|
||||
|
||||
import sys
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from jinja2 import Template
|
||||
|
||||
|
||||
class Writer(object):
|
||||
@@ -12,7 +18,7 @@ class Writer(object):
|
||||
self.output = output
|
||||
self.logger = logger or logging.getLogger(__name__)
|
||||
self.indent_level = 0
|
||||
self.tabstop = tabstop*' '
|
||||
self.tabstop = tabstop * " "
|
||||
|
||||
def log(self, msg):
|
||||
self.logger.info(msg)
|
||||
@@ -24,19 +30,22 @@ class Writer(object):
|
||||
self.indent_level -= level
|
||||
|
||||
def write(self, line):
|
||||
self.outstream.write(self.tabstop*self.indent_level+line+'\n')
|
||||
self.outstream.write(self.tabstop * self.indent_level + line + "\n")
|
||||
|
||||
def run(self):
|
||||
self.log('%s running' % type(self))
|
||||
self.log("%s running" % type(self))
|
||||
try:
|
||||
self.outstream = open(self.output, 'w')
|
||||
if isinstance(self.output, io.StringIO): # write to stream
|
||||
self.outstream = self.output
|
||||
else:
|
||||
self.outstream = open(self.output, "w") # write to file
|
||||
except TypeError:
|
||||
self.outstream = sys.stdout
|
||||
self.start_graph()
|
||||
self.write_subgraph(self.graph)
|
||||
self.write_edges()
|
||||
self.finish_graph()
|
||||
if self.output:
|
||||
if self.output and not isinstance(self.output, io.StringIO):
|
||||
self.outstream.close()
|
||||
|
||||
def write_subgraph(self, graph):
|
||||
@@ -80,102 +89,126 @@ class Writer(object):
|
||||
|
||||
class TgfWriter(Writer):
|
||||
def __init__(self, graph, output=None, logger=None):
|
||||
Writer.__init__(
|
||||
self, graph,
|
||||
output=output,
|
||||
logger=logger)
|
||||
Writer.__init__(self, graph, output=output, logger=logger)
|
||||
self.i = 1
|
||||
self.id_map = {}
|
||||
|
||||
def write_node(self, node):
|
||||
self.write('%d %s' % (self.i, node.label))
|
||||
self.write("%d %s" % (self.i, node.label))
|
||||
self.id_map[node] = self.i
|
||||
self.i += 1
|
||||
|
||||
def start_edges(self):
|
||||
self.write('#')
|
||||
self.write("#")
|
||||
|
||||
def write_edge(self, edge):
|
||||
flavor = 'U' if edge.flavor == 'uses' else 'D'
|
||||
self.write(
|
||||
'%s %s %s' %
|
||||
(self.id_map[edge.source], self.id_map[edge.target], flavor))
|
||||
flavor = "U" if edge.flavor == "uses" else "D"
|
||||
self.write("%s %s %s" % (self.id_map[edge.source], self.id_map[edge.target], flavor))
|
||||
|
||||
|
||||
class DotWriter(Writer):
|
||||
def __init__(self, graph,
|
||||
options=None, output=None, logger=None, tabstop=4):
|
||||
Writer.__init__(
|
||||
self, graph,
|
||||
output=output,
|
||||
logger=logger,
|
||||
tabstop=tabstop)
|
||||
def __init__(self, graph, options=None, output=None, logger=None, tabstop=4):
|
||||
Writer.__init__(self, graph, output=output, logger=logger, tabstop=tabstop)
|
||||
options = options or []
|
||||
if graph.grouped:
|
||||
options += ['clusterrank="local"']
|
||||
self.options = ', '.join(options)
|
||||
self.options = ", ".join(options)
|
||||
self.grouped = graph.grouped
|
||||
|
||||
def start_graph(self):
|
||||
self.write('digraph G {')
|
||||
self.write(' graph [' + self.options + '];')
|
||||
self.write("digraph G {")
|
||||
self.write(" graph [" + self.options + "];")
|
||||
self.indent()
|
||||
|
||||
def start_subgraph(self, graph):
|
||||
self.log('Start subgraph %s' % graph.label)
|
||||
self.log("Start subgraph %s" % graph.label)
|
||||
# Name must begin with "cluster" to be recognized as a cluster by GraphViz.
|
||||
self.write(
|
||||
"subgraph cluster_%s {\n" % graph.id)
|
||||
self.write("subgraph cluster_%s {\n" % graph.id)
|
||||
self.indent()
|
||||
|
||||
# translucent gray (no hue to avoid visual confusion with any
|
||||
# group of colored nodes)
|
||||
self.write(
|
||||
'graph [style="filled,rounded",'
|
||||
'fillcolor="#80808018", label="%s"];'
|
||||
% graph.label)
|
||||
self.write('graph [style="filled,rounded", fillcolor="#80808018", label="%s"];' % graph.label)
|
||||
|
||||
def finish_subgraph(self, graph):
|
||||
self.log('Finish subgraph %s' % graph.label)
|
||||
self.log("Finish subgraph %s" % graph.label)
|
||||
# terminate previous subgraph
|
||||
self.dedent()
|
||||
self.write('}')
|
||||
self.write("}")
|
||||
|
||||
def write_node(self, node):
|
||||
self.log('Write node %s' % node.label)
|
||||
self.log("Write node %s" % node.label)
|
||||
self.write(
|
||||
'%s [label="%s", style="filled", fillcolor="%s",'
|
||||
' fontcolor="%s", group="%s"];'
|
||||
% (
|
||||
node.id, node.label,
|
||||
node.fill_color, node.text_color, node.group))
|
||||
' fontcolor="%s", group="%s"];' % (node.id, node.label, node.fill_color, node.text_color, node.group)
|
||||
)
|
||||
|
||||
def write_edge(self, edge):
|
||||
source = edge.source
|
||||
target = edge.target
|
||||
color = edge.color
|
||||
if edge.flavor == 'defines':
|
||||
self.write(
|
||||
' %s -> %s [style="dashed",'
|
||||
' color="%s"];'
|
||||
% (source.id, target.id, color))
|
||||
else: # edge.flavor == 'uses':
|
||||
self.write(
|
||||
' %s -> %s [style="solid",'
|
||||
' color="%s"];'
|
||||
% (source.id, target.id, color))
|
||||
color = edge.color
|
||||
if edge.flavor == "defines":
|
||||
self.write(' %s -> %s [style="dashed", color="%s"];' % (source.id, target.id, color))
|
||||
else: # edge.flavor == 'uses':
|
||||
self.write(' %s -> %s [style="solid", color="%s"];' % (source.id, target.id, color))
|
||||
|
||||
def finish_graph(self):
|
||||
self.write('}') # terminate "digraph G {"
|
||||
self.write("}") # terminate "digraph G {"
|
||||
|
||||
|
||||
class SVGWriter(DotWriter):
|
||||
def run(self):
|
||||
# write dot file
|
||||
self.log("%s running" % type(self))
|
||||
self.outstream = io.StringIO()
|
||||
self.start_graph()
|
||||
self.write_subgraph(self.graph)
|
||||
self.write_edges()
|
||||
self.finish_graph()
|
||||
|
||||
# convert to svg
|
||||
svg = subprocess.run(
|
||||
"dot -Tsvg", shell=True, stdout=subprocess.PIPE, input=self.outstream.getvalue().encode()
|
||||
).stdout.decode()
|
||||
|
||||
if self.output:
|
||||
if isinstance(self.output, io.StringIO):
|
||||
self.output.write(svg)
|
||||
else:
|
||||
with open(self.output, "w") as f:
|
||||
f.write(svg)
|
||||
else:
|
||||
print(svg)
|
||||
|
||||
|
||||
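The same result is available from the command line via the new --svg flag, or programmatically (GraphViz's `dot` must be on the PATH; the source path is a placeholder):

    from pyan import main

    # One step: SVGWriter pipes the generated dot source through "dot -Tsvg".
    main(cli_args=["mypkg/module_a.py", "--uses", "--no-defines",
                   "--colored", "--grouped", "--svg", "--file", "callgraph.svg"])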
class HTMLWriter(SVGWriter):
|
||||
def run(self):
|
||||
with io.StringIO() as svg_stream:
|
||||
# run SVGWriter with stream as output
|
||||
output = self.output
|
||||
self.output = svg_stream
|
||||
super().run()
|
||||
svg = svg_stream.getvalue()
|
||||
self.output = output
|
||||
|
||||
# insert svg into html
|
||||
with open(os.path.join(os.path.dirname(__file__), "callgraph.html"), "r") as f:
|
||||
template = Template(f.read())
|
||||
|
||||
html = template.render(svg=svg)
|
||||
if self.output:
|
||||
if isinstance(self.output, io.StringIO):
|
||||
self.output.write(html)
|
||||
else:
|
||||
with open(self.output, "w") as f:
|
||||
f.write(html)
|
||||
else:
|
||||
print(html)
|
||||
|
||||
|
||||
class YedWriter(Writer):
|
||||
def __init__(self, graph, output=None, logger=None, tabstop=2):
|
||||
Writer.__init__(
|
||||
self, graph,
|
||||
output=output,
|
||||
logger=logger,
|
||||
tabstop=tabstop)
|
||||
Writer.__init__(self, graph, output=output, logger=logger, tabstop=tabstop)
|
||||
self.grouped = graph.grouped
|
||||
self.indent_level = 0
|
||||
self.edge_id = 0
|
||||
@@ -183,18 +216,19 @@ class YedWriter(Writer):
|
||||
def start_graph(self):
|
||||
self.write('<?xml version="1.0" encoding="UTF-8" standalone="no"?>')
|
||||
self.write(
|
||||
'<graphml xmlns="http://graphml.graphdrawing.org/xmlns"'
|
||||
' xmlns:java='
|
||||
'"http://www.yworks.com/xml/yfiles-common/1.0/java"'
|
||||
' xmlns:sys='
|
||||
'"http://www.yworks.com/xml/yfiles-common/markup/primitives'
|
||||
'/2.0" xmlns:x="http://www.yworks.com/xml/yfiles-common/'
|
||||
'markup/2.0" xmlns:xsi="http://www.w3.org/2001/'
|
||||
'XMLSchema-instance" xmlns:y="http://www.yworks.com/xml/'
|
||||
'graphml" xmlns:yed="http://www.yworks.com/xml/yed/3"'
|
||||
' xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns'
|
||||
' http://www.yworks.com/xml/schema/graphml/1.1/'
|
||||
'ygraphml.xsd">')
|
||||
'<graphml xmlns="http://graphml.graphdrawing.org/xmlns"'
|
||||
" xmlns:java="
|
||||
'"http://www.yworks.com/xml/yfiles-common/1.0/java"'
|
||||
" xmlns:sys="
|
||||
'"http://www.yworks.com/xml/yfiles-common/markup/primitives'
|
||||
'/2.0" xmlns:x="http://www.yworks.com/xml/yfiles-common/'
|
||||
'markup/2.0" xmlns:xsi="http://www.w3.org/2001/'
|
||||
'XMLSchema-instance" xmlns:y="http://www.yworks.com/xml/'
|
||||
'graphml" xmlns:yed="http://www.yworks.com/xml/yed/3"'
|
||||
' xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns'
|
||||
" http://www.yworks.com/xml/schema/graphml/1.1/"
|
||||
'ygraphml.xsd">'
|
||||
)
|
||||
self.indent()
|
||||
self.write('<key for="node" id="d0" yfiles.type="nodegraphics"/>')
|
||||
self.write('<key for="edge" id="d1" yfiles.type="edgegraphics"/>')
|
||||
@@ -202,96 +236,87 @@ class YedWriter(Writer):
|
||||
self.indent()
|
||||
|
||||
def start_subgraph(self, graph):
|
||||
self.log('Start subgraph %s' % graph.label)
|
||||
self.log("Start subgraph %s" % graph.label)
|
||||
|
||||
self.write('<node id="%s:" yfiles.foldertype="group">' % graph.id)
|
||||
self.indent()
|
||||
self.write('<data key="d0">')
|
||||
self.indent()
|
||||
self.write('<y:ProxyAutoBoundsNode>')
|
||||
self.write("<y:ProxyAutoBoundsNode>")
|
||||
self.indent()
|
||||
self.write('<y:Realizers active="0">')
|
||||
self.indent()
|
||||
self.write('<y:GroupNode>')
|
||||
self.write("<y:GroupNode>")
|
||||
self.indent()
|
||||
self.write('<y:Fill color="#CCCCCC" transparent="false"/>')
|
||||
self.write('<y:NodeLabel modelName="internal" modelPosition="t" '
|
||||
'alignment="right">%s</y:NodeLabel>'
|
||||
% graph.label)
|
||||
self.write(
|
||||
'<y:NodeLabel modelName="internal" modelPosition="t" alignment="right">%s</y:NodeLabel>' % graph.label
|
||||
)
|
||||
self.write('<y:Shape type="roundrectangle"/>')
|
||||
self.dedent()
|
||||
self.write('</y:GroupNode>')
|
||||
self.write("</y:GroupNode>")
|
||||
self.dedent()
|
||||
self.write('</y:Realizers>')
|
||||
self.write("</y:Realizers>")
|
||||
self.dedent()
|
||||
self.write('</y:ProxyAutoBoundsNode>')
|
||||
self.write("</y:ProxyAutoBoundsNode>")
|
||||
self.dedent()
|
||||
self.write('</data>')
|
||||
self.write("</data>")
|
||||
self.write('<graph edgedefault="directed" id="%s::">' % graph.id)
|
||||
self.indent()
|
||||
|
||||
def finish_subgraph(self, graph):
|
||||
self.log('Finish subgraph %s' % graph.label)
|
||||
self.log("Finish subgraph %s" % graph.label)
|
||||
self.dedent()
|
||||
self.write('</graph>')
|
||||
self.write("</graph>")
|
||||
self.dedent()
|
||||
self.write('</node>')
|
||||
self.write("</node>")
|
||||
|
||||
def write_node(self, node):
|
||||
self.log('Write node %s' % node.label)
|
||||
width = 20 + 10*len(node.label)
|
||||
self.log("Write node %s" % node.label)
|
||||
width = 20 + 10 * len(node.label)
|
||||
self.write('<node id="%s">' % node.id)
|
||||
self.indent()
|
||||
self.write('<data key="d0">')
|
||||
self.indent()
|
||||
self.write('<y:ShapeNode>')
|
||||
self.write("<y:ShapeNode>")
|
||||
self.indent()
|
||||
self.write('<y:Geometry height="%s" width="%s"/>' % ("30", width))
|
||||
self.write('<y:Fill color="%s" transparent="false"/>'
|
||||
% node.fill_color)
|
||||
self.write('<y:BorderStyle color="#000000" type="line" '
|
||||
'width="1.0"/>')
|
||||
self.write('<y:NodeLabel>%s</y:NodeLabel>'
|
||||
% node.label)
|
||||
self.write('<y:Fill color="%s" transparent="false"/>' % node.fill_color)
|
||||
self.write('<y:BorderStyle color="#000000" type="line" width="1.0"/>')
|
||||
self.write("<y:NodeLabel>%s</y:NodeLabel>" % node.label)
|
||||
self.write('<y:Shape type="ellipse"/>')
|
||||
self.dedent()
|
||||
self.write('</y:ShapeNode>')
|
||||
self.write("</y:ShapeNode>")
|
||||
self.dedent()
|
||||
self.write('</data>')
|
||||
self.write("</data>")
|
||||
self.dedent()
|
||||
self.write('</node>')
|
||||
self.write("</node>")
|
||||
|
||||
def write_edge(self, edge):
|
||||
self.edge_id += 1
|
||||
source = edge.source
|
||||
target = edge.target
|
||||
self.write(
|
||||
'<edge id="%s" source="%s" target="%s">'
|
||||
% (self.edge_id, source.id, target.id))
|
||||
self.write('<edge id="%s" source="%s" target="%s">' % (self.edge_id, source.id, target.id))
|
||||
self.indent()
|
||||
self.write('<data key="d1">')
|
||||
self.indent()
|
||||
self.write('<y:PolyLineEdge>')
|
||||
self.write("<y:PolyLineEdge>")
|
||||
self.indent()
|
||||
if edge.flavor == 'defines':
|
||||
self.write('<y:LineStyle color="%s" '
|
||||
'type="dashed" width="1.0"/>'
|
||||
% edge.color)
|
||||
if edge.flavor == "defines":
|
||||
self.write('<y:LineStyle color="%s" type="dashed" width="1.0"/>' % edge.color)
|
||||
else:
|
||||
self.write('<y:LineStyle color="%s" '
|
||||
'type="line" width="1.0"/>'
|
||||
% edge.color)
|
||||
self.write('<y:LineStyle color="%s" type="line" width="1.0"/>' % edge.color)
|
||||
self.write('<y:Arrows source="none" target="standard"/>')
|
||||
self.write('<y:BendStyle smoothed="true"/>')
|
||||
self.dedent()
|
||||
self.write('</y:PolyLineEdge>')
|
||||
self.write("</y:PolyLineEdge>")
|
||||
self.dedent()
|
||||
self.write('</data>')
|
||||
self.write("</data>")
|
||||
self.dedent()
|
||||
self.write('</edge>')
|
||||
self.write("</edge>")
|
||||
|
||||
def finish_graph(self):
|
||||
self.dedent(2)
|
||||
self.write(' </graph>')
|
||||
self.write(" </graph>")
|
||||
self.dedent()
|
||||
self.write('</graphml>')
|
||||
self.write("</graphml>")
|
||||
|
||||
26
pyproject.toml
Normal file
@@ -0,0 +1,26 @@
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
honor_noqa = true
|
||||
line_length = 120
|
||||
combine_as_imports = true
|
||||
force_sort_within_sections = true
|
||||
known_first_party = "pyan"
|
||||
|
||||
[tool.black]
|
||||
line-length = 120
|
||||
include = '\.pyi?$'
|
||||
exclude = '''
|
||||
/(
|
||||
\.git
|
||||
| \.hg
|
||||
| \.mypy_cache
|
||||
| \.tox
|
||||
| \.venv
|
||||
| _build
|
||||
| egg-info
|
||||
| buck-out
|
||||
| build
|
||||
| dist
|
||||
| env
|
||||
)/
|
||||
'''
|
||||
15
pytest.ini
Normal file
@@ -0,0 +1,15 @@
|
||||
[pytest]
|
||||
addopts =
|
||||
-rsxX
|
||||
-vv
|
||||
|
||||
--cov-config=.coveragerc
|
||||
--cov=pyan
|
||||
--cov-report=html
|
||||
--cov-report=term-missing:skip-covered
|
||||
--no-cov-on-fail
|
||||
testpaths = tests/
|
||||
log_cli_level = ERROR
|
||||
log_format = %(asctime)s %(levelname)s %(message)s
|
||||
log_date_format = %Y-%m-%d %H:%M:%S
|
||||
cache_dir = .cache
|
||||
3
requirements.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
coverage>=5.3
|
||||
pytest>=6.1.2
|
||||
pytest-cov>=2.10.1
|
||||
24
setup.cfg
Normal file
@@ -0,0 +1,24 @@
|
||||
[flake8]
|
||||
max-line-length = 120
|
||||
show-source = true
|
||||
ignore =
|
||||
E203, # space before : (needed for how black formats slicing)
|
||||
W503, # line break before binary operator
|
||||
W504, # line break after binary operator
|
||||
E402, # module level import not at top of file
|
||||
E731, # do not assign a lambda expression, use a def
|
||||
E741, # ignore not easy to read variables like i l I etc.
|
||||
C406, # Unnecessary list literal - rewrite as a dict literal.
|
||||
C408, # Unnecessary dict call - rewrite as a literal.
|
||||
C409, # Unnecessary list passed to tuple() - rewrite as a tuple literal.
|
||||
S001, # found modulo formatter (incorrect picks up mod operations)
|
||||
F401 # unused imports
|
||||
W605 # invalid escape sequence (e.g. for LaTeX)
|
||||
exclude = docs/build/*.py,
|
||||
node_modules/*.py,
|
||||
.eggs/*.py,
|
||||
versioneer.py,
|
||||
venv/*,
|
||||
.venv/*,
|
||||
.git/*
|
||||
.history/*
|
||||
133
setup.py
Normal file
@@ -0,0 +1,133 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""setuptools-based setup.py for pyan3.
|
||||
|
||||
Tested on Python 3.6.
|
||||
|
||||
Usage as usual with setuptools:
|
||||
python3 setup.py build
|
||||
python3 setup.py sdist
|
||||
python3 setup.py bdist_wheel --universal
|
||||
python3 setup.py install
|
||||
|
||||
For details, see
|
||||
http://setuptools.readthedocs.io/en/latest/setuptools.html#command-reference
|
||||
or
|
||||
python3 setup.py --help
|
||||
python3 setup.py --help-commands
|
||||
python3 setup.py --help bdist_wheel # or any command
|
||||
"""
|
||||
|
||||
import ast
|
||||
import os
|
||||
|
||||
from setuptools import setup
|
||||
|
||||
#########################################################
|
||||
# General config
|
||||
#########################################################
|
||||
|
||||
# Short description for package list on PyPI
|
||||
#
|
||||
SHORTDESC = "Offline call graph generator for Python 3"
|
||||
|
||||
# Long description for package homepage on PyPI
|
||||
#
|
||||
DESC = (
|
||||
"Generate approximate call graphs for Python programs.\n"
|
||||
"\n"
|
||||
"Pyan takes one or more Python source files, performs a "
|
||||
"(rather superficial) static analysis, and constructs a directed graph of "
|
||||
"the objects in the combined source, and how they define or "
|
||||
"use each other. The graph can be output for rendering by GraphViz or yEd."
|
||||
)
|
||||
|
||||
#########################################################
|
||||
# Init
|
||||
#########################################################
|
||||
|
||||
# Extract __version__ from the package __init__.py
|
||||
# (since it's not a good idea to actually run __init__.py during the
|
||||
# build process).
|
||||
#
|
||||
# https://stackoverflow.com/q/2058802/1959808
|
||||
#
|
||||
init_py_path = os.path.join("pyan", "__init__.py")
|
||||
version = None
|
||||
try:
|
||||
with open(init_py_path) as f:
|
||||
for line in f:
|
||||
if line.startswith("__version__"):
|
||||
module = ast.parse(line)
|
||||
expr = module.body[0]
|
||||
v = expr.value
|
||||
if type(v) is ast.Constant:
|
||||
version = v.value
|
||||
elif type(v) is ast.Str: # TODO: Python 3.8: remove ast.Str
|
||||
version = v.s
|
||||
break
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
if not version:
|
||||
raise RuntimeError(f"Version information not found in {init_py_path}")
|
||||
|
||||
#########################################################
|
||||
# Call setup()
|
||||
#########################################################
|
||||
|
||||
setup(
|
||||
name="pyan3",
|
||||
version=version,
|
||||
author="Juha Jeronen",
|
||||
author_email="juha.m.jeronen@gmail.com",
|
||||
url="https://github.com/Technologicat/pyan",
|
||||
description=SHORTDESC,
|
||||
long_description=DESC,
|
||||
license="GPL 2.0",
|
||||
# free-form text field;
|
||||
# https://stackoverflow.com/q/34994130/1959808
|
||||
platforms=["Linux"],
|
||||
# See
|
||||
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
|
||||
#
|
||||
# for the standard classifiers.
|
||||
#
|
||||
classifiers=[
|
||||
"Development Status :: 4 - Beta",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Topic :: Software Development",
|
||||
],
|
||||
# See
|
||||
# http://setuptools.readthedocs.io/en/latest/setuptools.html
|
||||
#
|
||||
setup_requires=["wheel"],
|
||||
install_requires=["jinja2"],
|
||||
provides=["pyan"],
|
||||
# keywords for PyPI (in case you upload your project)
|
||||
#
|
||||
# e.g. the keywords your project uses as topics on GitHub,
|
||||
# minus "python" (if there)
|
||||
#
|
||||
keywords=["call-graph", "static-code-analysis"],
|
||||
# Declare packages so that python -m setup build will copy .py files
|
||||
# (especially __init__.py).
|
||||
#
|
||||
# This **does not** automatically recurse into subpackages,
|
||||
# so they must also be declared.
|
||||
#
|
||||
packages=["pyan"],
|
||||
zip_safe=True,
|
||||
package_data={"pyan": ["callgraph.html"]},
|
||||
include_package_data=True,
|
||||
entry_points={
|
||||
"console_scripts": [
|
||||
"pyan3 = pyan.main:main",
|
||||
]
|
||||
},
|
||||
)
|
||||
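Beyond the commands listed in the module docstring, a common development workflow is an editable install, which also puts the pyan3 console script declared in entry_points on the PATH; a minimal sketch (assuming pip and wheel are available):

# Editable install from the repository root; exposes the `pyan3` entry point.
python3 -m pip install -e .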
14
tests/old_tests/issue2/pyan_err.py
Normal file
@@ -0,0 +1,14 @@
# -*- coding: utf-8; -*-
# See issue #2

"""
This works fine
a = 3
b = 4
print(a + b)
"""

# But this did not (#2)
a: int = 3
b = 4
print(a + b)
2
tests/old_tests/issue2/run.sh
Normal file
@@ -0,0 +1,2 @@
#!/bin/bash
pyan pyan_err.py -V >out.dot
21
tests/old_tests/issue3/testi.py
Normal file
@@ -0,0 +1,21 @@
# -*- coding: utf-8; -*-
# See issue #3


def f():
    return [x for x in range(10)]


def g():
    return [(x, y) for x in range(10) for y in range(10)]


def h(results):
    return [
        (
            [(name, allargs) for name, _, _, allargs, _ in recs],
            {name: inargs for name, inargs, _, _, _ in recs},
            {name: meta for name, _, _, _, meta in recs},
        )
        for recs in (results[key] for key in sorted(results.keys()))
    ]
28
tests/old_tests/issue5/meas_xrd.py
Normal file
@@ -0,0 +1,28 @@
import os.path

import numpy as np
import pandas.io.parsers


class MeasXRD:
    def __init__(self, path: str):
        if not os.path.isfile(path):
            raise FileNotFoundError("Invalid XRD file path:", path)

        row_ind = 2
        self.params = {}
        with open(path, "r") as file:
            line = file.readline()
            if line != "[Measurement conditions]\n":
                raise ValueError("XRD measurement file does not contain a valid header")

            line = file.readline()
            while line not in ["[Scan points]\n", ""]:
                row_ind += 1
                columns = line.rstrip("\n").split(",", 1)
                self.params[columns[0]] = columns[1]
                line = file.readline()

        self.data = pandas.io.parsers.read_csv(
            path, skiprows=row_ind, dtype={"Angle": np.float_, "Intensity": np.int_}, engine="c"
        )
14
tests/old_tests/issue5/plot_xrd.py
Normal file
@@ -0,0 +1,14 @@
import plotly.graph_objs as go
import plotly.offline as py

from . import meas_xrd


def plot_xrd(meas: meas_xrd.MeasXRD):
    trace = go.Scatter(x=meas.data["Angle"], y=meas.data["Intensity"])

    layout = go.Layout(title="XRD data", xaxis=dict(title="Angle"), yaxis=dict(title="Intensity", type="log"))

    data = [trace]
    fig = go.Figure(data=data, layout=layout)
    return py.plot(fig, output_type="div", include_plotlyjs=False)
7
tests/old_tests/issue5/relimport.py
Normal file
@@ -0,0 +1,7 @@
# -*- coding: utf-8; -*-
# See issue #5

from . import mod1  # noqa
from . import mod1 as moo  # noqa
from ..mod3 import bar
from .mod2 import foo
2
tests/old_tests/issue5/run.sh
Normal file
@@ -0,0 +1,2 @@
#!/bin/bash
pyan plot_xrd.py --uses --colored --grouped --annotated --dot > test.dot
64
tests/test_analyzer.py
Normal file
@@ -0,0 +1,64 @@
from glob import glob
import logging
import os

import pytest

from pyan.analyzer import CallGraphVisitor


@pytest.fixture
def callgraph():
    filenames = glob(os.path.join(os.path.dirname(__file__), "test_code/**/*.py"), recursive=True)
    v = CallGraphVisitor(filenames, logger=logging.getLogger())
    return v


def get_node(nodes, name):
    filtered_nodes = [node for node in nodes if node.get_name() == name]
    assert len(filtered_nodes) == 1, f"Node with name {name} should exist"
    return filtered_nodes[0]


def get_in_dict(node_dict, name):
    return node_dict[get_node(node_dict.keys(), name)]


def test_resolve_import_as(callgraph):
    imports = get_in_dict(callgraph.uses_edges, "test_code.submodule2")
    get_node(imports, "test_code.submodule1")
    assert len(imports) == 1, "only one effective import"

    imports = get_in_dict(callgraph.uses_edges, "test_code.submodule1")
    get_node(imports, "test_code.subpackage1.submodule1.A")
    get_node(imports, "test_code.subpackage1")


def test_import_relative(callgraph):
    imports = get_in_dict(callgraph.uses_edges, "test_code.subpackage1.submodule1")
    get_node(imports, "test_code.submodule2.test_2")


def test_resolve_use_in_class(callgraph):
    uses = get_in_dict(callgraph.uses_edges, "test_code.subpackage1.submodule1.A.__init__")
    get_node(uses, "test_code.submodule2.test_2")


def test_resolve_use_in_function(callgraph):
    uses = get_in_dict(callgraph.uses_edges, "test_code.submodule2.test_2")
    get_node(uses, "test_code.submodule1.test_func1")
    get_node(uses, "test_code.submodule1.test_func2")


def test_resolve_package_without___init__(callgraph):
    defines = get_in_dict(callgraph.defines_edges, "test_code.subpackage2.submodule_hidden1")
    get_node(defines, "test_code.subpackage2.submodule_hidden1.test_func1")


def test_resolve_package_with_known_root():
    dirname = os.path.dirname(__file__)
    filenames = glob(os.path.join(dirname, "test_code/**/*.py"), recursive=True)
    callgraph = CallGraphVisitor(filenames, logger=logging.getLogger(), root=dirname)
    dirname_base = os.path.basename(dirname)
    defines = get_in_dict(callgraph.defines_edges, f"{dirname_base}.test_code.subpackage2.submodule_hidden1")
    get_node(defines, f"{dirname_base}.test_code.subpackage2.submodule_hidden1.test_func1")
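A hypothetical way to run only this module, or a single test from it, while iterating (flags follow standard pytest usage):

# Run only the analyzer tests, or narrow to one test by keyword.
python3 -m pytest tests/test_analyzer.py
python3 -m pytest tests/test_analyzer.py -k test_resolve_import_as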
0
tests/test_code/__init__.py
Normal file
21
tests/test_code/submodule1.py
Normal file
@@ -0,0 +1,21 @@
from test_code import subpackage1 as subpackage
from test_code.subpackage1 import A


def test_func1(a):
    return a


def test_func2(a):
    return a


class B:
    def __init__(self, k):
        self.a = 1

    def to_A(self):
        return A(self)

    def get_a_via_A(self):
        return test_func1(self.to_A().b.a)
9
tests/test_code/submodule2.py
Normal file
@@ -0,0 +1,9 @@
import test_code.submodule1 as b

from . import submodule1

A = 32


def test_2(a):
    return submodule1.test_func2(a) + A + b.test_func1(a)
3
tests/test_code/subpackage1/__init__.py
Normal file
@@ -0,0 +1,3 @@
from test_code.subpackage1.submodule1 import A

__all__ = ["A"]
6
tests/test_code/subpackage1/submodule1.py
Normal file
@@ -0,0 +1,6 @@
from ..submodule2 import test_2


class A:
    def __init__(self, b):
        self.b = test_2(b)
2
tests/test_code/subpackage2/submodule_hidden1.py
Normal file
@@ -0,0 +1,2 @@
def test_func1():
    pass
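The fixture package above is what the analyzer tests feed to CallGraphVisitor; as a rough sketch in the style of the old run.sh scripts, the same modules can also be analyzed from the command line (the output filename is arbitrary, and the non-recursive glob is shown for simplicity):

# Generate a .dot call graph of the test fixture modules.
python3 -m pyan tests/test_code/*.py --uses --colored --grouped --annotated --dot > test_code.dot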
3
uploaddist.sh
Executable file
@@ -0,0 +1,3 @@
#!/bin/bash
VERSION="$1"
twine upload dist/pyan3-${VERSION}.tar.gz dist/pyan3-${VERSION}-py3-none-any.whl
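A hypothetical invocation, assuming the sdist and wheel for the given version have already been built into dist/ (the version number here is only an example):

# Upload the matching artifacts from dist/ to PyPI via twine.
./uploaddist.sh 1.2.0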
@@ -1,4 +1,4 @@
 #!/bin/bash
 echo -ne "Pyan architecture: generating architecture.{dot,svg}\n"
-./pyan.py pyan/*.py --no-defines --uses --colored --annotate --dot -V >architecture.dot 2>architecture.log
+python3 -m pyan pyan/*.py --no-defines --uses --colored --annotate --dot -V >architecture.dot 2>architecture.log
 dot -Tsvg architecture.dot >architecture.svg