[DOCS] Fix sphinx warnings (apache#4917)
* Fix Python docstrings

* More fixes

* Fix lint
comaniac authored and alexwong committed Feb 28, 2020
1 parent 911c3b4 commit fd28647
Showing 16 changed files with 148 additions and 191 deletions.
2 changes: 1 addition & 1 deletion docs/api/python/contrib.rst
@@ -21,7 +21,7 @@ tvm.contrib

tvm.contrib.cblas
~~~~~~~~~~~~~~~~~
.. automodule:: tvm.contrib.cc
.. automodule:: tvm.contrib.cblas
:members:


6 changes: 0 additions & 6 deletions docs/api/python/relay/base.rst
@@ -26,11 +26,5 @@ tvm.relay.base
.. autoclass:: tvm.relay.base.RelayNode
:members:

.. autoclass:: tvm.relay.base.Span
:members:

.. autoclass:: tvm.relay.base.SourceName
:members:

.. autoclass:: tvm.relay.base.Id
:members:
6 changes: 0 additions & 6 deletions docs/api/python/relay/expr.rst
@@ -35,12 +35,6 @@ tvm.relay.expr
.. autoclass:: tvm.relay.expr.Tuple
:members:

.. autoclass:: tvm.relay.expr.Var
:members:

.. autoclass:: tvm.relay.expr.GlobalVar
:members:

.. autoclass:: tvm.relay.expr.Function
:members:

44 changes: 0 additions & 44 deletions docs/api/python/tensor.rst

This file was deleted.

6 changes: 2 additions & 4 deletions python/tvm/autotvm/task/dispatcher.py
@@ -258,8 +258,7 @@ class ApplyHistoryBest(DispatchContext):
records : str or iterator of (MeasureInput, MeasureResult)
Collection of tuning records.
If is str, then it should be the filename of a records log file.
Each row of this file is an encoded record pair.
Otherwise, it is an iterator.
Each row of this file is an encoded record pair. Otherwise, it is an iterator.
"""
def __init__(self, records):
super(ApplyHistoryBest, self).__init__()
@@ -279,8 +278,7 @@ def load(self, records):
records : str or iterator of (MeasureInput, MeasureResult)
Collection of tuning records.
If is str, then it should be the filename of a records log file.
Each row of this file is an encoded record pair.
Otherwise, it is an iterator.
Each row of this file is an encoded record pair. Otherwise, it is an iterator.
"""
# pylint: disable=import-outside-toplevel
from pathlib import Path
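For orientation, here is a minimal usage sketch of the ApplyHistoryBest context whose docstring is fixed above. It is not part of this commit: "tuning.log", mod, and params are placeholder names, and as the docstring notes, any iterator of (MeasureInput, MeasureResult) pairs can be passed instead of a file name.

    from tvm import autotvm, relay

    # Pick the best measured configs from an AutoTVM records log while building.
    with autotvm.apply_history_best("tuning.log"):
        graph, lib, params = relay.build(mod, target="llvm", params=params)
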
54 changes: 30 additions & 24 deletions python/tvm/autotvm/task/space.py
@@ -54,13 +54,13 @@ class TransformSpace(object):
"""Base class for transform space
TransformSpace is the node in the computation graph of axes
Note
----
We can regard our schedule code as a transformation graph of axes.
Starting from raw axes in the definition of tvm.compute, we can transform these axes
by some operators. The operator includes 'split', 'reorder' and 'annotate'.
Each operator has some tunable parameters (e.g. the split factor).
Then the tuning process is just to find good parameters of these op.
.. note::
We can regard our schedule code as a transformation graph of axes.
Starting from raw axes in the definition of tvm.compute, we can transform these axes
by some operators. The operator includes 'split', 'reorder' and 'annotate'.
Each operator has some tunable parameters (e.g. the split factor).
Then the tuning process is just to find good parameters of these op.
So the all the combinations of the parameters of these op forms our search space.
@@ -109,7 +109,8 @@ class VirtualAxis(TransformSpace):
var: int or tvm.schedule.IterVar
If is int, return a virtual axis whose length is the provided argument.
If is IterVar, return a virtual axis whose length is extracted from
the IterVar's extent domain.
the IterVar's extent domain.
name: str
"""
name_ct = 0
@@ -253,9 +254,9 @@ class SplitEntity(object):
Parameters
----------
size: Array of int
the size of every axis after split
the size of every axis after split.
e.g. an axis of extent 128, we split it into 3 axes, a possible
size is [4, 4, 8] (4x4x8 = 128)
size is [4, 4, 8] (4x4x8 = 128).
"""
def __init__(self, size):
self.size = size
@@ -626,7 +627,7 @@ def axis(var):
var: int or tvm.schedule.IterVar
If is int, return an axis whose length is the provided argument.
If is IterVar, return an axis whose length is extracted from the
IterVar's extent domain.
IterVar's extent domain.
"""
return VirtualAxis(var)

@@ -647,18 +648,19 @@ def define_split(self, name, axis, policy='factors', **kwargs):
If is 'power2', the tuner will try power-of-two factors less or equal to the length.
If is 'verbose', the tuner will try all candidates in above two policies.
If is 'candidate', try given candidates.
kwargs: dict
**kwargs:
extra arguments for policy
max_factor: int
the maximum split factor.
filter: function(int) -> bool
see examples below for how to use filter.
num_outputs: int
the total number of axis after split.
no_tail: bool
should we only include divisible numbers as split factors.
candidate: list
(policy=candidate) manual candidate list.
``max_factor``:
the maximum split factor (`int`).
``filter``:
see examples below for how to use filter (`Callable[[int], bool]`).
``num_outputs``:
the total number of axis after split (`int`).
``no_tail``:
should we only include divisible numbers as split factors (`bool`).
`candidate``:
(policy=candidate) manual candidate list (`List`).
Examples
--------
Expand All @@ -668,6 +670,7 @@ def define_split(self, name, axis, policy='factors', **kwargs):
>>> # use a filter that only accepts the split scheme whose inner most tile is less then 4
>>> cfg.define_split('tile_y', y, policy='factors', filter=lambda x: x.size[-1] <= 4)
"""

axes = [axis]
return self._add_new_transform(SplitSpace, name, axes, policy, **kwargs)

@@ -749,8 +752,11 @@ def raise_error(self, msg):

def valid(self):
"""Check whether the config meets all the constraints
Note: This check should be called after instantiation of task,
because the ConfigEntity/ConfigSpace collects errors during instantiation
.. note::
This check should be called after instantiation of task,
because the ConfigEntity/ConfigSpace collects errors during instantiation
Returns
-------
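To tie the define_split and SplitEntity docstrings above together, a minimal sketch of how the knob is typically consumed inside an AutoTVM template (not from this commit; cfg is the ConfigSpace passed to the template, and s, C, y are a schedule, an output tensor, and an axis from the surrounding template):

    # Declare a split knob with two output axes, keeping only schemes whose
    # innermost tile is <= 4 (mirrors the filter example in the docstring).
    cfg.define_split('tile_y', y, num_outputs=2, filter=lambda e: e.size[-1] <= 4)

    # At schedule time the chosen SplitEntity applies its factors to the axis.
    yo, yi = cfg['tile_y'].apply(s, C, y)
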
29 changes: 15 additions & 14 deletions python/tvm/contrib/cblas.py
@@ -21,23 +21,22 @@

def matmul(lhs, rhs, transa=False, transb=False, **kwargs):
"""Create an extern op that compute matrix mult of A and rhs with CrhsLAS
This function serves as an example on how to call external libraries.
Parameters
----------
lhs : Tensor
lhs: Tensor
The left matrix operand
rhs : Tensor
rhs: Tensor
The right matrix operand
transa : bool
transa: bool
Whether transpose lhs
transb : bool
transb: bool
Whether transpose rhs
Returns
-------
C : Tensor
C: Tensor
The result tensor.
"""
n = lhs.shape[1] if transa else lhs.shape[0]
@@ -55,20 +54,22 @@ def matmul(lhs, rhs, transa=False, transb=False, **kwargs):

def batch_matmul(lhs, rhs, transa=False, transb=False, iterative=False, **kwargs):
"""Create an extern op that compute batched matrix mult of A and rhs with CBLAS
This function serves as an example on how to call external libraries.
Parameters
This function serves as an example on how to call external libraries.
Parameters
----------
lhs : Tensor
lhs: Tensor
The left matrix operand
rhs : Tensor
rhs: Tensor
The right matrix operand
transa : bool
transa: bool
Whether transpose lhs
transb : bool
transb: bool
Whether transpose rhs
Returns
Returns
-------
C : Tensor
C: Tensor
The result tensor.
"""
b = lhs.shape[0]
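A minimal sketch of calling the cblas matmul documented above (not part of this commit; shapes are illustrative, it assumes the pre-te-namespace tvm.placeholder/tvm.create_schedule API of this TVM version, and it requires a TVM build with a BLAS library enabled):

    import tvm
    from tvm.contrib import cblas

    n, l, m = 1024, 1024, 1024
    A = tvm.placeholder((n, l), name="A")
    B = tvm.placeholder((l, m), name="B")
    C = cblas.matmul(A, B)              # extern op lowered to a CBLAS gemm call
    s = tvm.create_schedule(C.op)
    f = tvm.build(s, [A, B, C], target="llvm")
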
10 changes: 5 additions & 5 deletions python/tvm/ir/base.py
@@ -39,11 +39,11 @@ def astext(self, show_meta_data=True, annotate=None):
Optionally annotate function to provide additional
information in the comment block.
Note
----
The meta data section is necessary to fully parse the text format.
However, it can contain dumps that are big (e.g constant weights),
so it can be helpful to skip printing the meta data section.
.. note::
The meta data section is necessary to fully parse the text format.
However, it can contain dumps that are big (e.g constant weights),
so it can be helpful to skip printing the meta data section.
Returns
-------
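As a quick illustration of the astext note above, a hedged sketch (not from this commit) that prints a small Relay function while skipping the meta data section:

    from tvm import relay

    x = relay.var("x", shape=(3, 4), dtype="float32")
    func = relay.Function([x], relay.nn.relu(x))
    # Skipping the meta data keeps the dump small, but the output can then no
    # longer be fully re-parsed, as the note explains.
    print(func.astext(show_meta_data=False))
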
3 changes: 2 additions & 1 deletion python/tvm/ir/transform.py
@@ -160,7 +160,8 @@ class Sequential(Pass):
Some typical usage of the sequential pass are:
1. Users provide a list of passes for optimization.
2. Only an optimization level is provided so that the backend system has
to glob all passes at this level and below to perform the optimizations.
to glob all passes at this level and below to perform the optimizations.
Note that users can also provide a series of passes that they don't want to
apply when running a sequential pass. Pass dependency will be resolved in
the backend as well.
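For the Sequential pass described above, a minimal sketch of composing passes (not from this commit; the listed passes are common Relay passes used only as an example, mod is an existing Relay module, and relay.build_config is the pass-context API of this TVM version):

    from tvm import relay

    seq = relay.transform.Sequential([
        relay.transform.FoldConstant(),
        relay.transform.FuseOps(fuse_opt_level=2),
    ])
    # Passes whose required opt_level fits the current context are applied.
    with relay.build_config(opt_level=3):
        mod = seq(mod)
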
60 changes: 30 additions & 30 deletions python/tvm/relay/op/reduce.py
@@ -145,21 +145,21 @@ def all(data, axis=None, keepdims=False, exclude=False):
--------
.. code-block:: python
data = relay.Constant(tvm.nd.array([[[ True, True, True],
[ True, True, True],
[False, True, False]],
[[ True, False, False],
[ True, True, False],
[False, True, True]]]))
relay.all(data, axis=1)
# [[False, True, False],
# [False, False, False]]
relay.all(data, axis=0)
# [[ True, False, False],
# [ True, True, False],
# [False, True, False]]
data = relay.Constant(tvm.nd.array([[[ True, True, True],
[ True, True, True],
[False, True, False]],
[[ True, False, False],
[ True, True, False],
[False, True, True]]]))
relay.all(data, axis=1)
# [[False, True, False],
# [False, False, False]]
relay.all(data, axis=0)
# [[ True, False, False],
# [ True, True, False],
# [False, True, False]]
"""
axis = [axis] if isinstance(axis, int) else axis
@@ -197,21 +197,21 @@ def any(data, axis=None, keepdims=False, exclude=False):
--------
.. code-block:: python
data = relay.Constant(tvm.nd.array([[[ True, True, True],
[ True, True, True],
[False, True, False]],
[[ True, False, False],
[ True, True, False],
[False, True, True]]]))
relay.any(data, axis=1)
# [[True, True, True],
# [True, True, True]]
relay.any(data, axis=0)
# [[ True, True, True],
# [ True, True, True],
# [False, True, True]]
data = relay.Constant(tvm.nd.array([[[ True, True, True],
[ True, True, True],
[False, True, False]],
[[ True, False, False],
[ True, True, False],
[False, True, True]]]))
relay.any(data, axis=1)
# [[True, True, True],
# [True, True, True]]
relay.any(data, axis=0)
# [[ True, True, True],
# [ True, True, True],
# [False, True, True]]
"""
axis = [axis] if isinstance(axis, int) else axis