use numpydoc instead of napoleon

This commit is contained in:
iclementine 2020-12-18 11:12:22 +08:00
parent bbc50faef2
commit 49c9cb38be
2 changed files with 8 additions and 2 deletions

View File

@ -34,7 +34,8 @@ extensions = [
 'sphinx.ext.autodoc',
 'sphinx.ext.viewcode',
 "sphinx_rtd_theme",
-'sphinx.ext.napoleon',
+'sphinx.ext.mathjax',
+'numpydoc',
 ]
 # Add any paths that contain templates here, relative to this directory.
@ -59,4 +60,5 @@ html_theme = "sphinx_rtd_theme"
 html_static_path = ['_static']
 source_suffix = ['.rst', '.md']
 # -- Extension configuration -------------------------------------------------
+numpydoc_show_class_members = False

View File

@ -36,10 +36,13 @@ def scaled_dot_product_attention(q,
q: Tensor [shape=(*, T_q, d)]
    the query tensor.
k: Tensor [shape=(*, T_k, d)]
    the key tensor.
v: Tensor [shape=(*, T_k, d_v)]
    the value tensor.
mask: Tensor, [shape=(*, T_q, T_k) or broadcastable shape], optional
    the mask tensor, zeros correspond to paddings. Defaults to None.
@ -47,6 +50,7 @@ def scaled_dot_product_attention(q,
----------
out: Tensor [shape=(*, T_q, d_v)]
    the context vector.
attn_weights: Tensor [shape=(*, T_q, T_k)]
    the attention weights.
"""