Commit
Fix Sphinx docstring formatting error. (#13004, #13005, #13006) (#13175)
frankfliu authored and nswamy committed Nov 9, 2018
1 parent 3a6dcc7 commit 0166793
Showing 2 changed files with 12 additions and 12 deletions.
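Every hunk below makes the same one-line change: the default prefix values in the NumPy-style docstrings ('rnn_', 'lstm_', 'gru_', and so on) are wrapped in double backticks so that Sphinx renders them as inline literals. Left bare, a quoted value with a trailing underscore such as 'rnn_' is typically parsed by reStructuredText as a hyperlink reference, which is the usual source of the Sphinx formatting errors this kind of change addresses. A minimal sketch of the convention; the function and parameters below are hypothetical and not part of MXNet:

def build_cell(num_hidden, prefix='rnn_'):
    """Build a recurrent cell (illustrative only, not an MXNet API).

    Parameters
    ----------
    num_hidden : int
        Number of units in the output symbol.
    prefix : str, default ``'rnn_'``
        Prefix for names of layers. The double backticks make reStructuredText
        treat ``'rnn_'`` as an inline literal instead of trying to resolve the
        trailing underscore as a reference target.
    """
    return {'num_hidden': num_hidden, 'prefix': prefix}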
python/mxnet/gluon/rnn/rnn_cell.py: 6 changes (3 additions, 3 deletions)
@@ -333,7 +333,7 @@ class RNNCell(HybridRecurrentCell):
         Initializer for the bias vector.
     h2h_bias_initializer : str or Initializer, default 'zeros'
         Initializer for the bias vector.
-    prefix : str, default 'rnn_'
+    prefix : str, default ``'rnn_'``
         Prefix for name of `Block`s
         (and name of weight if params is `None`).
     params : Parameter or None
@@ -440,7 +440,7 @@ class LSTMCell(HybridRecurrentCell):
         Initializer for the bias vector.
     h2h_bias_initializer : str or Initializer, default 'zeros'
         Initializer for the bias vector.
-    prefix : str, default 'lstm_'
+    prefix : str, default ``'lstm_'``
         Prefix for name of `Block`s
         (and name of weight if params is `None`).
     params : Parameter or None, default None
@@ -565,7 +565,7 @@ class GRUCell(HybridRecurrentCell):
         Initializer for the bias vector.
     h2h_bias_initializer : str or Initializer, default 'zeros'
         Initializer for the bias vector.
-    prefix : str, default 'gru_'
+    prefix : str, default ``'gru_'``
         prefix for name of `Block`s
         (and name of weight if params is `None`).
     params : Parameter or None, default None
python/mxnet/rnn/rnn_cell.py: 18 changes (9 additions, 9 deletions)
@@ -368,7 +368,7 @@ class RNNCell(BaseRNNCell):
         Number of units in output symbol.
     activation : str or Symbol, default 'tanh'
         Type of activation function. Options are 'relu' and 'tanh'.
-    prefix : str, default 'rnn_'
+    prefix : str, default ``'rnn_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -412,7 +412,7 @@ class LSTMCell(BaseRNNCell):
     ----------
     num_hidden : int
         Number of units in output symbol.
-    prefix : str, default 'lstm_'
+    prefix : str, default ``'lstm_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -475,7 +475,7 @@ class GRUCell(BaseRNNCell):
     ----------
     num_hidden : int
         Number of units in output symbol.
-    prefix : str, default 'gru_'
+    prefix : str, default ``'gru_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -554,7 +554,7 @@ class FusedRNNCell(BaseRNNCell):
         Whether to return the states that can be used as starting states next time.
     forget_bias : bias added to forget gate, default 1.0.
         Jozefowicz et al. 2015 recommends setting this to 1.0
-    prefix : str, default '$mode_' such as 'lstm_'
+    prefix : str, default ``'$mode_'`` such as ``'lstm_'``
         Prefix for names of layers
         (this prefix is also used for names of weights if `params` is None
         i.e. if `params` are being created and not reused)
@@ -832,7 +832,7 @@ class DropoutCell(BaseRNNCell):
     dropout : float
         Percentage of elements to drop out, which
         is 1 - percentage to retain.
-    prefix : str, default 'dropout_'
+    prefix : str, default ``'dropout_'``
         Prefix for names of layers
         (this prefix is also used for names of weights if `params` is None
         i.e. if `params` are being created and not reused)
@@ -1007,7 +1007,7 @@ class BidirectionalCell(BaseRNNCell):
     params : RNNParams, default None.
         Container for weight sharing between cells.
         A new RNNParams container is created if `params` is None.
-    output_prefix : str, default 'bi_'
+    output_prefix : str, default ``'bi_'``
         prefix for name of output
     """
     def __init__(self, l_cell, r_cell, params=None, output_prefix='bi_'):
@@ -1207,7 +1207,7 @@ class ConvRNNCell(BaseConvRNNCell):
     activation : str or Symbol,
         default functools.partial(symbol.LeakyReLU, act_type='leaky', slope=0.2)
         Type of activation function.
-    prefix : str, default 'ConvRNN_'
+    prefix : str, default ``'ConvRNN_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -1287,7 +1287,7 @@ class ConvLSTMCell(BaseConvRNNCell):
     activation : str or Symbol
         default functools.partial(symbol.LeakyReLU, act_type='leaky', slope=0.2)
         Type of activation function.
-    prefix : str, default 'ConvLSTM_'
+    prefix : str, default ``'ConvLSTM_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
@@ -1379,7 +1379,7 @@ class ConvGRUCell(BaseConvRNNCell):
     activation : str or Symbol,
         default functools.partial(symbol.LeakyReLU, act_type='leaky', slope=0.2)
         Type of activation function.
-    prefix : str, default 'ConvGRU_'
+    prefix : str, default ``'ConvGRU_'``
         Prefix for name of layers (and name of weight if params is None).
     params : RNNParams, default None
         Container for weight sharing between cells. Created if None.
