Skip to content

Commit

Permalink
Dev add docstring2 (#3305)
Browse files Browse the repository at this point in the history
* add docs to env_util.py function_util.py

* add docs to config_util.py

* add docs to session_util.py

* add docs to layer.py

* add docs to layers.upsample_2d

* add docs nn_ops

* format refine

* modify name_scope.py

* add docs >> placement_util.py

* add docs to distribute.py

* add docs to data_ops.py

* add docs to user_op_builder.py

* add docs to math_ops.py

* fix conflict

* add docs to initializer_util.py

* refine docstring

* Final Cut (#3354)

* bump version

* rm pip install

Co-authored-by: tsai <caishenghang@1f-dev.kbaeegfb1x0ubnoznzequyxzve.bx.internal.cloudapp.net>

* remove windows directory (#3358)

* install python pkgs from dev-requirements.txt in configure time, remove PY3 flag (#3351)

* install python pkgs from dev-requirements.txt in configure time

* remove PY3 cmake flag, add dev-requirements.txt to CMAKE_CONFIGURE_DEPENDS, remove unused cmake functions

* build nccl with -jN

* special tag for gpu (#3362)

Co-authored-by: tsai <[email protected]>
Co-authored-by: daquexian <[email protected]>

* ci master (#3366)

Co-authored-by: Shenghang Tsai <[email protected]>
Co-authored-by: tsai <caishenghang@1f-dev.kbaeegfb1x0ubnoznzequyxzve.bx.internal.cloudapp.net>
Co-authored-by: OuYang Yu <[email protected]>
Co-authored-by: daquexian <[email protected]>
Co-authored-by: tsai <[email protected]>
Former-commit-id: 8b07d01
  • Loading branch information
6 people authored Aug 14, 2020
1 parent ef24426 commit c7b9f35
Show file tree
Hide file tree
Showing 7 changed files with 477 additions and 39 deletions.
2 changes: 1 addition & 1 deletion oneflow/python/experimental/name_scope.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def name_scope(name: str) -> None:
...
Args:
name: Name of this name scope
name: Name of this namespace
"""
assert isinstance(name, str)
Expand Down
28 changes: 28 additions & 0 deletions oneflow/python/framework/distribute.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ def deprecated_consistent_strategy():
@oneflow_export("scope.consistent_view")
class DistributeConsistentStrategy(distribute_ctx.DistributeStrategy):
r"""Create a scope in consistent view. All operators within the scope will be automatically parallelized among different accelerators for best performance and least data transfer.
Usage::
with oneflow.scope.consistent_view():
Expand Down Expand Up @@ -155,18 +156,45 @@ def ConsistentStrategyEnabled() -> bool:

@oneflow_export("distribute.split")
def split(axis: int) -> SplitDistribute:
    r"""Generate a split scheme in which the op will be split along `axis`.

    Args:
        axis (int): The axis along which the op will be split.

    Returns:
        SplitDistribute: Split scheme object, often required by `with_distribute` method of `Blob` or `oneflow.get_variable`.

    Example::

        weight = weight.with_distribute(distribute.split(1))

    """
    # Exact type check (not isinstance): excludes int subclasses such as bool.
    assert type(axis) is int
    # Only axes 0..10 have pre-built split objects in _axis_str2split_axis_obj.
    assert str(axis) in _axis_str2split_axis_obj, "not a valid split. expected: [0, 11)"
    return _axis_str2split_axis_obj[str(axis)]


@oneflow_export("distribute.broadcast")
def broadcast() -> BroadcastDistribute:
    r"""Generate a broadcast scheme.

    Returns:
        BroadcastDistribute: Broadcast scheme object, often required by `with_distribute` method of `Blob` or `oneflow.get_variable`.

    Example::

        segment_ids = segment_ids.with_distribute(flow.distribute.broadcast())

    """
    # A single shared broadcast scheme object is reused for every call.
    return _broadcast


@oneflow_export("distribute.auto")
def auto() -> AutoDistribute:
    r"""Generate an auto distribute scheme.

    Returns:
        AutoDistribute: Auto distribute scheme object, often required by `with_distribute` method of `Blob` or `oneflow.get_variable`.
    """
    # Fixed: previous docstring incorrectly said "Generate a broadcast scheme."
    # (copy-paste from broadcast()); this function returns the auto scheme.
    return _auto


Expand Down
27 changes: 27 additions & 0 deletions oneflow/python/framework/placement_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,16 @@

@oneflow_export("placement.current_scope")
def api_current_placement_scope() -> placement_ctx.PlacementScope:
r"""Get current placement scope object.
For instance:
if "cpu" == flow.placement.current_scope().default_device_tag:
print("ops shall run in the cpu mode only")
Returns:
placement_ctx.PlacementScope: The current placement scope object.
"""
print(
"WARNING: oneflow.placement.current_scope has been deprecated. "
"Please use oneflow.current_scope.device_parallel_desc_symbol instead."
Expand Down Expand Up @@ -66,6 +76,23 @@ def deprecated_placement(*args, **kwargs):
def api_placement(
device_tag: str, machine_device_ids: str
) -> placement_ctx.PlacementScope:
r"""Create a scope. All ops within the scope will run on specified device that placed by "device_tag" and "machine_device_ids".
Args:
device_tag (str): Device tag, "cpu" or "gpu" only
machine_device_ids (str): String that specifies what device(s) to use in the format "<NODE INDEX (RANGE)>:<DEVICE INDEX (RANGE)>". For example, "0:0" means use the device 0 of machine 0, and "1:4-6" means use device 4, 5, 6 of machine 1.
Returns:
placement_ctx.PlacementScope: Placement scope
For instance::
with flow.fixed_placement("gpu", "0:0"):
logits = lenet(images, train=False)
loss = flow.nn.sparse_softmax_cross_entropy_with_logits(labels, logits, name="softmax_loss")
flow.losses.add_loss(loss)
"""
from oneflow.python.compatibility import with_cuda

if with_cuda == False:
Expand Down
16 changes: 16 additions & 0 deletions oneflow/python/ops/data_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,6 +290,22 @@ def ofrecord_reader(
shuffle_after_epoch: bool = False,
name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
r"""Get ofrecord object from ofrecord dataset.
Args:
ofrecord_dir (str): Path to ofrecord dataset.
batch_size (int, optional): Batch size. Defaults to 1.
data_part_num (int, optional): Number of dataset's partitions. Defaults to 1.
part_name_prefix (str, optional): Prefix of dataset's partition file. Defaults to "part-".
part_name_suffix_length (int, optional): Total length of padded suffix number , -1 means no padding. eg: 3 for `part-001`. Defaults to -1.
random_shuffle (bool, optional): Determines records shuffled or not. Defaults to False.
shuffle_buffer_size (int, optional): Shuffle buffer size. Defaults to 1024.
shuffle_after_epoch (bool, optional): Shuffled or not after each epoch. Defaults to False.
name (Optional[str], optional): Optional name. Defaults to None.
Returns:
remote_blob_util.BlobDef: The blob of OFRecord data read from the dataset.
"""
if name is None:
name = id_util.UniqueStr("OFRecord_Reader_")

Expand Down
97 changes: 66 additions & 31 deletions oneflow/python/ops/initializer_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,13 +30,16 @@ def constant_initializer(
value: float = 0, dtype: dtype_util.dtype = dtype_util.float
) -> op_conf_util.InitializerConf:
r"""Initializer that generates blob with constant values.
Args:
value: A Python scalar. All elements of the initialized variable
will be set to the corresponding value.
dtype: Default data type.
value (float, optional): A Python scalar. All elements of the initialized variable will be set to this value. Defaults to 0.
dtype (dtype_util.dtype, optional): . Default data type. Defaults to dtype_util.float.
Raises:
NotImplementedError: Do not support such data type.
Returns:
An InitializerConf object.
op_conf_util.InitializerConf: An InitializerConf object.
"""
initializer = op_conf_util.InitializerConf()
if dtype in [dtype_util.float, dtype_util.double]:
Expand All @@ -57,30 +60,48 @@ def constant_initializer(
def zeros_initializer(
    dtype: dtype_util.dtype = dtype_util.float,
) -> op_conf_util.InitializerConf:
    r"""Initializer that generates blobs initialized to 0.

    Args:
        dtype (dtype_util.dtype, optional): Data type of the initialized blob. Defaults to dtype_util.float.

    Returns:
        op_conf_util.InitializerConf: An InitializerConf object, produced by `constant_initializer(0.0, dtype)`.
    """
    return constant_initializer(0.0, dtype)


@oneflow_export("ones_initializer")
def ones_initializer(
    dtype: dtype_util.dtype = dtype_util.float,
) -> op_conf_util.InitializerConf:
    r"""Initializer that generates blobs initialized to 1.

    Args:
        dtype (dtype_util.dtype, optional): Data type of the initialized blob. Defaults to dtype_util.float.

    Returns:
        op_conf_util.InitializerConf: An InitializerConf object, produced by `constant_initializer(1.0, dtype)`.
    """
    return constant_initializer(1.0, dtype)


@oneflow_export("random_uniform_initializer")
def random_uniform_initializer(
minval: float = 0, maxval: float = 1, dtype: dtype_util.dtype = dtype_util.float
) -> op_conf_util.InitializerConf:
r"""Initializer that generates blob with a uniform distribution.
r"""Initializer that generates blobs with a uniform distribution.
Args:
minval: A python scalar. Lower bound of the range of random values to generate.
maxval: A python scalar. Upper bound of the range of random values to generate.
Defaults to 1 for float types.
seed: None. Not support yet.
dtype: Default data type
minval (float, optional): A python scalar. Lower bound of the range of random values to generate. Defaults to 0.
maxval (float, optional): A python scalar. Upper bound of the range of random values to generate. Defaults to 1.
dtype (dtype_util.dtype, optional): Default data type. Defaults to dtype_util.float.
Raises:
NotImplementedError: Do not support such data type.
Returns:
An InitializerConf object.
op_conf_util.InitializerConf: Initial configuration
"""
assert minval <= maxval
initializer = op_conf_util.InitializerConf()
Expand Down Expand Up @@ -110,12 +131,14 @@ def random_normal_initializer(
r"""Initializer that generates blob with a normal distribution.
Args:
mean: a python scalar. Mean of the random values to generate.
stddev: a python scalar. Standard deviation of the random values to generate.
seed: None. Not support yet.
dtype: None. Not applicable in OneFlow
mean (float, optional): A python scalar. Mean of the random values to generate. Defaults to 0.0.
stddev (float, optional): A python scalar. Standard deviation of the random values to generate. Defaults to 1.0.
seed (Optional[int], optional): None. Not support yet. Defaults to None.
dtype (Optional[dtype_util.dtype], optional): Not applicable in OneFlow. Defaults to None.
Returns:
An InitializerConf object.
op_conf_util.InitializerConf: Initial configuration
"""
assert seed is None
assert dtype is None
Expand All @@ -135,8 +158,11 @@ def truncated_normal_initializer(
r"""Initializer that generates a truncated normal distribution.
Args:
mean: A scalar (float)
stddev: A scalar (float)
mean (float, optional): A scalar (float). Defaults to 0.0.
stddev (float, optional): A scalar (float). Defaults to 1.0.
Returns:
op_conf_util.InitializerConf: Initial configuration
"""
initializer = op_conf_util.InitializerConf()
setattr(initializer.truncated_normal_conf, "mean", float(mean))
Expand All @@ -162,15 +188,17 @@ def variance_scaling_initializer(
data_format: str = "",
) -> op_conf_util.InitializerConf:
r"""Initializer that generates a truncated normal distribution
or a random normal distribution or a random uniform distribution
with a scale adapting to it.
or a random normal distribution or a random uniform distribution
with a scale adapting to it.
Args:
scale: Scaling factor (positive float).
mode: One of "fan_in", "fan_out", "fan_avg".
distribution: Random distribution to use. One of "truncated_normal",
"random_normal", "random_uniform".
data_format: A string be one of "N...C" or "NC..."
scale (float, optional): Scaling factor (positive float). Defaults to 1.0.
mode (str, optional): One of "fan_in", "fan_out", "fan_avg". Defaults to "fan_in".
distribution (str, optional): Random distribution to use. One of "truncated_normal", "random_normal", "random_uniform". Defaults to "truncated_normal".
data_format (str, optional): A string be one of "N...C" or "NC...". Defaults to "".
Returns:
op_conf_util.InitializerConf: An InitializerConf object.
"""
initializer = op_conf_util.InitializerConf()
setattr(initializer.variance_scaling_conf, "scale", float(scale))
Expand Down Expand Up @@ -202,11 +230,18 @@ def kaiming_initializer(
- He, K. et al. (2015), using a normal or uniform distribution.
Args:
distribution: 'random_normal' or 'random_uniform'
mode: 'fan_in', 'fan_out' or 'fan_avg'
nonlinearity: None, 'tanh', 'sigmoid', 'relu' or 'leaky_relu'
negative_slope: the negative slope of leaky_relu
data_format: 'NCHW', 'NHWC'
shape (Sequence[int]): Blob shape.
distribution (str, optional): 'random_normal' or 'random_uniform'. Defaults to "random_normal".
mode (str, optional): 'fan_in', 'fan_out' or 'fan_avg'. Defaults to "fan_in".
nonlinearity (str, optional): None, 'tanh', 'sigmoid', 'relu' or 'leaky_relu'. Defaults to "leaky_relu".
negative_slope (float, optional): The negative slope of leaky_relu. Defaults to 0.0.
data_format (str, optional): 'NCHW', 'NHWC'. Defaults to "NCHW".
Raises:
NotImplementedError: Only support normal and uniform distribution
Returns:
op_conf_util.InitializerConf: An InitializerConf produced by flow.random_normal_initializer or flow.random_uniform_initializer
"""
assert isinstance(shape, tuple)
# Kaiming Initialization only deals with FC, Conv and Deconv's weight
Expand Down
Loading

0 comments on commit c7b9f35

Please sign in to comment.