Showing 113 changed files with 14,628 additions and 8,597 deletions.
@@ -0,0 +1,12 @@
*.gz
*.npz
*.pyc
*~
.DS_Store
.idea
.spyproject/
build/
dist
docs/_build
tensorlayer.egg-info
tensorlayer/__pycache__
Binary files not shown (24 files).
@@ -0,0 +1,16 @@
import logging as _logger

logging = _logger.getLogger('tensorlayer')
logging.setLevel(_logger.INFO)
_handler = _logger.StreamHandler()
formatter = _logger.Formatter('[TL] %(message)s')
_handler.setFormatter(formatter)
logging.addHandler(_handler)


def info(fmt, *args):
    logging.info(fmt, *args)


def warning(fmt, *args):
    logging.warning(fmt, *args)
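The file above is a thin wrapper around the standard library's logging package: every message goes through the 'tensorlayer' logger and is prefixed with "[TL]". As a rough usage sketch (not part of this commit; the file's path is not shown in this view, tensorlayer/_logging.py is assumed, and the message text is made up), other modules would call the info/warning helpers like this:

# Hypothetical usage sketch -- the module path and messages are assumptions.
from tensorlayer import _logging as logging

logging.info("Loading %s weights ...", "VGG16")        # prints: [TL] Loading VGG16 weights ...
logging.warning("%d parameters were not restored", 3)  # prints: [TL] 3 parameters were not restored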
@@ -1,109 +1,154 @@
 #! /usr/bin/python
-# -*- coding: utf8 -*-
+# -*- coding: utf-8 -*-

+import tensorflow as tf
+from tensorflow.python.util.deprecation import deprecated
+
+__all__ = [
+    'identity',
+    'ramp',
+    'leaky_relu',
+    'swish',
+    'pixel_wise_softmax',
+    'linear',
+    'lrelu',
+]
+
-import tensorflow as tf
-
-def identity(x, name=None):
-    """The identity activation function, Shortcut is ``linear``.
+@deprecated("2018-06-30", "This API will be deprecated soon as tf.identity can do the same thing.")
+def identity(x):
+    """The identity activation function.
+    Shortcut is ``linear``.
     Parameters
     ----------
-    x : a tensor input
-        input(s)
+    x : Tensor
+        input.
     Returns
-    --------
-    A `Tensor` with the same type as `x`.
+    -------
+    Tensor
+        A ``Tensor`` in the same type as ``x``.
     """
     return x

-# Shortcut
-linear = identity

-def ramp(x=None, v_min=0, v_max=1, name=None):
+def ramp(x, v_min=0, v_max=1, name=None):
     """The ramp activation function.
     Parameters
     ----------
-    x : a tensor input
-        input(s)
+    x : Tensor
+        input.
     v_min : float
-        if input(s) smaller than v_min, change inputs to v_min
+        cap input to v_min as a lower bound.
     v_max : float
-        if input(s) greater than v_max, change inputs to v_max
-    name : a string or None
-        An optional name to attach to this activation function.
+        cap input to v_max as a upper bound.
+    name : str
+        The function name (optional).
     Returns
-    --------
-    A `Tensor` with the same type as `x`.
+    -------
+    Tensor
+        A ``Tensor`` in the same type as ``x``.
     """
     return tf.clip_by_value(x, clip_value_min=v_min, clip_value_max=v_max, name=name)

-def leaky_relu(x=None, alpha=0.1, name="LeakyReLU"):
+
+def leaky_relu(x, alpha=0.1, name="lrelu"):
     """The LeakyReLU, Shortcut is ``lrelu``.
-    Modified version of ReLU, introducing a nonzero gradient for negative
-    input.
+    Modified version of ReLU, introducing a nonzero gradient for negative input.
     Parameters
     ----------
-    x : A `Tensor` with type `float`, `double`, `int32`, `int64`, `uint8`,
-        `int16`, or `int8`.
-    alpha : `float`. slope.
-    name : a string or None
-        An optional name to attach to this activation function.
+    x : Tensor
+        Support input type ``float``, ``double``, ``int32``, ``int64``, ``uint8``,
+        ``int16``, or ``int8``.
+    alpha : float
+        Slope.
+    name : str
+        The function name (optional).
     Examples
-    ---------
-    >>> network = tl.layers.DenseLayer(network, n_units=100, name = 'dense_lrelu',
-    ...                 act= lambda x : tl.act.lrelu(x, 0.2))
+    --------
+    >>> net = tl.layers.DenseLayer(net, 100, act=lambda x : tl.act.lrelu(x, 0.2), name='dense')
+    Returns
+    -------
+    Tensor
+        A ``Tensor`` in the same type as ``x``.
     References
     ------------
-    - `Rectifier Nonlinearities Improve Neural Network Acoustic Models, Maas et al. (2013) <http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf>`_
+    - `Rectifier Nonlinearities Improve Neural Network Acoustic Models, Maas et al. (2013) <http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf>`__
     """
-    with tf.name_scope(name) as scope:
-        # x = tf.nn.relu(x)
-        # m_x = tf.nn.relu(-x)
-        # x -= alpha * m_x
-        x = tf.maximum(x, alpha * x)
+    # with tf.name_scope(name) as scope:
+    # x = tf.nn.relu(x)
+    # m_x = tf.nn.relu(-x)
+    # x -= alpha * m_x
+    x = tf.maximum(x, alpha * x, name=name)
     return x

-#Shortcut
-lrelu = leaky_relu

-def pixel_wise_softmax(output, name='pixel_wise_softmax'):
+def swish(x, name='swish'):
+    """The Swish function.
+    See `Swish: a Self-Gated Activation Function <https://arxiv.org/abs/1710.05941>`__.
+    Parameters
+    ----------
+    x : Tensor
+        input.
+    name: str
+        function name (optional).
+    Returns
+    -------
+    Tensor
+        A ``Tensor`` in the same type as ``x``.
+    """
+    with tf.name_scope(name):
+        x = tf.nn.sigmoid(x) * x
+    return x
+
+
+@deprecated("2018-06-30", "This API will be deprecated soon as tf.nn.softmax can do the same thing.")
+def pixel_wise_softmax(x, name='pixel_wise_softmax'):
     """Return the softmax outputs of images, every pixels have multiple label, the sum of a pixel is 1.
     Usually be used for image segmentation.
     Parameters
-    ------------
-    output : tensor
-        - For 2d image, 4D tensor [batch_size, height, weight, channel], channel >= 2.
-        - For 3d image, 5D tensor [batch_size, depth, height, weight, channel], channel >= 2.
+    ----------
+    x : Tensor
+        input.
+        - For 2d image, 4D tensor (batch_size, height, weight, channel), where channel >= 2.
+        - For 3d image, 5D tensor (batch_size, depth, height, weight, channel), where channel >= 2.
+    name : str
+        function name (optional)
+    Returns
+    -------
+    Tensor
+        A ``Tensor`` in the same type as ``x``.
     Examples
-    ---------
+    --------
     >>> outputs = pixel_wise_softmax(network.outputs)
     >>> dice_loss = 1 - dice_coe(outputs, y_, epsilon=1e-5)
     References
-    -----------
-    - `tf.reverse <https://www.tensorflow.org/versions/master/api_docs/python/array_ops.html#reverse>`_
+    ----------
+    - `tf.reverse <https://www.tensorflow.org/versions/master/api_docs/python/array_ops.html#reverse>`__
     """
-    with tf.name_scope(name) as scope:
-        return tf.nn.softmax(output)
-        ## old implementation
-        # exp_map = tf.exp(output)
-        # if output.get_shape().ndims == 4: # 2d image
-        # evidence = tf.add(exp_map, tf.reverse(exp_map, [False, False, False, True]))
-        # elif output.get_shape().ndims == 5: # 3d image
-        # evidence = tf.add(exp_map, tf.reverse(exp_map, [False, False, False, False, True]))
-        # else:
-        # raise Exception("output parameters should be 2d or 3d image, not %s" % str(output._shape))
-        # return tf.div(exp_map, evidence)
+    with tf.name_scope(name):
+        return tf.nn.softmax(x)


+# Alias
+linear = identity
+lrelu = leaky_relu
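To see how the reworked activation API reads in practice, here is a minimal sketch (not part of this commit) that assumes TensorFlow 1.x and the usual `tl.act` alias for this module, as the updated docstrings do; the layer names and sizes are made up:

# Hedged usage sketch: exercising the updated activation functions.
import tensorflow as tf
import tensorlayer as tl

x = tf.placeholder(tf.float32, shape=(None, 784), name='x')
net = tl.layers.InputLayer(x, name='input')
# leaky_relu takes the slope through a lambda, as in the updated docstring example.
net = tl.layers.DenseLayer(net, 800, act=lambda v: tl.act.leaky_relu(v, 0.2), name='dense1')
# swish is the new activation introduced in this file.
net = tl.layers.DenseLayer(net, 10, act=tl.act.swish, name='output')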
@@ -0,0 +1 @@
"""The tensorlayer.cli module provides a command-line tool for some common tasks."""
@@ -0,0 +1,14 @@
import argparse

from tensorlayer.cli import train

if __name__ == "__main__":
    parser = argparse.ArgumentParser(prog='tl')
    subparsers = parser.add_subparsers(dest='cmd')
    train_parser = subparsers.add_parser('train', help='train a model using multiple local GPUs or CPUs.')
    train.build_arg_parser(train_parser)
    args = parser.parse_args()
    if args.cmd == 'train':
        train.main(args)
    else:
        parser.print_help()
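Assuming the file above is the `__main__` module of `tensorlayer.cli` (its path is not shown in this view), the subcommand dispatch would be reached with an invocation such as `python -m tensorlayer.cli train ...`, which parses the `train` subcommand and hands the result to `train.main(args)`. A small, self-contained sketch of the same argparse pattern, useful for checking the wiring without actually training:

# Hypothetical smoke test of the subcommand wiring shown above (not part of the commit).
import argparse

parser = argparse.ArgumentParser(prog='tl')
subparsers = parser.add_subparsers(dest='cmd')
subparsers.add_parser('train', help='train a model using multiple local GPUs or CPUs.')

args = parser.parse_args(['train'])
assert args.cmd == 'train'  # the real entry point would now call train.main(args)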