add missing configuration functions in v2 API.
lcy-seso committed Jun 6, 2017
1 parent e4c97e4 commit 692d251
Showing 7 changed files with 62 additions and 57 deletions.
10 changes: 9 additions & 1 deletion doc/api/v2/config/layer.rst
@@ -434,10 +434,18 @@ smooth_l1_cost
 .. autoclass:: paddle.v2.layer.smooth_l1_cost
     :noindex:
 
-Check Layer
+Check Layer
 ============
 
 eos
 ---
 .. autoclass:: paddle.v2.layer.eos
     :noindex:
+
+Miscs
+=====
+
+dropout
+--------------
+.. autoclass:: paddle.v2.layer.dropout
+    :noindex:
8 changes: 0 additions & 8 deletions doc/api/v2/config/networks.rst
@@ -125,11 +125,3 @@ simple_attention
     :members: simple_attention
     :noindex:
 
-Miscs
-=====
-
-dropout_layer
---------------
-.. automodule:: paddle.v2.networks
-    :members: dropout_layer
-    :noindex:
12 changes: 6 additions & 6 deletions python/paddle/trainer/config_parser.py
@@ -3546,11 +3546,7 @@ def update_g_config():
     return g_config
 
 
-def begin_parse(config_arg_str=''):
-    '''
-    @param config_arg_str: a string of the form var1=val1,var2=val2. It will be
-    passed to config script as a dictionary CONFIG_ARGS
-    '''
+def begin_parse():
     init_config_environment()
     for hook in _parse_config_hooks:
         hook()
@@ -3568,8 +3564,12 @@ def begin_parse(config_arg_str=''):
 
 
 def parse_config(trainer_config, config_arg_str):
-    begin_parse(config_arg_str)
+    '''
+    @param config_arg_str: a string of the form var1=val1,var2=val2. It will be
+    passed to config script as a dictionary CONFIG_ARGS
+    '''
+
+    begin_parse()
     config_args = {}
 
     if config_arg_str:
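
For reference, a minimal sketch (assuming plain string values and no escaping) of how a config_arg_str of the form var1=val1,var2=val2 can be unpacked into the CONFIG_ARGS dictionary the docstring describes; split_config_args is a hypothetical helper, not the parser's actual code:

    def split_config_args(config_arg_str):
        """Turn 'var1=val1,var2=val2' into {'var1': 'val1', 'var2': 'val2'}."""
        config_args = {}
        if config_arg_str:
            for pair in config_arg_str.split(','):
                key, value = pair.split('=', 1)
                config_args[key.strip()] = value.strip()
        return config_args

    assert split_config_args('var1=val1,var2=val2') == {
        'var1': 'val1', 'var2': 'val2'}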
26 changes: 24 additions & 2 deletions python/paddle/trainer_config_helpers/layers.py
Expand Up @@ -120,6 +120,7 @@
'smooth_l1_cost',
'layer_support',
'multiplex_layer',
'dropout_layer',
]


@@ -3760,7 +3761,6 @@ def rnn_step(input):
     assert generated_input_index != -1
 
     gipt = input[generated_input_index]
-    assert isinstance(gipt, BaseGeneratedInput)
 
     gipt.bos_id = bos_id
     gipt.eos_id = eos_id
@@ -3780,7 +3780,6 @@ def __real_step__(*args):
         predict = gipt.after_real_step(step(*args))
 
         eos_layer(input=predict, eos_id=eos_id, name=eos_name)
-
         return predict
 
     tmp = recurrent_group(
@@ -5543,3 +5542,26 @@ def multiplex_layer(input, name=None, layer_attr=None):
         layer_type=LayerType.MULTIPLEX_LAYER,
         parents=input,
         size=l.config.size)
+
+
+############################################################################
+#                               Miscs                                      #
+############################################################################
+
+
+@wrap_name_default("dropout")
+def dropout_layer(input, dropout_rate, name=None):
+    """
+    @TODO(yuyang18): Add comments.
+    :param name:
+    :param input:
+    :param dropout_rate:
+    :return:
+    """
+    return addto_layer(
+        name=name,
+        input=input,
+        act=LinearActivation(),
+        bias_attr=False,
+        layer_attr=ExtraAttr(drop_rate=dropout_rate))
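
As a usage sketch (layer names and sizes here are invented; only dropout_layer itself comes from this diff), the relocated helper plugs into a trainer_config_helpers network definition like any other layer:

    # Hypothetical config snippet; data_layer/fc_layer are standard helpers.
    from paddle.trainer_config_helpers import *

    data = data_layer(name='input', size=128)
    hidden = fc_layer(input=data, size=64)
    # An identity addto_layer whose ExtraAttr carries the drop rate.
    dropped = dropout_layer(input=hidden, dropout_rate=0.5)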
31 changes: 4 additions & 27 deletions python/paddle/trainer_config_helpers/networks.py
@@ -26,10 +26,10 @@
 
 __all__ = [
     'sequence_conv_pool', 'simple_lstm', "simple_img_conv_pool",
-    "img_conv_bn_pool", 'dropout_layer', 'lstmemory_group', 'lstmemory_unit',
-    'small_vgg', 'img_conv_group', 'vgg_16_network', 'gru_unit', 'gru_group',
-    'simple_gru', 'simple_attention', 'simple_gru2', 'bidirectional_gru',
-    'text_conv_pool', 'bidirectional_lstm', 'inputs', 'outputs'
+    "img_conv_bn_pool", 'lstmemory_group', 'lstmemory_unit', 'small_vgg',
+    'img_conv_group', 'vgg_16_network', 'gru_unit', 'gru_group', 'simple_gru',
+    'simple_attention', 'simple_gru2', 'bidirectional_gru', 'text_conv_pool',
+    'bidirectional_lstm', 'inputs', 'outputs'
 ]
 
 ######################################################
@@ -1366,29 +1366,6 @@ def simple_attention(encoded_sequence,
         input=scaled, pooling_type=SumPooling(), name="%s_pooling" % name)
 
 
-############################################################################
-#                               Miscs                                      #
-############################################################################
-
-
-@wrap_name_default("dropout")
-def dropout_layer(input, dropout_rate, name=None):
-    """
-    @TODO(yuyang18): Add comments.
-    :param name:
-    :param input:
-    :param dropout_rate:
-    :return:
-    """
-    return addto_layer(
-        name=name,
-        input=input,
-        act=LinearActivation(),
-        bias_attr=False,
-        layer_attr=ExtraAttr(drop_rate=dropout_rate))
-
-
 def inputs(layers, *args):
     """
     Declare the inputs of network. The order of input should be as same as
29 changes: 17 additions & 12 deletions python/paddle/v2/layer.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 """
 `paddle.v2.layer` is a part of model config packages in paddle.v2. In API v2,
-we want to make Paddle a plain Python package. The model config package defined
+we want to make Paddle a plain Python package. The model config package defines
 the way how to configure a neural network topology in Paddle Python code.
 The primary usage shows below.
@@ -30,7 +30,6 @@
 # use prediction instance where needed.
 parameters = paddle.parameters.create(cost)
 """
-
 import collections
 import copy
 import re
@@ -44,16 +43,19 @@
 
 
 def __need_to_keep__(name):
-    if name in ['StaticInput', 'LayerType', 'layer_support']:
-        return False
-    return True
+    return name in [
+        'StaticInput', 'SubsequenceInput', 'GeneratedInput', 'LayerType',
+        'layer_support'
+    ]
 
 
 def __need_to_wrap__(name):
     return name not in ['AggregateLevel', 'ExpandLevel']
 
 
 def __convert_name__(inname):
+    if __need_to_keep__(inname):
+        return inname
     if inname == 'maxid_layer':
         return 'max_id'
     elif inname.endswith('memory') or inname.endswith(
@@ -74,8 +76,6 @@ def __convert_name__(inname):
 
 for name in v1_layers.__all__:
     obj = getattr(v1_layers, name)
-    if not __need_to_keep__(name):
-        continue
     new_name = __convert_name__(name)
     if callable(obj) and __need_to_wrap__(name):
         globals()[new_name] = __convert_to_v2__(obj, new_name, __name__)
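
Taken together, the two hunks above change the export logic: names in the keep list are now converted to themselves (re-exported under their original v1 names) rather than skipped outright. A minimal standalone sketch of the resulting dispatch, using only the branches visible in these hunks (the truncated 'memory' branch is omitted, and KEEP/convert_name are illustrative stand-ins for the real module-level code):

    KEEP = ['StaticInput', 'SubsequenceInput', 'GeneratedInput', 'LayerType',
            'layer_support']

    def convert_name(inname):
        if inname in KEEP:             # re-exported under the v1 name unchanged
            return inname
        if inname == 'maxid_layer':    # special-cased rename
            return 'max_id'
        return inname                  # remaining branches elided in this hunk

    assert convert_name('StaticInput') == 'StaticInput'
    assert convert_name('maxid_layer') == 'max_id'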
@@ -107,7 +107,7 @@ def __map_data_docstr__(doc):
 data = __convert_to_v2__(__data_layer__, 'name', __name__)
 
 
-def __get_used_layers__(output_layers, extra_layers=None):
+def __get_used_layers__(output_layers):
     layer_names = set()
     parents = {}

@@ -175,6 +175,8 @@ def __get_used_submodels__(layer_names):
     for submodel in cp.g_config.model_config.sub_models:
         if submodel.name in layer_names:
             submodel_names.add(submodel.name)
+            if submodel.is_recurrent_layer_group:
+                layer_names |= set(submodel.layer_names)
     return submodel_names


@@ -248,18 +250,21 @@ def parse_network(output_layers, extra_layers=None):
 
     model_config = ModelConfig()
     model_config.type = cp.g_config.model_config.type
+
+    for layer in output_layers:
+        model_config.output_layer_names.append(layer.full_name)
+        output_layer_names.add(layer.full_name)
 
     for l in cp.g_config.model_config.layers:
         if l.name not in layer_names:
             continue
         model_config.layers.extend([l])
         if l.type == 'data':
+            if l.name in model_config.output_layer_names:
+                continue
             model_config.input_layer_names.append(l.name)
             input_layer_names.add(l.name)
 
-    for layer in output_layers:
-        model_config.output_layer_names.append(layer.full_name)
-        output_layer_names.add(layer.full_name)
-
     for e in cp.g_config.model_config.evaluators:
         if e.name in evaluator_names:
             model_config.evaluators.extend([e])
3 changes: 2 additions & 1 deletion python/paddle/v2/topology.py
@@ -91,8 +91,9 @@ def data_type(self):
         [('image', dense_vector(768)), ('label', integer_value(10))]
         """
         data_layers = self.data_layers()
+
         return [(nm, data_layers[nm].data_type)
-                for nm in self.proto().input_layer_names]
+                for nm in self.proto().input_layer_names if nm in data_layers]
 
     def get_layer_proto(self, name):
         for layer in self.__model_config__.layers:
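
For context, a minimal illustration of what the added guard changes: only names that correspond to actual data layers survive the comprehension. The names and types below are invented stand-ins for the proto fields ('generated_ids' represents any non-data input layer):

    data_layers = {'image': 'dense_vector(768)', 'label': 'integer_value(10)'}
    input_layer_names = ['image', 'label', 'generated_ids']

    data_type = [(nm, data_layers[nm])
                 for nm in input_layer_names if nm in data_layers]
    assert data_type == [('image', 'dense_vector(768)'),
                         ('label', 'integer_value(10)')]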
