@@ -868,26 +868,34 @@ class MathNormLayer(_ConcatInputLayer):
   """
   layer_class = "math_norm"
 
-  def __init__(self, p, axes, keep_dims=False, **kwargs):
+  def __init__(self, p, axis=NotSpecified, axes=NotSpecified, keep_dims=False, **kwargs):
     """
     :param int|float p:
-    :param str|list[str] axes:
+    :param Dim|str|list[Dim|str] axis:
+    :param Dim|str|list[Dim|str] axes:
     :param bool keep_dims:
     """
+    if axis is not NotSpecified:
+      assert axes is NotSpecified
+      axes = axis
     super(MathNormLayer, self).__init__(**kwargs)
     x = self.input_data.copy()
     x.placeholder = tf.abs(x.placeholder) ** p
     self.output.placeholder = ReduceLayer.reduce(x, mode="sum", axes=axes, keep_dims=keep_dims) ** (1. / p)
 
   @classmethod
-  def get_out_data_from_opts(cls, name, sources, axes, keep_dims=False, **kwargs):
+  def get_out_data_from_opts(cls, name, sources, axis=NotSpecified, axes=NotSpecified, keep_dims=False, **kwargs):
     """
     :param str name:
     :param list[LayerBase] sources:
-    :param str|list[str] axes:
+    :param Dim|str|list[Dim|str] axis:
+    :param Dim|str|list[Dim|str] axes:
     :param bool keep_dims:
     :rtype: Data
     """
+    if axis is not NotSpecified:
+      assert axes is NotSpecified
+      axes = axis
     return ReduceLayer.get_out_data_from_opts(name=name, sources=sources, axes=axes, keep_dims=keep_dims)
 
 
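For reference, here is a minimal NumPy sketch of what the layer computes: the Lp norm `sum(|x|**p) ** (1/p)` over the reduction axis, mirroring the `tf.abs(...) ** p`, sum-reduce, and `** (1. / p)` steps in `__init__` above. The standalone function and its name are illustrative only, not part of RETURNN.

```python
import numpy as np


def math_norm(x, p, axis, keep_dims=False):
    """Lp norm over the given axis: (sum(|x|**p)) ** (1/p)."""
    return np.sum(np.abs(x) ** p, axis=axis, keepdims=keep_dims) ** (1.0 / p)


x = np.array([[3.0, 4.0], [1.0, 1.0]])
print(math_norm(x, p=2, axis=1))  # row-wise L2 norms: [5.0, 1.41421356...]
```

As the diff shows, `axis` is accepted as an alias for `axes` for consistency with other layers: if `axis` is given, `axes` must be left unspecified, and the value is forwarded to `ReduceLayer` unchanged.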