# `layer` (the base class) and `perceptron_head` are assumed to live in
# sibling modules of this file; the relative import paths below are
# placeholders to adjust to the actual package layout.
from .layer import layer              # assumed import path
from .head import perceptron_head     # assumed import path


class perceptron_layer(layer):
def __init__(
self,
m: int, n: int,
name: str = 'perceptron_layer',
channel_num: int = 1,
width: int = 1,
        # data expansion function parameters
with_bspline: bool = False,
with_taylor: bool = False, d: int = 2,
with_hybrid_expansion: bool = False,
# parameter reconciliation function parameters
with_dual_lphm: bool = False,
with_lorr: bool = False, r: int = 3,
enable_bias: bool = True,
# remainder function parameters
with_residual: bool = False,
# output processing function parameters
with_batch_norm: bool = False,
with_relu: bool = True,
with_dropout: bool = True, p: float = 0.5,
with_softmax: bool = True,
# other parameters
parameters_init_method: str = 'xavier_normal',
device: str = 'cpu', *args, **kwargs
):
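        """
        A multi-head perceptron layer composed of `width` parallel perceptron heads.

        The parameters mirror those of `perceptron_head`: `m` and `n` are the
        input and output dimensions; the `with_*` flags select the data
        expansion, parameter reconciliation, remainder, and output-processing
        functions; `d`, `r`, and `p` configure the Taylor expansion order, the
        low-rank reconciliation rank, and the dropout rate, respectively.
        """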
print('* perceptron_layer, width:', width)
        # Build `width` independent heads. A list comprehension is used here
        # instead of `[perceptron_head(...)] * width`, which would replicate
        # references to a single head object and silently share its parameters
        # across the whole layer.
        heads = [
            perceptron_head(
                m=m, n=n,
                channel_num=channel_num,
                # data expansion function parameters
                with_bspline=with_bspline,
                with_taylor=with_taylor, d=d,
                with_hybrid_expansion=with_hybrid_expansion,
                # parameter reconciliation function parameters
                with_dual_lphm=with_dual_lphm,
                with_lorr=with_lorr, r=r,
                enable_bias=enable_bias,
                # remainder function parameters
                with_residual=with_residual,
                # output processing function parameters
                with_batch_norm=with_batch_norm,
                with_relu=with_relu,
                with_dropout=with_dropout, p=p,
                with_softmax=with_softmax,
                # other parameters
                parameters_init_method=parameters_init_method,
                device=device, *args, **kwargs
            )
            for _ in range(width)
        ]
print('--------------------------')
super().__init__(name=name, m=m, n=n, heads=heads, device=device, *args, **kwargs)
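

if __name__ == '__main__':
    # A minimal usage sketch (an assumption, not a documented API): the layer
    # is expected to follow the torch.nn.Module call convention and map a
    # (batch, m) input to a (batch, n) output; the shapes here are illustrative.
    import torch

    demo_layer = perceptron_layer(m=784, n=10, width=2, channel_num=1)
    x = torch.rand(16, 784)    # a batch of 16 inputs with m=784 features each
    y = demo_layer(x)          # expected output shape: (16, 10)
    print(y.shape)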