class pgm_head(head):
    """Probabilistic-graphical-model head built on a combinatorial normal expansion.

    Wires together three components and hands them to the ``head`` base class:

    * data transformation: ``combinatorial_normal_expansion`` (degree ``d``,
      with or without replacement) — currently the only supported distribution;
    * parameter fabrication: ``lorr_reconciliation`` (low-rank, rank ``r``)
      when ``with_lorr`` is set, otherwise ``identity_reconciliation``;
    * remainder: ``linear_remainder`` when ``with_residual`` is set,
      otherwise ``zero_remainder``.
    """

    def __init__(
        self, m: int, n: int,
        # NOTE(review): default name says 'perceptron_head' although the class
        # is pgm_head — looks like a copy-paste slip, but callers may rely on
        # it, so it is left unchanged. TODO confirm intended default.
        name: str = 'perceptron_head',
        distribution: str = 'normal',
        d: int = 2, with_replacement: bool = False,
        enable_bias: bool = False,
        # optional parameters
        with_lorr: bool = False,
        r: int = 3,
        with_residual: bool = False,
        channel_num: int = 1,
        # other parameters
        parameters_init_method: str = 'xavier_normal',
        device: str = 'cpu', *args, **kwargs
    ):
        """Initialize the pgm head.

        Parameters
        ----------
        m, n : int
            Input and output dimensions of the head.
        name : str
            Head name passed through to the base class.
        distribution : str
            Distribution of the expansion; only ``'normal'`` is implemented.
        d : int
            Expansion degree of the combinatorial normal expansion.
        with_replacement : bool
            Whether combinations are drawn with replacement.
        enable_bias : bool
            Whether the reconciliation adds a bias term.
        with_lorr : bool
            Use low-rank reconciliation (rank ``r``) instead of identity.
        r : int
            Rank of the low-rank reconciliation.
        with_residual : bool
            Use a learnable linear remainder instead of a zero remainder.
        channel_num : int
            Number of channels in the head.
        parameters_init_method : str
            Parameter initialization method name.
        device : str
            Device for all components.

        Raises
        ------
        ValueError
            If ``distribution`` is anything other than ``'normal'``.
        """
        if distribution == 'normal':
            data_transformation = combinatorial_normal_expansion(
                d=d, with_replacement=with_replacement,
                device=device,
            )
        else:
            # Only the normal branch exists here; the previous message wrongly
            # advertised exponential/cauchy/gamma/laplace/chi2 as supported.
            raise ValueError(
                "pgm_head currently only supports the 'normal' distribution; "
                f"got {distribution!r}..."
            )

        if with_lorr:
            parameter_fabrication = lorr_reconciliation(
                r=r,
                enable_bias=enable_bias,
                device=device,
            )
        else:
            parameter_fabrication = identity_reconciliation(
                enable_bias=enable_bias,
                device=device,
            )

        if with_residual:
            remainder = linear_remainder(
                device=device
            )
        else:
            remainder = zero_remainder(
                device=device,
            )

        super().__init__(
            m=m, n=n, name=name,
            data_transformation=data_transformation,
            parameter_fabrication=parameter_fabrication,
            remainder=remainder,
            channel_num=channel_num,
            parameters_init_method=parameters_init_method,
            device=device, *args, **kwargs
        )