deepfold.modules.layer_norm.LayerNorm
=====================================

.. currentmodule:: deepfold.modules.layer_norm

.. autoclass:: LayerNorm

   .. automethod:: __init__

   .. rubric:: Methods

   .. autosummary::

      ~LayerNorm.__init__
      ~LayerNorm.add_module
      ~LayerNorm.apply
      ~LayerNorm.bfloat16
      ~LayerNorm.buffers
      ~LayerNorm.children
      ~LayerNorm.compile
      ~LayerNorm.cpu
      ~LayerNorm.cuda
      ~LayerNorm.double
      ~LayerNorm.eval
      ~LayerNorm.extra_repr
      ~LayerNorm.float
      ~LayerNorm.forward
      ~LayerNorm.get_buffer
      ~LayerNorm.get_extra_state
      ~LayerNorm.get_parameter
      ~LayerNorm.get_submodule
      ~LayerNorm.half
      ~LayerNorm.ipu
      ~LayerNorm.load_state_dict
      ~LayerNorm.modules
      ~LayerNorm.named_buffers
      ~LayerNorm.named_children
      ~LayerNorm.named_modules
      ~LayerNorm.named_parameters
      ~LayerNorm.parameters
      ~LayerNorm.register_backward_hook
      ~LayerNorm.register_buffer
      ~LayerNorm.register_forward_hook
      ~LayerNorm.register_forward_pre_hook
      ~LayerNorm.register_full_backward_hook
      ~LayerNorm.register_full_backward_pre_hook
      ~LayerNorm.register_load_state_dict_post_hook
      ~LayerNorm.register_module
      ~LayerNorm.register_parameter
      ~LayerNorm.register_state_dict_pre_hook
      ~LayerNorm.requires_grad_
      ~LayerNorm.set_extra_state
      ~LayerNorm.share_memory
      ~LayerNorm.state_dict
      ~LayerNorm.to
      ~LayerNorm.to_empty
      ~LayerNorm.train
      ~LayerNorm.type
      ~LayerNorm.xpu
      ~LayerNorm.zero_grad

   .. rubric:: Attributes

   .. autosummary::

      ~LayerNorm.T_destination
      ~LayerNorm.call_super_init
      ~LayerNorm.dump_patches
      ~LayerNorm.training