deepfold.modules.attention.SelfAttentionWithGate
================================================

.. currentmodule:: deepfold.modules.attention

.. autoclass:: SelfAttentionWithGate

   .. automethod:: __init__

   .. rubric:: Methods

   .. autosummary::

      ~SelfAttentionWithGate.__init__
      ~SelfAttentionWithGate.add_module
      ~SelfAttentionWithGate.apply
      ~SelfAttentionWithGate.bfloat16
      ~SelfAttentionWithGate.buffers
      ~SelfAttentionWithGate.children
      ~SelfAttentionWithGate.compile
      ~SelfAttentionWithGate.cpu
      ~SelfAttentionWithGate.cuda
      ~SelfAttentionWithGate.double
      ~SelfAttentionWithGate.eval
      ~SelfAttentionWithGate.extra_repr
      ~SelfAttentionWithGate.float
      ~SelfAttentionWithGate.forward
      ~SelfAttentionWithGate.get_buffer
      ~SelfAttentionWithGate.get_extra_state
      ~SelfAttentionWithGate.get_parameter
      ~SelfAttentionWithGate.get_submodule
      ~SelfAttentionWithGate.half
      ~SelfAttentionWithGate.ipu
      ~SelfAttentionWithGate.load_state_dict
      ~SelfAttentionWithGate.modules
      ~SelfAttentionWithGate.named_buffers
      ~SelfAttentionWithGate.named_children
      ~SelfAttentionWithGate.named_modules
      ~SelfAttentionWithGate.named_parameters
      ~SelfAttentionWithGate.parameters
      ~SelfAttentionWithGate.register_backward_hook
      ~SelfAttentionWithGate.register_buffer
      ~SelfAttentionWithGate.register_forward_hook
      ~SelfAttentionWithGate.register_forward_pre_hook
      ~SelfAttentionWithGate.register_full_backward_hook
      ~SelfAttentionWithGate.register_full_backward_pre_hook
      ~SelfAttentionWithGate.register_load_state_dict_post_hook
      ~SelfAttentionWithGate.register_module
      ~SelfAttentionWithGate.register_parameter
      ~SelfAttentionWithGate.register_state_dict_pre_hook
      ~SelfAttentionWithGate.requires_grad_
      ~SelfAttentionWithGate.set_extra_state
      ~SelfAttentionWithGate.share_memory
      ~SelfAttentionWithGate.state_dict
      ~SelfAttentionWithGate.to
      ~SelfAttentionWithGate.to_empty
      ~SelfAttentionWithGate.train
      ~SelfAttentionWithGate.type
      ~SelfAttentionWithGate.xpu
      ~SelfAttentionWithGate.zero_grad

   .. rubric:: Attributes

   .. autosummary::

      ~SelfAttentionWithGate.T_destination
      ~SelfAttentionWithGate.call_super_init
      ~SelfAttentionWithGate.dump_patches
      ~SelfAttentionWithGate.training