Module pysimt.layers.attention
Attention variants.
"""Attention variants."""
from .mlp import MLPAttention
from .dot import DotAttention
from .hierarchical import HierarchicalAttention
from .uniform import UniformAttention
from .scaled_dot import ScaledDotAttention
from .multihead import MultiheadAttention
def get_attention(type_):
    return {
        'mlp': MLPAttention,
        'dot': DotAttention,
        'hier': HierarchicalAttention,
        'uniform': UniformAttention,
        'multihead': MultiheadAttention,
        'scaled_dot': ScaledDotAttention,
    }[type_]
Sub-modules
pysimt.layers.attention.dot
pysimt.layers.attention.hierarchical
pysimt.layers.attention.mlp
pysimt.layers.attention.multihead
pysimt.layers.attention.scaled_dot
pysimt.layers.attention.uniform
Functions
def get_attention(type_)
Returns the attention layer class registered under the key type_ ('mlp', 'dot', 'hier', 'uniform', 'multihead', or 'scaled_dot'). Raises KeyError for unknown keys.
def get_attention(type_):
    return {
        'mlp': MLPAttention,
        'dot': DotAttention,
        'hier': HierarchicalAttention,
        'uniform': UniformAttention,
        'multihead': MultiheadAttention,
        'scaled_dot': ScaledDotAttention,
    }[type_]
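A minimal usage sketch of the factory lookup (the chosen key here is an example; constructor arguments are not shown because each attention class defines its own __init__ signature in its sub-module):

from pysimt.layers.attention import get_attention

# Look up the attention class registered under the 'scaled_dot' key.
attn_cls = get_attention('scaled_dot')

# Instantiate with whatever arguments the selected class expects
# (see the corresponding sub-module for the actual signature).
# attn = attn_cls(...)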