Commit d441ca7

final fix before release 0.0.1
giotto-learn committed Jun 17, 2022
1 parent 548c1f2 commit d441ca7
Showing 7 changed files with 14 additions and 4 deletions.
1 change: 1 addition & 0 deletions .github/workflows/python-publish.yml
@@ -26,4 +26,5 @@ jobs:
          TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
        run: |
          python setup.py sdist bdist_wheel
          twine check dist/*
          twine upload dist/*
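The new twine check step validates the built distributions before they are uploaded: it verifies that each archive's metadata is well-formed and that the long description will render on PyPI. Since the workflow runs these commands in sequence and aborts on the first failure, a distribution that fails the check never reaches twine upload.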
2 changes: 2 additions & 0 deletions gdeep/topology_layers/__init__.py
@@ -8,4 +8,6 @@
    'PersformerConfig',
    'Persformer',
    'PersformerWrapper',
    "attention",
    "pooling_layers"
]
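The two new entries change what a wildcard import exposes. A minimal sketch of the effect, assuming the package imports as laid out in this repository:

# __all__ in a package's __init__.py lists the names that
# "from package import *" re-exports; after this change the
# attention and pooling_layers submodules come along as well.
from gdeep.topology_layers import *
print(attention, pooling_layers)  # both names are now in scope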
4 changes: 4 additions & 0 deletions gdeep/topology_layers/attention/attention_base.py
@@ -8,7 +8,11 @@
# Type aliases
Tensor = torch.Tensor


class AttentionBase(Module, ABC):
    """Base class for attention layers. This class
    can be used in generic transformer models.
    """
    def __init__(self, config: PersformerConfig):
        super().__init__()
        self.config = config
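For context, a minimal sketch of how such a base class is typically extended; the name MyAttention and the pass-through forward are purely illustrative, not part of this commit:

# Hypothetical subclass of AttentionBase (illustrative only).
from gdeep.topology_layers import PersformerConfig
from gdeep.topology_layers.attention.attention_base import AttentionBase

class MyAttention(AttentionBase):
    def __init__(self, config: PersformerConfig):
        super().__init__(config)  # the base class stores config on self.config

    def forward(self, input, attention_mask=None):
        # a concrete layer would compute attention here
        return input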
1 change: 1 addition & 0 deletions gdeep/topology_layers/attention/induced_attention.py
@@ -10,6 +10,7 @@


class InducedAttention(AttentionBase):
"""Class implementing the induced attention"""
def __init__(self, config: PersformerConfig) -> None:
super().__init__(config)
raise NotImplementedError
@@ -24,8 +24,7 @@ def __init__(self,
            dropout=config.attention_probs_dropout_prob,
            batch_first=True)
        self.dropout = Dropout(config.hidden_dropout_prob)


    def forward(self,  # type: ignore
                input: Tensor,
                attention_mask: Optional[Tensor] = None
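The constructor arguments shown above (dropout, batch_first=True) appear to correspond to torch.nn.MultiheadAttention, which operates on batch-first tensors. A self-contained sketch of that underlying call, with made-up dimensions:

# Plain torch.nn.MultiheadAttention with batch-first tensors (dims illustrative).
import torch

mha = torch.nn.MultiheadAttention(embed_dim=32, num_heads=4,
                                  dropout=0.1, batch_first=True)
x = torch.randn(2, 16, 32)    # (batch, number of set elements, embedding dim)
out, weights = mha(x, x, x)   # self-attention; out has the same shape as x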
@@ -9,8 +9,11 @@
# Type aliases
Tensor = torch.Tensor


class AttentionPoolingLayer(Module):
    """This class implements the attention mechanism
    with a pooling layer to enforce permutation
    invariance"""
    config: PersformerConfig

    def __init__(self, config: PersformerConfig):
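The docstring's claim is worth unpacking: pooling the set through a learned query makes the output independent of the order of the input elements. A toy sketch of the idea, not gdeep's actual implementation; all names and dimensions here are illustrative:

# Toy attention pooling: a learned query attends over the set elements,
# and the attention-weighted sum does not depend on element order.
import torch

batch, n_points, hidden_dim = 2, 16, 32
x = torch.randn(batch, n_points, hidden_dim)       # a set of point features
query = torch.nn.Parameter(torch.randn(1, 1, hidden_dim))

scores = torch.softmax(query @ x.transpose(-2, -1) / hidden_dim ** 0.5, dim=-1)
pooled = scores @ x                                # (batch, 1, hidden_dim)

# Permuting the set leaves the pooled vector unchanged:
perm = torch.randperm(n_points)
scores_p = torch.softmax(query @ x[:, perm].transpose(-2, -1) / hidden_dim ** 0.5,
                         dim=-1)
assert torch.allclose(scores_p @ x[:, perm], pooled, atol=1e-6)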
2 changes: 1 addition & 1 deletion gdeep/topology_layers/pooling_layers/max_pooling_layer.py
@@ -10,7 +10,7 @@


class MaxPoolingLayer(Module):
    """Implementation of the max pooling layer"""
    config: PersformerConfig

    def __init__(self, config: PersformerConfig):
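Max pooling achieves the same permutation invariance more cheaply: the elementwise maximum over the set dimension ignores element order entirely. A one-line sketch under the same illustrative shapes as above:

# Toy max pooling over the set dimension (dims are illustrative).
import torch

x = torch.randn(2, 16, 32)    # (batch, set size, hidden dim)
pooled, _ = x.max(dim=1)      # (batch, hidden dim); order-independent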
