8000 Remove last_dim_softmax as it's deprecated and scheduled for removal. by schmmd · Pull Request #2207 · allenai/allennlp · GitHub
[go: up one dir, main page]
More Web Proxy on the site http://driver.im/
Skip to content
This repository was archived by the owner on Dec 16, 2022. It is now read-only.

Remove last_dim_softmax as it's deprecated and scheduled for removal. #2207

Merged
merged 1 commit into from
Dec 18, 2018
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 0 additions & 31 deletions allennlp/nn/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -475,37 +475,6 @@ def get_text_field_mask(text_field_tensors: Dict[str, torch.Tensor],
raise ValueError("Expected a tensor with dimension 2 or 3, found {}".format(smallest_dim))


def last_dim_softmax(tensor: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:
    """
    Deprecated alias for calling :func:`masked_softmax` with ``dim=-1``.

    Performs a masked softmax over the final dimension of ``tensor``, which is
    assumed to have shape ``(batch_size, ..., sequence_length)``.  ``mask``, if
    given, is assumed to have shape ``(batch_size, sequence_length)``.

    .. deprecated:: 0.6.1
        ``last_dim_softmax`` was deprecated in favor of just using ``masked_softmax`` in version
        0.6.1. It will be removed in version 0.8.
    """
    # Emit the deprecation notice, then delegate to the replacement function.
    deprecation_message = ("``last_dim_softmax`` was deprecated in favor of just using "
                           "``masked_softmax`` in version 0.6.1. It will be removed in version 0.8.")
    warnings.warn(deprecation_message, DeprecationWarning)
    return masked_softmax(tensor, mask, dim=-1)


def last_dim_log_softmax(tensor: torch.Tensor, mask: Optional[torch.Tensor] = None) -> torch.Tensor:
    """
    Deprecated alias for calling :func:`masked_log_softmax` with ``dim=-1``.

    Performs a masked log softmax over the final dimension of ``tensor``, which
    is assumed to have shape ``(batch_size, ..., sequence_length)``.  ``mask``,
    if given, is assumed to have shape ``(batch_size, sequence_length)``.

    .. deprecated:: 0.6.1
        ``last_dim_log_softmax`` was deprecated in favor of just using ``masked_log_softmax`` in
        version 0.6.1. It will be removed in version 0.8.
    """
    # Emit the deprecation notice, then delegate to the replacement function.
    deprecation_message = ("``last_dim_log_softmax`` was deprecated in favor of just using "
                           "``masked_log_softmax`` in version 0.6.1. It will be removed in version 0.8.")
    warnings.warn(deprecation_message, DeprecationWarning)
    return masked_log_softmax(tensor, mask, dim=-1)


def weighted_sum(matrix: torch.Tensor, attention: torch.Tensor) -> torch.Tensor:
"""
Takes a matrix of vectors and a set of weights over the rows in the matrix (which we call an
Expand Down
0