@@ -62,7 +62,7 @@
 
 If you are only interested in performant attention score modifications, please
 check out the `FlexAttention blog <https://pytorch.org/blog/flexattention/>`_ that
-contains a `gym of masks <https://github.com/pytorch-labs/attention-gym>`_.
+contains a `gym of masks <https://github.com/meta-pytorch/attention-gym>`_.
 
 """
 
@@ -675,7 +675,7 @@ def benchmark(func, *args, **kwargs):
 # of the ``MultiheadAttention`` layer that allows for arbitrary modifications
 # to the attention score. The example below takes the ``alibi_mod``
 # that implements `ALiBi <https://arxiv.org/abs/2108.12409>`_ from
-# `attention gym <https://github.com/pytorch-labs/attention-gym>`_ and uses it
+# `attention gym <https://github.com/meta-pytorch/attention-gym>`_ and uses it
 # with nested input tensors.
 
 from torch.nn.attention.flex_attention import flex_attention
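
For readers skimming the diff: the ``alibi_mod`` referenced in this hunk comes from the attention-gym repository linked above. As a rough orientation, a minimal sketch of an ALiBi-style ``score_mod`` for ``flex_attention`` could look like the following; the head count ``H``, tensor shapes, and slope formula here are illustrative assumptions, not the tutorial's or attention-gym's exact code.

import torch
from torch.nn.attention.flex_attention import flex_attention

H = 8  # number of attention heads (illustrative assumption)

def alibi_score_mod(score, b, h, q_idx, kv_idx):
    # ALiBi adds a head-specific linear penalty proportional to the
    # query/key distance; the slope 2^(-8*(h+1)/H) follows the ALiBi paper.
    slope = torch.exp2(-((h + 1) * 8.0 / H))
    return score + slope * (kv_idx - q_idx)

# Dense (batch, heads, seq_len, head_dim) inputs for demonstration; the
# tutorial's point is that the same score_mod also works with nested
# (jagged) input tensors.
q, k, v = (torch.randn(2, H, 128, 64) for _ in range(3))
out = flex_attention(q, k, v, score_mod=alibi_score_mod)

In practice ``flex_attention`` is typically wrapped in ``torch.compile`` so that the score modification is fused into a single attention kernel rather than run eagerly.
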
@@ -892,8 +892,8 @@ def forward(self, x):
 # etc. Further, there are several good examples of using various performant building blocks to
 # implement various transformer architectures. Some examples include
 #
-# * `gpt-fast <https://github.com/pytorch-labs/gpt-fast>`_
-# * `segment-anything-fast <https://github.com/pytorch-labs/segment-anything-fast>`_
+# * `gpt-fast <https://github.com/meta-pytorch/gpt-fast>`_
+# * `segment-anything-fast <https://github.com/meta-pytorch/segment-anything-fast>`_
 # * `lucidrains implementation of NaViT with nested tensors <https://github.com/lucidrains/vit-pytorch/blob/73199ab486e0fad9eced2e3350a11681db08b61b/vit_pytorch/na_vit_nested_tensor.py>`_
 # * `torchtune's implementation of VisionTransformer <https://github.com/pytorch/torchtune/blob/a8a64ec6a99a6ea2be4fdaf0cd5797b03a2567cf/torchtune/modules/vision_transformer.py#L16>`_