From 22750b87343c45a494c3362c88fa6cab8c924cd3 Mon Sep 17 00:00:00 2001
From: janEbert <janpublicebert@posteo.net>
Date: Thu, 2 Nov 2023 09:45:18 +0100
Subject: [PATCH] Rename library to `lee_transformers`

Short import can now be `leet`. This was done due to a name conflict
with Transformer iN Transformer and because I did not like the old
"Top-notch" name that much.
---
 README.md                                              | 10 +++++-----
 docs/Makefile                                          |  2 +-
 docs/index.rst                                         |  2 +-
 {tn_transformers => lee_transformers}/__init__.py      |  0
 .../attention_masks.py                                 |  0
 {tn_transformers => lee_transformers}/common.py        |  0
 .../initialization.py                                  |  0
 .../layers/__init__.py                                 |  0
 .../layers/adaptive_computation_time.py                |  0
 .../layers/glu_layers.py                               |  0
 .../layers/positional_encodings.py                     |  0
 .../layers/resampling.py                               |  0
 .../layers/rms_norm.py                                 |  0
 .../layers/rpe_mha.py                                  |  0
 .../layers/seq_pool.py                                 |  0
 {tn_transformers => lee_transformers}/layers/utils.py  |  0
 .../models/__init__.py                                 |  0
 {tn_transformers => lee_transformers}/models/common.py |  0
 .../models/lpe_transformer.py                          |  0
 .../models/rpe_transformer.py                          |  0
 .../models/transformer.py                              |  0
 {tn_transformers => lee_transformers}/schedules.py     |  0
 {tn_transformers => lee_transformers}/typing.py        |  0
 setup.py                                               |  2 +-
 test/test_layers.py                                    |  2 +-
 test/test_masks.py                                     |  2 +-
 test/test_models.py                                    |  4 ++--
 27 files changed, 12 insertions(+), 12 deletions(-)
 rename {tn_transformers => lee_transformers}/__init__.py (100%)
 rename {tn_transformers => lee_transformers}/attention_masks.py (100%)
 rename {tn_transformers => lee_transformers}/common.py (100%)
 rename {tn_transformers => lee_transformers}/initialization.py (100%)
 rename {tn_transformers => lee_transformers}/layers/__init__.py (100%)
 rename {tn_transformers => lee_transformers}/layers/adaptive_computation_time.py (100%)
 rename {tn_transformers => lee_transformers}/layers/glu_layers.py (100%)
 rename {tn_transformers => lee_transformers}/layers/positional_encodings.py (100%)
 rename {tn_transformers => lee_transformers}/layers/resampling.py (100%)
 rename {tn_transformers => lee_transformers}/layers/rms_norm.py (100%)
 rename {tn_transformers => lee_transformers}/layers/rpe_mha.py (100%)
 rename {tn_transformers => lee_transformers}/layers/seq_pool.py (100%)
 rename {tn_transformers => lee_transformers}/layers/utils.py (100%)
 rename {tn_transformers => lee_transformers}/models/__init__.py (100%)
 rename {tn_transformers => lee_transformers}/models/common.py (100%)
 rename {tn_transformers => lee_transformers}/models/lpe_transformer.py (100%)
 rename {tn_transformers => lee_transformers}/models/rpe_transformer.py (100%)
 rename {tn_transformers => lee_transformers}/models/transformer.py (100%)
 rename {tn_transformers => lee_transformers}/schedules.py (100%)
 rename {tn_transformers => lee_transformers}/typing.py (100%)

diff --git a/README.md b/README.md
index 4e44889..83a634a 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Top-notch Transformers
+# Lean, elegant, explorative Transformers
 
 This package implements Transformers using standard PyTorch code.
 Transformer models implement enhancements and encourage best-practices
@@ -34,7 +34,7 @@ python -m pip install git+<repository-uri>@pytorch-2.0
 ```
 import torch
 
-from tn_transformers import LPETransformer
+from lee_transformers import LPETransformer
 
 
 batch_size = 2
@@ -64,7 +64,7 @@ assert outputs.shape == (batch_size, output_seq_len, num_outputs)
 ```
 import torch
 
-from tn_transformers import LPETransformer
+from lee_transformers import LPETransformer
 
 
 batch_size = 2
@@ -90,7 +90,7 @@ assert outputs.shape == (batch_size, input_seq_len, num_outputs)
 ```
 import torch
 
-from tn_transformers import LPETransformer
+from lee_transformers import LPETransformer
 
 
 batch_size = 2
@@ -117,7 +117,7 @@ assert outputs.shape == (batch_size, input_seq_len, num_outputs)
 ```
 import torch
 
-from tn_transformers import LPETransformer
+from lee_transformers import LPETransformer
 
 
 batch_size = 2
diff --git a/docs/Makefile b/docs/Makefile
index 80bde15..e013d60 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -23,5 +23,5 @@ clean:
 # Catch-all target: route all unknown targets to Sphinx using the new
 # "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
 %: Makefile
-	@$(SPHINXPREBUILD) -o "$(GENERATEDDIR)" "$(SOURCEDIR)"/../tn_transformers
+	@$(SPHINXPREBUILD) -o "$(GENERATEDDIR)" "$(SOURCEDIR)"/../lee_transformers
 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/index.rst b/docs/index.rst
index 7a31f8e..25811a4 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,6 +1,6 @@
 .. TN-Transformers documentation master file.
 
-.. Assumes `sphinx-apidoc -o generated ../tn_transformers` to have been
+.. Assumes `sphinx-apidoc -o generated ../lee_transformers` to have been
    ran from this directory.
 
 TN-Transformers
diff --git a/tn_transformers/__init__.py b/lee_transformers/__init__.py
similarity index 100%
rename from tn_transformers/__init__.py
rename to lee_transformers/__init__.py
diff --git a/tn_transformers/attention_masks.py b/lee_transformers/attention_masks.py
similarity index 100%
rename from tn_transformers/attention_masks.py
rename to lee_transformers/attention_masks.py
diff --git a/tn_transformers/common.py b/lee_transformers/common.py
similarity index 100%
rename from tn_transformers/common.py
rename to lee_transformers/common.py
diff --git a/tn_transformers/initialization.py b/lee_transformers/initialization.py
similarity index 100%
rename from tn_transformers/initialization.py
rename to lee_transformers/initialization.py
diff --git a/tn_transformers/layers/__init__.py b/lee_transformers/layers/__init__.py
similarity index 100%
rename from tn_transformers/layers/__init__.py
rename to lee_transformers/layers/__init__.py
diff --git a/tn_transformers/layers/adaptive_computation_time.py b/lee_transformers/layers/adaptive_computation_time.py
similarity index 100%
rename from tn_transformers/layers/adaptive_computation_time.py
rename to lee_transformers/layers/adaptive_computation_time.py
diff --git a/tn_transformers/layers/glu_layers.py b/lee_transformers/layers/glu_layers.py
similarity index 100%
rename from tn_transformers/layers/glu_layers.py
rename to lee_transformers/layers/glu_layers.py
diff --git a/tn_transformers/layers/positional_encodings.py b/lee_transformers/layers/positional_encodings.py
similarity index 100%
rename from tn_transformers/layers/positional_encodings.py
rename to lee_transformers/layers/positional_encodings.py
diff --git a/tn_transformers/layers/resampling.py b/lee_transformers/layers/resampling.py
similarity index 100%
rename from tn_transformers/layers/resampling.py
rename to lee_transformers/layers/resampling.py
diff --git a/tn_transformers/layers/rms_norm.py b/lee_transformers/layers/rms_norm.py
similarity index 100%
rename from tn_transformers/layers/rms_norm.py
rename to lee_transformers/layers/rms_norm.py
diff --git a/tn_transformers/layers/rpe_mha.py b/lee_transformers/layers/rpe_mha.py
similarity index 100%
rename from tn_transformers/layers/rpe_mha.py
rename to lee_transformers/layers/rpe_mha.py
diff --git a/tn_transformers/layers/seq_pool.py b/lee_transformers/layers/seq_pool.py
similarity index 100%
rename from tn_transformers/layers/seq_pool.py
rename to lee_transformers/layers/seq_pool.py
diff --git a/tn_transformers/layers/utils.py b/lee_transformers/layers/utils.py
similarity index 100%
rename from tn_transformers/layers/utils.py
rename to lee_transformers/layers/utils.py
diff --git a/tn_transformers/models/__init__.py b/lee_transformers/models/__init__.py
similarity index 100%
rename from tn_transformers/models/__init__.py
rename to lee_transformers/models/__init__.py
diff --git a/tn_transformers/models/common.py b/lee_transformers/models/common.py
similarity index 100%
rename from tn_transformers/models/common.py
rename to lee_transformers/models/common.py
diff --git a/tn_transformers/models/lpe_transformer.py b/lee_transformers/models/lpe_transformer.py
similarity index 100%
rename from tn_transformers/models/lpe_transformer.py
rename to lee_transformers/models/lpe_transformer.py
diff --git a/tn_transformers/models/rpe_transformer.py b/lee_transformers/models/rpe_transformer.py
similarity index 100%
rename from tn_transformers/models/rpe_transformer.py
rename to lee_transformers/models/rpe_transformer.py
diff --git a/tn_transformers/models/transformer.py b/lee_transformers/models/transformer.py
similarity index 100%
rename from tn_transformers/models/transformer.py
rename to lee_transformers/models/transformer.py
diff --git a/tn_transformers/schedules.py b/lee_transformers/schedules.py
similarity index 100%
rename from tn_transformers/schedules.py
rename to lee_transformers/schedules.py
diff --git a/tn_transformers/typing.py b/lee_transformers/typing.py
similarity index 100%
rename from tn_transformers/typing.py
rename to lee_transformers/typing.py
diff --git a/setup.py b/setup.py
index 2a39bb8..bbbb6c7 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
 from setuptools import find_packages, setup
 
 setup(
-    name='tn_transformers',
+    name='lee_transformers',
     python_requires='>=3.8',
     version='0.1.6',
     packages=find_packages(),
diff --git a/test/test_layers.py b/test/test_layers.py
index cada1db..cada158 100644
--- a/test/test_layers.py
+++ b/test/test_layers.py
@@ -2,7 +2,7 @@ import unittest
 
 import torch as th
 
-from tn_transformers.layers import (
+from lee_transformers.layers import (
     AdaptiveComputationTime,
     DownscalingXPosEncoding,
     GBST,
diff --git a/test/test_masks.py b/test/test_masks.py
index a6f7166..676497d 100644
--- a/test/test_masks.py
+++ b/test/test_masks.py
@@ -2,7 +2,7 @@ import unittest
 
 import torch as th
 
-from tn_transformers.attention_masks import (
+from lee_transformers.attention_masks import (
     alibi,
     causal,
     full,
diff --git a/test/test_models.py b/test/test_models.py
index b837ffd..7156ce1 100644
--- a/test/test_models.py
+++ b/test/test_models.py
@@ -2,8 +2,8 @@ import unittest
 
 import torch as th
 
-from tn_transformers import LPETransformer, RPETransformer, Transformer
-from tn_transformers.layers import PositionalEncoding
+from lee_transformers import LPETransformer, RPETransformer, Transformer
+from lee_transformers.layers import PositionalEncoding
 from .common import check_grad
 
 
-- 
GitLab