From c680e98e68e2430ccd3e9db16ae6dc7143da2e7a Mon Sep 17 00:00:00 2001
From: janEbert <janpublicebert@posteo.net>
Date: Thu, 2 Nov 2023 09:55:42 +0100
Subject: [PATCH] Rename `typing` module to `typing_utils`

So that we do not have name clashes with Python's built-in `typing`
module.
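
For illustration, a hypothetical minimal sketch of the kind of clash
this rename avoids (the `demo.py` script name is made up). When the
directory containing the old `typing.py` ends up first on `sys.path`
(for example, because a script living in the package root is executed
directly), a plain `import typing` resolves to our module instead of
the standard library:

    # Hypothetical layout, names for illustration only:
    #
    #   lee_transformers/
    #       typing.py   <- old name; shadows the stdlib module once
    #       demo.py        lee_transformers/ is first on sys.path
    #
    # $ python lee_transformers/demo.py
    import typing
    print(typing.__file__)
    # Prints .../lee_transformers/typing.py rather than the path of
    # the standard library's typing module.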
---
 lee_transformers/layers/adaptive_computation_time.py | 2 +-
 lee_transformers/layers/glu_layers.py                | 2 +-
 lee_transformers/layers/resampling.py                | 2 +-
 lee_transformers/models/lpe_transformer.py           | 2 +-
 lee_transformers/models/rpe_transformer.py           | 2 +-
 lee_transformers/models/transformer.py               | 2 +-
 lee_transformers/{typing.py => typing_utils.py}      | 0
 7 files changed, 6 insertions(+), 6 deletions(-)
 rename lee_transformers/{typing.py => typing_utils.py} (100%)

diff --git a/lee_transformers/layers/adaptive_computation_time.py b/lee_transformers/layers/adaptive_computation_time.py
index 8209635..d0b5ab6 100644
--- a/lee_transformers/layers/adaptive_computation_time.py
+++ b/lee_transformers/layers/adaptive_computation_time.py
@@ -9,7 +9,7 @@ from typing import Any, cast, Optional, Tuple
 import torch
 import torch as th
 
-from ..typing import ActivationFn
+from ..typing_utils import ActivationFn
 
 
 class AdaptiveComputationTime(th.nn.Module):
diff --git a/lee_transformers/layers/glu_layers.py b/lee_transformers/layers/glu_layers.py
index 46e2c1e..d6ee6dc 100644
--- a/lee_transformers/layers/glu_layers.py
+++ b/lee_transformers/layers/glu_layers.py
@@ -3,7 +3,7 @@ from typing import Optional
 import torch
 import torch as th
 
-from ..typing import ActivationFn
+from ..typing_utils import ActivationFn
 
 
 class GLULayer(th.nn.Module):
diff --git a/lee_transformers/layers/resampling.py b/lee_transformers/layers/resampling.py
index 85f6dda..7565d04 100644
--- a/lee_transformers/layers/resampling.py
+++ b/lee_transformers/layers/resampling.py
@@ -6,7 +6,7 @@ import torch as th
 
 from .glu_layers import GLULayer
 from .rms_norm import RMSNorm
-from ..typing import ActivationFn
+from ..typing_utils import ActivationFn
 
 
 def shift_right(
diff --git a/lee_transformers/models/lpe_transformer.py b/lee_transformers/models/lpe_transformer.py
index 692f405..ffa2047 100644
--- a/lee_transformers/models/lpe_transformer.py
+++ b/lee_transformers/models/lpe_transformer.py
@@ -7,7 +7,7 @@ import torch as th
 
 from . import common
 from ..layers import GLULayer, RMSNorm
-from ..typing import ActivationFn, Decoder, DecoderLayer
+from ..typing_utils import ActivationFn, Decoder, DecoderLayer
 
 
 class LPETransformer(th.nn.Module):
diff --git a/lee_transformers/models/rpe_transformer.py b/lee_transformers/models/rpe_transformer.py
index 31bfce1..3fc4e0b 100644
--- a/lee_transformers/models/rpe_transformer.py
+++ b/lee_transformers/models/rpe_transformer.py
@@ -7,7 +7,7 @@ import torch as th
 
 from . import common
 from ..layers import GLULayer, RMSNorm, RPEMultiheadAttention
-from ..typing import ActivationFn, Decoder, DecoderLayer
+from ..typing_utils import ActivationFn, Decoder, DecoderLayer
 
 
 # Encoder self-attention block (private method of
diff --git a/lee_transformers/models/transformer.py b/lee_transformers/models/transformer.py
index d270ae3..151da33 100644
--- a/lee_transformers/models/transformer.py
+++ b/lee_transformers/models/transformer.py
@@ -7,7 +7,7 @@ import torch as th
 
 from . import common
 from ..layers import GLULayer, RMSNorm
-from ..typing import ActivationFn, Decoder, DecoderLayer
+from ..typing_utils import ActivationFn, Decoder, DecoderLayer
 
 
 class Transformer(th.nn.Module):
diff --git a/lee_transformers/typing.py b/lee_transformers/typing_utils.py
similarity index 100%
rename from lee_transformers/typing.py
rename to lee_transformers/typing_utils.py
-- 
GitLab