diff --git a/.github/workflows/mypy-type-check.yml b/.github/workflows/mypy-type-check.yml
index e87cf1c28..a22f339c5 100644
--- a/.github/workflows/mypy-type-check.yml
+++ b/.github/workflows/mypy-type-check.yml
@@ -39,4 +39,9 @@ jobs:
           tiatoolbox/__main__.py \
           tiatoolbox/typing.py \
           tiatoolbox/tiatoolbox.py \
-          tiatoolbox/utils/*.py
+          tiatoolbox/utils/*.py \
+          tiatoolbox/tools/__init__.py \
+          tiatoolbox/tools/stainextract.py \
+          tiatoolbox/tools/pyramid.py \
+          tiatoolbox/tools/tissuemask.py \
+          tiatoolbox/tools/graph.py
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 4ce2bf445..0ef67b49e 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -30,7 +30,7 @@ jobs:
         sudo apt update
         sudo apt-get install -y libopenslide-dev openslide-tools libopenjp2-7 libopenjp2-tools
         python -m pip install --upgrade pip
-        python -m pip install ruff==0.1.13 pytest pytest-cov pytest-runner
+        python -m pip install ruff==0.2.1 pytest pytest-cov pytest-runner
         pip install -r requirements/requirements.txt
     - name: Cache tiatoolbox static assets
       uses: actions/cache@v3
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index bc0650353..935cf209b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -68,7 +68,7 @@ repos:
         language: python
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.1.14
+    rev: v0.2.1
     hooks:
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix]
diff --git a/benchmarks/annotation_nquery.ipynb b/benchmarks/annotation_nquery.ipynb
index 458ecbb22..64a58794a 100644
--- a/benchmarks/annotation_nquery.ipynb
+++ b/benchmarks/annotation_nquery.ipynb
@@ -71,7 +71,7 @@
     "from shapely.geometry import Polygon\n",
     "\n",
     "sys.path.append(\"..\")  # If running locally without pypi installed tiatoolbox\n",
-    "from tiatoolbox.annotation.storage import (  # noqa: E402\n",
+    "from tiatoolbox.annotation.storage import (\n",
     "    Annotation,\n",
     "    AnnotationStore,\n",
     "    DictionaryStore,\n",
diff --git a/benchmarks/annotation_store.ipynb b/benchmarks/annotation_store.ipynb
index 128ad387c..6c8b83d65 100644
--- a/benchmarks/annotation_store.ipynb
+++ b/benchmarks/annotation_store.ipynb
@@ -207,8 +207,8 @@
     "\n",
     "sys.path.append(\"..\")  # If running locally without pypi installed tiatoolbox\n",
     "\n",
-    "from tiatoolbox import logger  # noqa: E402\n",
-    "from tiatoolbox.annotation.storage import (  # noqa: E402\n",
+    "from tiatoolbox import logger\n",
+    "from tiatoolbox.annotation.storage import (\n",
     "    Annotation,\n",
     "    DictionaryStore,\n",
     "    SQLiteStore,\n",
diff --git a/benchmarks/annotation_store_alloc.py b/benchmarks/annotation_store_alloc.py
index 41b85043f..d5b6df9cb 100644
--- a/benchmarks/annotation_store_alloc.py
+++ b/benchmarks/annotation_store_alloc.py
@@ -139,12 +139,12 @@ def __exit__(self: memray, *args: object) -> None:
                 # Intentionally blank.
 
 
-import numpy as np  # noqa: E402
-import psutil  # noqa: E402
-from shapely.geometry import Polygon  # noqa: E402
-from tqdm import tqdm  # noqa: E402
+import numpy as np
+import psutil
+from shapely.geometry import Polygon
+from tqdm import tqdm
 
-from tiatoolbox.annotation.storage import (  # noqa: E402
+from tiatoolbox.annotation.storage import (
     Annotation,
     DictionaryStore,
     SQLiteStore,
diff --git a/examples/full-pipelines/slide-graph.ipynb b/examples/full-pipelines/slide-graph.ipynb
index de6f2b60f..54d1cdbde 100644
--- a/examples/full-pipelines/slide-graph.ipynb
+++ b/examples/full-pipelines/slide-graph.ipynb
@@ -397,7 +397,7 @@
     "# https://docs.gdc.cancer.gov/Encyclopedia/pages/TCGA_Barcode/\n",
     "wsi_patient_codes = np.array([\"-\".join(v.split(\"-\")[:3]) for v in wsi_names])\n",
     "wsi_labels = np.array(\n",
-    "    [clinical_info[v] if v in clinical_info else np.nan for v in wsi_patient_codes],\n",
+    "    [clinical_info.get(v, np.nan) for v in wsi_patient_codes],\n",
     ")\n",
     "\n",
     "# * Filter the WSIs and paths that do not have labels\n",
diff --git a/pyproject.toml b/pyproject.toml
index dbf71f456..05463efe8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -72,7 +72,7 @@ search = 'TOOLBOX_VER: {current_version}'
 replace = 'TOOLBOX_VER: {new_version}'
 
 [tool.ruff]
-select = [
+lint.select = [
   "A",     # flake8-builtins
   "B",     # flake8-bugbear
   "D",     # pydocstyle, need to enable for docstrings check.
@@ -126,13 +126,13 @@ select = [
   "SLOT",  # flake8-slots
   "ASYNC", # flake8-async
 ]
-ignore = []
+lint.ignore = []
 # Allow Ruff to discover `*.ipynb` files.
 include = ["*.py", "*.pyi", "**/pyproject.toml", "*.ipynb"]
 
 # Allow autofix for all enabled rules (when `--fix` is provided).
-fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"]
-unfixable = []
+lint.fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"]
+lint.unfixable = []
 
 # Exclude a variety of commonly ignored directories.
 exclude = [
@@ -149,27 +149,27 @@ exclude = [
 ]
 
 # Ignore `F401` (import violations) in all `__init__.py` files.
-per-file-ignores = {"__init__.py" = ["F401"], "tests/*" = ["T201", "PGH001", "SLF001", "S101", "PLR2004"], "benchmarks/*" = ["T201", "INP001"], "pre-commit/*" = ["T201", "INP001"], "tiatoolbox/cli/*" = ["PLR0913"]}
+lint.per-file-ignores = {"__init__.py" = ["F401"], "tests/*" = ["T201", "PGH001", "SLF001", "S101", "PLR2004"], "benchmarks/*" = ["T201", "INP001"], "pre-commit/*" = ["T201", "INP001"], "tiatoolbox/cli/*" = ["PLR0913"]}
 
 # Same as Black.
 line-length = 88
 
 # Allow unused variables when underscore-prefixed.
-dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+lint.dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
 
 # Minimum Python version 3.8.
 target-version = "py38"
 
-[tool.ruff.mccabe]
+[tool.ruff.lint.mccabe]
 # Unlike Flake8, default to a complexity level of 10.
 max-complexity = 14
 
 # need to enable for docstrings check.
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
  # Use Google-style docstrings.
 convention = "google"
 
-[tool.ruff.pylint]
+[tool.ruff.lint.pylint]
 max-args = 10
 
 [tool.mypy]
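
Ruff 0.2 moved lint-specific settings out of the top-level `[tool.ruff]` table and into `[tool.ruff.lint]`, which is why the keys above gain a `lint.` prefix and the `mccabe`, `pydocstyle`, and `pylint` sections move under `lint`. A quick sketch, assuming Python 3.11+ for the stdlib `tomllib`, showing that the dotted keys parse into the nested table Ruff now reads:

    import tomllib

    with open("pyproject.toml", "rb") as fh:
        config = tomllib.load(fh)

    ruff_cfg = config["tool"]["ruff"]
    print("select" in ruff_cfg)            # False: no longer a top-level key
    print(ruff_cfg["lint"]["select"][:3])  # ['A', 'B', 'D']
    print(ruff_cfg["lint"]["pydocstyle"])  # {'convention': 'google'}
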
diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt
index 6911165c5..7c58e0703 100644
--- a/requirements/requirements_dev.txt
+++ b/requirements/requirements_dev.txt
@@ -12,7 +12,7 @@ pytest>=7.2.0
 pytest-cov>=4.0.0
 pytest-runner>=6.0
 pytest-xdist[psutil]
-ruff==0.1.13  # This will be updated by pre-commit bot to latest version
+ruff==0.2.1  # This will be updated by pre-commit bot to latest version
 toml>=0.10.2
 twine>=4.0.1
 wheel>=0.37.1
diff --git a/tests/test_dsl.py b/tests/test_dsl.py
index e09753556..ad811ac6e 100644
--- a/tests/test_dsl.py
+++ b/tests/test_dsl.py
@@ -101,7 +101,7 @@ class TestSQLite:
     @staticmethod
     def test_prop_or_prop() -> None:
         """Test OR operator between two prop accesses."""
-        query = eval(  # skipcq: PYL-W0123  # noqa: S307
+        query = eval(  # skipcq: PYL-W0123
             "(props['int'] == 2) | (props['int'] == 3)",
             SQL_GLOBALS,
             {},
@@ -143,7 +143,7 @@ def test_number_binary_operations(
         """Check that binary operations between ints does not error."""
         for op in BINARY_OP_STRINGS:
             query = f"2 {op} 2"
-            result = eval(  # skipcq: PYL-W0123  # noqa: S307
+            result = eval(  # skipcq: PYL-W0123
                 query,
                 eval_globals,
                 eval_locals,
@@ -159,7 +159,7 @@ def test_property_binary_operations(
         """Check that binary operations between properties does not error."""
         for op in BINARY_OP_STRINGS:
             query = f"props['int'] {op} props['int']"
-            result = eval(  # skipcq: PYL-W0123  # noqa: S307
+            result = eval(  # skipcq: PYL-W0123
                 query,
                 eval_globals,
                 eval_locals,
@@ -175,7 +175,7 @@ def test_r_binary_operations(
         """Test right hand binary operations between numbers and properties."""
         for op in BINARY_OP_STRINGS:
             query = f"2 {op} props['int']"
-            result = eval(  # skipcq: PYL-W0123  # noqa: S307
+            result = eval(  # skipcq: PYL-W0123
                 query,
                 eval_globals,
                 eval_locals,
@@ -191,7 +191,7 @@ def test_number_prefix_operations(
         """Test prefix operations on numbers."""
         for op in PREFIX_OP_STRINGS:
             query = f"{op}1"
-            result = eval(  # skipcq: PYL-W0123  # noqa: S307
+            result = eval(  # skipcq: PYL-W0123
                 query,
                 eval_globals,
                 eval_locals,
@@ -207,7 +207,7 @@ def test_property_prefix_operations(
         """Test prefix operations on properties."""
         for op in PREFIX_OP_STRINGS:
             query = f"{op}props['int']"
-            result = eval(  # skipcq: PYL-W0123  # noqa: S307
+            result = eval(  # skipcq: PYL-W0123
                 query,
                 eval_globals,
                 eval_locals,
@@ -222,7 +222,7 @@ def test_regex_nested_props(
     ) -> None:
         """Test regex on nested properties."""
         query = "props['nesting']['fib'][4]"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -237,7 +237,7 @@ def test_regex_str_props(
     ) -> None:
         """Test regex on string properties."""
         query = "regexp('Hello', props['string'])"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -252,7 +252,7 @@ def test_regex_str_str(
     ) -> None:
         """Test regex on string and string."""
         query = "regexp('Hello', 'Hello world!')"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -267,7 +267,7 @@ def test_regex_props_str(
     ) -> None:
         """Test regex on property and string."""
         query = "regexp(props['string'], 'Hello world!')"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -282,7 +282,7 @@ def test_regex_ignore_case(
     ) -> None:
         """Test regex with ignorecase flag."""
         query = "regexp('hello', props['string'], re.IGNORECASE)"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -297,7 +297,7 @@ def test_regex_no_match(
     ) -> None:
         """Test regex with no match."""
         query = "regexp('Yello', props['string'])"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -312,7 +312,7 @@ def test_has_key(
     ) -> None:
         """Test has_key function."""
         query = "has_key(props, 'foo')"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -327,7 +327,7 @@ def test_is_none(
     ) -> None:
         """Test is_none function."""
         query = "is_none(props['null'])"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -342,7 +342,7 @@ def test_is_not_none(
     ) -> None:
         """Test is_not_none function."""
         query = "is_not_none(props['int'])"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -357,7 +357,7 @@ def test_nested_has_key(
     ) -> None:
         """Test nested has_key function."""
         query = "has_key(props['dict'], 'a')"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -372,7 +372,7 @@ def test_list_sum(
     ) -> None:
         """Test sum function on a list."""
         query = "sum(props['list'])"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -387,7 +387,7 @@ def test_abs(
     ) -> None:
         """Test abs function."""
         query = "abs(props['neg'])"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -402,7 +402,7 @@ def test_not(
     ) -> None:
         """Test not operator."""
         query = "not props['bool']"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -417,7 +417,7 @@ def test_props_int_keys(
     ) -> None:
         """Test props with int keys."""
         query = "props['list'][1]"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -432,7 +432,7 @@ def test_props_get(
     ) -> None:
         """Test props.get function."""
         query = "is_none(props.get('foo'))"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -447,7 +447,7 @@ def test_props_get_default(
     ) -> None:
         """Test props.get function with default."""
         query = "props.get('foo', 42)"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -462,7 +462,7 @@ def test_in_list(
     ) -> None:
         """Test in operator for list."""
         query = "1 in props.get('list')"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -478,7 +478,7 @@ def test_has_key_exception(
         """Test has_key function with exception."""
         query = "has_key(1, 'a')"
         with pytest.raises(TypeError, match="(not iterable)|(Unsupported type)"):
-            _ = eval(  # skipcq: PYL-W0123  # noqa: S307
+            _ = eval(  # skipcq: PYL-W0123
                 query,
                 eval_globals,
                 eval_locals,
@@ -492,7 +492,7 @@ def test_logical_and(
     ) -> None:
         """Test logical and operator."""
         query = "props['bool'] & is_none(props['null'])"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -507,7 +507,7 @@ def test_logical_or(
     ) -> None:
         """Test logical or operator."""
         query = "props['bool'] | (props['int'] < 2)"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -522,7 +522,7 @@ def test_nested_logic(
     ) -> None:
         """Test nested logical operators."""
         query = "(props['bool'] | (props['int'] < 2)) & abs(props['neg'])"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -537,7 +537,7 @@ def test_contains_list(
     ) -> None:
         """Test contains operator for list."""
         query = "1 in props['list']"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -552,7 +552,7 @@ def test_contains_dict(
     ) -> None:
         """Test contains operator for dict."""
         query = "'a' in props['dict']"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -567,7 +567,7 @@ def test_contains_str(
     ) -> None:
         """Test contains operator for str."""
         query = "'Hello' in props['string']"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
@@ -582,7 +582,7 @@ def test_key_with_period(
     ) -> None:
         """Test key with period."""
         query = "props['dot.key']"
-        result = eval(  # skipcq: PYL-W0123  # noqa: S307
+        result = eval(  # skipcq: PYL-W0123
             query,
             eval_globals,
             eval_locals,
diff --git a/tests/test_wsireader.py b/tests/test_wsireader.py
index 390751b5d..76a5d3861 100644
--- a/tests/test_wsireader.py
+++ b/tests/test_wsireader.py
@@ -204,7 +204,7 @@ def read_bounds_level_consistency(wsi: WSIReader, bounds: IntBounds) -> None:
     # from interpolation when calculating the downsampled levels. This
     # adds some tolerance for the comparison.
     blurred = [cv2.GaussianBlur(img, (5, 5), cv2.BORDER_REFLECT) for img in resized]
-    as_float = [img.astype(np.float_) for img in blurred]
+    as_float = [img.astype(np.float64) for img in blurred]
 
     # Pair-wise check resolutions for mean squared error
     for i, a in enumerate(as_float):
@@ -2646,7 +2646,7 @@ def test_read_rect_level_consistency(wsi: WSIReader) -> None:
     # from interpolation when calculating the downsampled levels. This
     # adds some tolerance for the comparison.
     blurred = [cv2.GaussianBlur(img, (5, 5), cv2.BORDER_REFLECT) for img in resized]
-    as_float = [img.astype(np.float_) for img in blurred]
+    as_float = [img.astype(np.float64) for img in blurred]
 
     # Pair-wise check resolutions for mean squared error
     for i, a in enumerate(as_float):
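
`np.float_` was an alias of `np.float64` and is removed in NumPy 2.0, so the two level-consistency tests now name the dtype directly. A minimal sketch of the conversion they perform on each blurred image:

    import numpy as np

    img = np.zeros((4, 4, 3), dtype=np.uint8)  # stand-in for a blurred tile
    as_float = img.astype(np.float64)          # previously img.astype(np.float_)
    assert as_float.dtype == np.float64
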
diff --git a/tiatoolbox/annotation/storage.py b/tiatoolbox/annotation/storage.py
index 429beee80..19e3f959f 100644
--- a/tiatoolbox/annotation/storage.py
+++ b/tiatoolbox/annotation/storage.py
@@ -2028,7 +2028,9 @@ def transform(
         transformed_geoms = {
             key: transform(annotation.geometry) for key, annotation in self.items()
         }
-        self.patch_many(transformed_geoms.keys(), transformed_geoms.values())
+        _keys = transformed_geoms.keys()
+        _values = transformed_geoms.values()
+        self.patch_many(_keys, _values)
 
     def __del__(self: AnnotationStore) -> None:
         """Implements destructor method.
diff --git a/tiatoolbox/tools/graph.py b/tiatoolbox/tools/graph.py
index 6114b9b48..c3b138ddd 100644
--- a/tiatoolbox/tools/graph.py
+++ b/tiatoolbox/tools/graph.py
@@ -18,7 +18,7 @@
     from numpy.typing import ArrayLike
 
 
-def delaunay_adjacency(points: ArrayLike, dthresh: Number) -> list:
+def delaunay_adjacency(points: ArrayLike, dthresh: float) -> list:
     """Create an adjacency matrix via Delaunay triangulation from a list of coordinates.
 
     Points which are further apart than dthresh will not be connected.
@@ -28,7 +28,7 @@ def delaunay_adjacency(points: ArrayLike, dthresh: Number) -> list:
     Args:
         points (ArrayLike):
             An nxm list of coordinates.
-        dthresh (int):
+        dthresh (float):
             Distance threshold for triangulation.
 
     Returns:
@@ -57,6 +57,7 @@ def delaunay_adjacency(points: ArrayLike, dthresh: Number) -> list:
     tessellation = Delaunay(points)
     # Find all connected neighbours for each point in the set of
     # triangles. Starting with an empty dictionary.
+    triangle_neighbours: defaultdict
     triangle_neighbours = defaultdict(set)
     # Iterate over each triplet of point indexes which denotes a
     # triangle within the tessellation.
@@ -157,7 +158,7 @@ def edge_index_to_triangles(edge_index: ArrayLike) -> ArrayLike:
 
 def affinity_to_edge_index(
     affinity_matrix: torch.Tensor | ArrayLike,
-    threshold: Number = 0.5,
+    threshold: float = 0.5,
 ) -> torch.tensor | ArrayLike:
     """Convert an affinity matrix (similarity matrix) to an edge index.
 
@@ -233,12 +234,12 @@ def _umap_reducer(graph: dict[str, ArrayLike]) -> ArrayLike:
     def build(
         points: ArrayLike,
         features: ArrayLike,
-        lambda_d: Number = 3.0e-3,
-        lambda_f: Number = 1.0e-3,
-        lambda_h: Number = 0.8,
-        connectivity_distance: Number = 4000,
-        neighbour_search_radius: Number = 2000,
-        feature_range_thresh: Number | None = 1e-4,
+        lambda_d: float = 3.0e-3,
+        lambda_f: float = 1.0e-3,
+        lambda_h: float = 0.8,
+        connectivity_distance: int = 4000,
+        neighbour_search_radius: int = 2000,
+        feature_range_thresh: float | None = 1e-4,
     ) -> dict[str, ArrayLike]:
         """Build a graph via hybrid clustering in spatial and feature space.
 
@@ -416,7 +417,7 @@ def build(
 
     @classmethod
     def visualise(
-        cls: SlideGraphConstructor,
+        cls: type[SlideGraphConstructor],
         graph: dict[str, ArrayLike],
         color: ArrayLike | str | Callable | None = None,
         node_size: Number | ArrayLike | Callable = 25,
@@ -510,8 +511,8 @@ def visualise(
         # Plot the nodes
         plt.scatter(
             *nodes.T,
-            c=color(graph) if isinstance(color, Callable) else color,
-            s=node_size(graph) if isinstance(node_size, Callable) else node_size,
+            c=color(graph) if callable(color) else color,
+            s=node_size(graph) if callable(node_size) else node_size,
             zorder=2,
         )
 
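
In `visualise`, the `isinstance(..., Callable)` checks become plain `callable()` calls: `callable()` is the idiomatic runtime test, while `typing.Callable` is intended for annotations. An illustrative sketch of the dispatch pattern (the function and values here are made up):

    def node_size_fn(graph: dict) -> int:
        """Hypothetical per-graph sizing function."""
        return 10 * len(graph.get("coordinates", []))

    graph = {"coordinates": [(0, 0), (1, 1)]}
    for node_size in (25, node_size_fn):
        size = node_size(graph) if callable(node_size) else node_size
        print(size)  # 25, then 20
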
diff --git a/tiatoolbox/tools/pyramid.py b/tiatoolbox/tools/pyramid.py
index 1a797ebc3..a6506fb46 100644
--- a/tiatoolbox/tools/pyramid.py
+++ b/tiatoolbox/tools/pyramid.py
@@ -129,7 +129,7 @@ def level_count(self: TilePyramidGenerator) -> int:
         total_level_count = super_level_count + 1 + self.sub_tile_level_count
         return int(total_level_count)
 
-    def get_thumb_tile(self: TilePyramidGenerator) -> Image:
+    def get_thumb_tile(self: TilePyramidGenerator) -> Image.Image:
         """Return a thumbnail which fits the whole slide in one tile.
 
         The thumbnail output size has the longest edge equal to the tile
@@ -157,7 +157,7 @@ def get_tile(
         pad_mode: str = "constant",
         interpolation: str = "optimise",
         transparent_value: int | None = None,
-    ) -> Image:
+    ) -> Image.Image:
         """Get a tile at a given level and coordinate.
 
         Note that levels are in the reverse order of those in WSIReader.
@@ -223,7 +223,7 @@ def get_tile(
             )
             output_size = np.repeat(output_size, 2).astype(int)
             thumb = self.get_thumb_tile()
-            thumb.thumbnail(output_size)
+            thumb.thumbnail((output_size[0], output_size[1]))
             return thumb
         slide_dimensions = np.array(self.wsi.info.slide_dimensions)
         if all(slide_dimensions < [baseline_x, baseline_y]):
@@ -331,7 +331,7 @@ def save_tile(tile_path: Path, tile: Image.Image) -> None:
                 msg = "Unsupported compression for zip."
                 raise ValueError(msg)
 
-            archive = zipfile.ZipFile(
+            zip_archive = zipfile.ZipFile(
                 path,
                 mode="w",
                 compression=compression2enum[compression],
@@ -343,7 +343,7 @@ def save_tile(tile_path: Path, tile: Image.Image) -> None:
                 tile.save(bio, format="jpeg")
                 bio.seek(0)
                 data = bio.read()
-                archive.writestr(
+                zip_archive.writestr(
                     str(tile_path),
                     data,
                     compress_type=compression2enum[compression],
@@ -360,7 +360,7 @@ def save_tile(tile_path: Path, tile: Image.Image) -> None:
                 msg = "Unsupported compression for tar."
                 raise ValueError(msg)
 
-            archive = tarfile.TarFile.open(path, mode=compression2mode[compression])
+            tar_archive = tarfile.TarFile.open(path, mode=compression2mode[compression])
 
             def save_tile(tile_path: Path, tile: Image.Image) -> None:
                 """Write the tile to the output zip."""
@@ -368,9 +368,9 @@ def save_tile(tile_path: Path, tile: Image.Image) -> None:
                 tile.save(bio, format="jpeg")
                 bio.seek(0)
                 tar_info = tarfile.TarInfo(name=str(tile_path))
-                tar_info.mtime = time.time()
+                tar_info.mtime = int(time.time())
                 tar_info.size = bio.tell()
-                archive.addfile(tarinfo=tar_info, fileobj=bio)
+                tar_archive.addfile(tarinfo=tar_info, fileobj=bio)
 
         for level in range(self.level_count):
             for x, y in np.ndindex(self.tile_grid_size(level)):
@@ -378,13 +378,17 @@ def save_tile(tile_path: Path, tile: Image.Image) -> None:
                 tile_path = self.tile_path(level, x, y)
                 save_tile(tile_path, tile)
 
-        if container is not None:
-            archive.close()
+        if container == "zip":
+            zip_archive.close()
+        if container == "tar":
+            tar_archive.close()
 
     def __len__(self: TilePyramidGenerator) -> int:
         """Return length of instance attributes."""
-        return sum(
-            np.prod(self.tile_grid_size(level)) for level in range(self.level_count)
+        return int(
+            sum(
+                np.prod(self.tile_grid_size(level)) for level in range(self.level_count)
+            ),
         )
 
     def __iter__(self: TilePyramidGenerator) -> Iterator:
@@ -452,7 +456,7 @@ def tile_group(self: ZoomifyGenerator, level: int, x: int, y: int) -> int:
         cumulative_sum = sum(np.prod(self.tile_grid_size(n)) for n in range(level))
         index_in_level = np.ravel_multi_index((y, x), self.tile_grid_size(level)[::-1])
         tile_index = cumulative_sum + index_in_level
-        return tile_index // 256  # the tile group
+        return int(tile_index // 256)  # the tile group
 
     def tile_path(self: ZoomifyGenerator, level: int, x: int, y: int) -> Path:
         """Generate the Zoomify path for a specified tile.
@@ -537,7 +541,7 @@ def __init__(
             mapper = {key: (*color, 1) for key, color in zip(types, colors)}
             self.renderer.mapper = lambda x: mapper[x]
 
-    def get_thumb_tile(self: AnnotationTileGenerator) -> Image:
+    def get_thumb_tile(self: AnnotationTileGenerator) -> Image.Image:
         """Return a thumbnail which fits the whole slide in one tile.
 
         The thumbnail output size has the longest edge equal to the tile
@@ -587,7 +591,7 @@ def get_tile(
         pad_mode: str | None = None,
         interpolation: str | None = None,
         transparent_value: int | None = None,  # noqa: ARG002
-    ) -> Image:
+    ) -> Image.Image:
         """Render a tile at a given level and coordinate.
 
         Note that levels are in the reverse order of those in WSIReader.
@@ -646,20 +650,21 @@ def get_tile(
         scale = self.level_downsample(level)
         baseline_x = (x * self.tile_size * scale) - (self.overlap * scale)
         baseline_y = (y * self.tile_size * scale) - (self.overlap * scale)
-        coord = [baseline_x, baseline_y]
+        coord = (int(baseline_x), int(baseline_y))
         if level < self.sub_tile_level_count:
             output_size = self.output_tile_size // 2 ** (
                 self.sub_tile_level_count - level
             )
             output_size = np.repeat(output_size, 2).astype(int)
             thumb = self.get_thumb_tile()
-            thumb.thumbnail(output_size)
+            thumb.thumbnail((output_size[0], output_size[1]))
             return thumb
         slide_dimensions = np.array(self.info.slide_dimensions)
         if all(slide_dimensions < [baseline_x, baseline_y]):
             raise IndexError
 
-        bounds = locsize2bounds(coord, [self.output_tile_size * scale] * 2)
+        size = [self.output_tile_size * scale] * 2
+        bounds = locsize2bounds(coord, (int(size[0]), int(size[1])))
         tile = self.renderer.render_annotations(
             self.store,
             bounds,
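
The `thumbnail` calls above now pass an explicit `(width, height)` pair because Pillow's `Image.thumbnail` expects a plain size sequence, and mypy flags the NumPy array returned by `np.repeat`. A small sketch of the same pattern with stand-in sizes:

    import numpy as np
    from PIL import Image

    output_size = np.repeat(128, 2).astype(int)   # e.g. a sub-tile output size
    thumb = Image.new("RGB", (512, 256))          # stand-in for get_thumb_tile()
    thumb.thumbnail((output_size[0], output_size[1]))
    print(thumb.size)                             # (128, 64): resized in place, aspect kept
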
diff --git a/tiatoolbox/tools/tissuemask.py b/tiatoolbox/tools/tissuemask.py
index ac99490d8..c2ea74d80 100644
--- a/tiatoolbox/tools/tissuemask.py
+++ b/tiatoolbox/tools/tissuemask.py
@@ -18,11 +18,6 @@ class TissueMasker(ABC):
 
     """
 
-    def __init__(self: TissueMasker) -> None:
-        """Initialize :class:`TissueMasker`."""
-        super().__init__()
-        self.fitted = False
-
     @abstractmethod
     def fit(
         self: TissueMasker,
@@ -55,9 +50,6 @@ def transform(self: TissueMasker, images: np.ndarray) -> np.ndarray:
                 e.g. regions of tissue vs background.
 
         """
-        if not self.fitted:
-            msg = "Fit must be called before transform."
-            raise SyntaxError(msg)
 
     def fit_transform(
         self: TissueMasker,
@@ -76,7 +68,7 @@ def fit_transform(
             **kwargs (dict):
                 Other keyword arguments passed to fit.
         """
-        self.fit(images, **kwargs)
+        self.fit(images, masks=None, **kwargs)
         return self.transform(images)
 
 
@@ -97,13 +89,15 @@ class OtsuTissueMasker(TissueMasker):
 
     """
 
-    def __init__(self: TissueMasker) -> None:
+    def __init__(self: OtsuTissueMasker) -> None:
         """Initialize :class:`OtsuTissueMasker`."""
-        super().__init__()
+        self.threshold: float | None
+        self.fitted: bool
         self.threshold = None
+        self.fitted = False
 
     def fit(
-        self: TissueMasker,
+        self: OtsuTissueMasker,
         images: np.ndarray,
         masks: np.ndarray | None = None,  # noqa: ARG002
     ) -> None:
@@ -141,7 +135,7 @@ def fit(
 
         self.fitted = True
 
-    def transform(self: TissueMasker, images: np.ndarray) -> np.ndarray:
+    def transform(self: OtsuTissueMasker, images: np.ndarray) -> np.ndarray:
         """Create masks using the threshold found during :func:`fit`.
 
         Args:
@@ -155,7 +149,9 @@ def transform(self: TissueMasker, images: np.ndarray) -> np.ndarray:
                 channels).
 
         """
-        super().transform(images)
+        if not self.fitted:
+            msg = "Fit must be called before transform."
+            raise SyntaxError(msg)
 
         masks = []
         for image in images:
@@ -165,7 +161,7 @@ def transform(self: TissueMasker, images: np.ndarray) -> np.ndarray:
             mask = (grey < self.threshold).astype(bool)
             masks.append(mask)
 
-        return masks
+        return np.array(masks)
 
 
 class MorphologicalMasker(OtsuTissueMasker):
@@ -206,7 +202,7 @@ class MorphologicalMasker(OtsuTissueMasker):
     """
 
     def __init__(
-        self: TissueMasker,
+        self: MorphologicalMasker,
         *,
         mpp: float | tuple[float, float] | None = None,
         power: float | tuple[float, float] | None = None,
@@ -250,18 +246,19 @@ def __init__(
 
         # Convert MPP to an integer kernel_size
         if mpp is not None:
-            mpp = np.array(mpp)
-            if mpp.size != 2:  # noqa: PLR2004
-                mpp = mpp.repeat(2)
-            kernel_size = np.max([32 / mpp, [1, 1]], axis=0)
+            mpp_array = np.array(mpp)
+            if mpp_array.size != 2:  # noqa: PLR2004
+                mpp_array = mpp_array.repeat(2)
+            kernel_size = np.max([32 / mpp_array, [1, 1]], axis=0)
 
         # Ensure kernel_size is a length 2 numpy array
-        kernel_size = np.array(kernel_size)
-        if kernel_size.size != 2:  # noqa: PLR2004
-            kernel_size = kernel_size.repeat(2)
+        kernel_size_array = np.array(kernel_size)
+        if kernel_size_array.size != 2:  # noqa: PLR2004
+            kernel_size_array = kernel_size_array.repeat(2)
 
         # Convert to an integer double/ pair
-        self.kernel_size = tuple(np.round(kernel_size).astype(int))
+        self.kernel_size: tuple[int, int]
+        self.kernel_size = tuple(np.round(kernel_size_array).astype(int))
 
         # Create structuring element for morphological operations
         self.kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, self.kernel_size)
@@ -270,7 +267,7 @@ def __init__(
         if self.min_region_size is None:
             self.min_region_size = np.sum(self.kernel)
 
-    def transform(self: TissueMasker, images: np.ndarray) -> None:
+    def transform(self: MorphologicalMasker, images: np.ndarray) -> np.ndarray:
         """Create masks using the found threshold followed by morphological operations.
 
         Args:
@@ -284,7 +281,9 @@ def transform(self: TissueMasker, images: np.ndarray) -> None:
                 channels).
 
         """
-        super().transform(images)
+        if not self.fitted:
+            msg = "Fit must be called before transform."
+            raise SyntaxError(msg)
 
         results = []
         for image in images:
@@ -304,4 +303,4 @@ def transform(self: TissueMasker, images: np.ndarray) -> None:
             mask = cv2.morphologyEx(mask, cv2.MORPH_DILATE, self.kernel)
 
             results.append(mask.astype(bool))
-        return results
+        return np.array(results)
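
With the changes above, `transform` (and therefore `fit_transform`) returns a stacked boolean array rather than a Python list, and the fit-before-transform guard now lives in the concrete maskers. A short usage sketch with random patches purely for illustration:

    import numpy as np
    from tiatoolbox.tools.tissuemask import OtsuTissueMasker

    rng = np.random.default_rng(0)
    images = rng.integers(0, 255, size=(2, 64, 64, 3), dtype=np.uint8)  # fake RGB patches
    masker = OtsuTissueMasker()
    masks = masker.fit_transform(images)
    print(masks.shape, masks.dtype)  # (2, 64, 64) bool
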
diff --git a/tiatoolbox/utils/misc.py b/tiatoolbox/utils/misc.py
index 49ef21bad..4eb01b833 100644
--- a/tiatoolbox/utils/misc.py
+++ b/tiatoolbox/utils/misc.py
@@ -1308,7 +1308,7 @@ def dict_to_zarr(
     compressor = (
         kwargs["compressor"] if "compressor" in kwargs else numcodecs.Zstd(level=1)
     )
-    chunks = kwargs["chunks"] if "chunks" in kwargs else 10000
+    chunks = kwargs.get("chunks", 10000)
 
     # ensure proper zarr extension
     save_path = save_path.parent.absolute() / (save_path.stem + ".zarr")
diff --git a/tiatoolbox/utils/visualization.py b/tiatoolbox/utils/visualization.py
index 3e7c9da46..e75b7376c 100644
--- a/tiatoolbox/utils/visualization.py
+++ b/tiatoolbox/utils/visualization.py
@@ -633,6 +633,7 @@ def __init__(  # noqa: PLR0913
         self.secondary_cmap = secondary_cmap
         self.blur_radius = blur_radius
         self.function_mapper = function_mapper
+        self.blur: ImageFilter.GaussianBlur | None
         if blur_radius > 0:
             self.blur = ImageFilter.GaussianBlur(blur_radius)
             self.edge_thickness = 0
diff --git a/tiatoolbox/visualization/bokeh_app/main.py b/tiatoolbox/visualization/bokeh_app/main.py
index 608dc23a9..0f29a4aea 100644
--- a/tiatoolbox/visualization/bokeh_app/main.py
+++ b/tiatoolbox/visualization/bokeh_app/main.py
@@ -64,14 +64,14 @@
 
 # GitHub actions seems unable to find TIAToolbox unless this is here
 sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
-from tiatoolbox import logger  # noqa: E402
-from tiatoolbox.models.engine.nucleus_instance_segmentor import (  # noqa: E402
+from tiatoolbox import logger
+from tiatoolbox.models.engine.nucleus_instance_segmentor import (
     NucleusInstanceSegmentor,
 )
-from tiatoolbox.tools.pyramid import ZoomifyGenerator  # noqa: E402
-from tiatoolbox.utils.visualization import random_colors  # noqa: E402
-from tiatoolbox.visualization.ui_utils import get_level_by_extent  # noqa: E402
-from tiatoolbox.wsicore.wsireader import WSIReader  # noqa: E402
+from tiatoolbox.tools.pyramid import ZoomifyGenerator
+from tiatoolbox.utils.visualization import random_colors
+from tiatoolbox.visualization.ui_utils import get_level_by_extent
+from tiatoolbox.wsicore.wsireader import WSIReader
 
 if TYPE_CHECKING:  # pragma: no cover
     from bokeh.document import Document