diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a5cd891886ea7ab74146495d9c385790f8299472..12e8e7d77e0115c003b46804231f8087d8636ffa 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -32,7 +32,7 @@ before_script:
 .parallel:
   parallel:
     matrix:
-      - IMAGE: [python:3.10, python:3.11, python:3.12]
+      - IMAGE: [python:3.8, python:3.9, python:3.10, python:3.11, python:3.12]
 
 test:
   stage: test
diff --git a/epicsarchiver/common/base_archiver.py b/epicsarchiver/common/base_archiver.py
index 4757879a1866a08046caaf079daa9d926d062f12..aeb64deba532678c0b9fb79a2ab60fe16de81d72 100644
--- a/epicsarchiver/common/base_archiver.py
+++ b/epicsarchiver/common/base_archiver.py
@@ -1,5 +1,7 @@
 """Base Archiver Client module for get, post etc requests."""
 
+from __future__ import annotations
+
 import logging
 import urllib.parse
 from typing import Any
diff --git a/epicsarchiver/common/command.py b/epicsarchiver/common/command.py
index 2564f75a11f30a03ed7143a6884fcf0e9b0e6476..3d3b06b14cb492a34c5c785485ac11c24f9d5d71 100644
--- a/epicsarchiver/common/command.py
+++ b/epicsarchiver/common/command.py
@@ -1,10 +1,15 @@
 """Shared Command methods module."""
 
+from __future__ import annotations
+
 import logging
+from typing import TYPE_CHECKING
 
-import click
 from rich.logging import RichHandler
 
+if TYPE_CHECKING:
+    import click
+
 LOG: logging.Logger = logging.getLogger(__name__)
 
 
diff --git a/epicsarchiver/mgmt/archive_files.py b/epicsarchiver/mgmt/archive_files.py
index f0629339e4ec3fd30ceacc4a44aa973d41bddc79..aa9aa8c0e68c301df9bda6c35056933dca787685 100644
--- a/epicsarchiver/mgmt/archive_files.py
+++ b/epicsarchiver/mgmt/archive_files.py
@@ -1,10 +1,14 @@
 """Handle parsing files of lists of PVs to submit for archiver operations."""
 
+from __future__ import annotations
+
 import itertools
 import logging
-from collections.abc import Generator
 from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from collections.abc import Generator
 
 LOG: logging.Logger = logging.getLogger(__name__)
 
@@ -26,7 +30,7 @@ def parse_archive_file(
         Generator[dict[str, str], None, None]: produces
         dictionary with keys {"pv", "policy", "appliance"}
     """
-    with filename.open(encoding="locale") as file:
+    with filename.open() as file:
         LOG.debug("PARSE archive file %s", filename)
         for line in file:
             stripped_line = line.strip()
@@ -72,7 +76,7 @@ def parse_rename_file(filename: Path) -> Generator[tuple[str, str], None, None]:
     Yields:
         Generator[tuple[str, str], None, None]: produces a pair old_pv_name, new_pv_name
     """
-    with filename.open(encoding="locale") as f:
+    with filename.open() as f:
         for line in f:
             if parsed_line := _parse_rename_line(line):
                 yield parsed_line
diff --git a/epicsarchiver/mgmt/archiver_mgmt.py b/epicsarchiver/mgmt/archiver_mgmt.py
index 79702625468dfa22598099dbe74329e1a522d8bc..3734f5e05a65c57c507834b45166cf909dda6de3 100644
--- a/epicsarchiver/mgmt/archiver_mgmt.py
+++ b/epicsarchiver/mgmt/archiver_mgmt.py
@@ -1,8 +1,10 @@
 """ArchiverMgmt module."""
 
+from __future__ import annotations
+
 import logging
 from pathlib import Path
-from typing import Any, cast
+from typing import Any, Dict, List, cast
 
 from epicsarchiver.common.base_archiver import BaseArchiverAppliance
 from epicsarchiver.mgmt import archive_files
@@ -42,7 +44,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         """
         # http://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/GetAllExpandedPVNames.html
         r = self._get("/getAllExpandedPVNames")
-        return cast(list[str], r.json())
+        return cast(List[str], r.json())
 
     def get_all_pvs(
         self,
@@ -72,7 +74,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         if regex is not None:
             params["regex"] = regex
         r = self._get("/getAllPVs", params=params)
-        return cast(list[str], r.json())
+        return cast(List[str], r.json())
 
     def get_pv_status(self, pv: str | list[str]) -> list[dict[str, str]]:
         """Return the status of a PV.
@@ -87,7 +89,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         """
         # http://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/GetPVStatusAction.html
         r = self._get("/getPVStatus", params={"pv": pv})
-        return cast(list[dict[str, str]], r.json())
+        return cast(List[Dict[str, str]], r.json())
 
     def get_pv_details(self, pv: str | list[str]) -> list[dict[str, str]]:
         """Return the details of a PV.
@@ -102,7 +104,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         """
         # http://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/GetPVDetailsAction.html
         r = self._get("/getPVDetails", params={"pv": pv})
-        return cast(list[dict[str, str]], r.json())
+        return cast(List[Dict[str, str]], r.json())
 
     def get_pv_status_from_files(
         self,
@@ -137,7 +139,23 @@ class ArchiverMgmt(BaseArchiverAppliance):
         if isinstance(pvs, list):
             pvs = ",".join(pvs)
         r = self._post("/unarchivedPVs", data={"pv": pvs})
-        return cast(list[str], r.json())
+        return cast(List[str], r.json())
+
+    def get_archived_pvs(self, pvs: str | list[str]) -> list[str]:
+        """Return the list of archived PVs out of PVs specified in pvs.
+
+        Args:
+            pvs: a list of PVs either in CSV format or as a python
+                string list
+
+        Returns:
+            list of archived PV names
+        """
+        # https://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/ArchivedPVsAction.html
+        if isinstance(pvs, list):
+            pvs = ",".join(pvs)
+        r = self._post("/archivedPVs", data={"pv": pvs})
+        return cast(List[str], r.json())
 
     def get_unarchived_pvs_from_files(
         self,
@@ -175,7 +193,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         params = {"pv": pv}
         params.update(kwargs)
         r = self._get("/archivePV", params=params)
-        return cast(list[dict[str, str]], r.json())
+        return cast(List[Dict[str, str]], r.json())
 
     def archive_pvs(self, pvs: list[dict[str, str]]) -> list[dict[str, str]]:
         """Archive a list of PVs.
@@ -188,7 +206,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         """
         # http://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/ArchivePVAction.html
         r = self._post("/archivePV", json=pvs)
-        return cast(list[dict[str, str]], r.json())
+        return cast(List[Dict[str, str]], r.json())
 
     def archive_pvs_from_files(
         self,
@@ -221,8 +239,8 @@ class ArchiverMgmt(BaseArchiverAppliance):
         # http://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/PauseArchivingPV.html
         response = self._get_or_post("/pauseArchivingPV", pv)
         if "," not in pv:
-            return cast(dict[str, str], response)
-        return cast(list[dict[str, str]], response)
+            return cast(Dict[str, str], response)
+        return cast(List[Dict[str, str]], response)
 
     def resume_pv(self, pv: str) -> list[dict[str, str]] | dict[str, str]:
         """Resume the archiving of a PV(s).
@@ -237,8 +255,8 @@ class ArchiverMgmt(BaseArchiverAppliance):
         # http://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/ResumeArchivingPV.html
         response = self._get_or_post("/resumeArchivingPV", pv)
         if "," not in pv:
-            return cast(dict[str, str], response)
-        return cast(list[dict[str, str]], response)
+            return cast(Dict[str, str], response)
+        return cast(List[Dict[str, str]], response)
 
     def abort_pv(self, pv: str) -> list[str]:
         """Abort any pending requests for archiving this PV.
@@ -251,7 +269,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         """
         # http://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/AbortArchiveRequest.html
         r = self._get("/abortArchivingPV", params={"pv": pv})
-        return cast(list[str], r.json())
+        return cast(List[str], r.json())
 
     def delete_pv(
         self,
@@ -272,7 +290,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         """
         # http://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/DeletePV.html
         r = self._get("/deletePV", params={"pv": pv, "delete_data": delete_data})
-        return cast(list[str], r.json())
+        return cast(List[str], r.json())
 
     def rename_pv(self, pv: str, newname: str) -> dict[str, str]:
         """Rename this pv to a new name.
@@ -288,7 +306,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         """
         # https://slacmshankar.github.io/epicsarchiver_docs/api/org/epics/archiverappliance/mgmt/bpl/RenamePVAction.html
         r = self._get("/renamePV", params={"pv": pv, "newname": newname})
-        return cast(dict[str, str], r.json())
+        return cast(Dict[str, str], r.json())
 
     def update_pv(
         self,
@@ -311,7 +329,7 @@ class ArchiverMgmt(BaseArchiverAppliance):
         if samplingmethod:
             params["samplingmethod"] = samplingmethod
         r = self._get("/changeArchivalParameters", params=params)
-        return cast(list[str], r.json())
+        return cast(List[str], r.json())
 
     def pause_rename_resume_pv(self, pv: str, new: str) -> None:
         """Pause, rename and resume a PV.
diff --git a/epicsarchiver/mgmt/command.py b/epicsarchiver/mgmt/command.py
index 079df62f4da3eed46d4caba1392a3c4f1442b854..c21fb293f80e665acf88eb316500d0a063f116ce 100644
--- a/epicsarchiver/mgmt/command.py
+++ b/epicsarchiver/mgmt/command.py
@@ -1,5 +1,7 @@
 """Command module."""
 
+from __future__ import annotations
+
 import logging
 from typing import TYPE_CHECKING
 
diff --git a/epicsarchiver/retrieval/archive_event.py b/epicsarchiver/retrieval/archive_event.py
index 188ba4ee0535c85bbfaa767f6e02254a08ee491e..3d5d425d07ba96ddb15d384aac380f44dcf845bf 100644
--- a/epicsarchiver/retrieval/archive_event.py
+++ b/epicsarchiver/retrieval/archive_event.py
@@ -1,5 +1,7 @@
 """Archive Event module for the ArchiveEvent class."""
 
+from __future__ import annotations
+
 from dataclasses import dataclass
 from datetime import datetime as pydt
 
diff --git a/epicsarchiver/retrieval/archiver_retrieval.py b/epicsarchiver/retrieval/archiver_retrieval.py
index 4c0b3020f28bda86f8c84b65535f34b005f29ccd..1426f6d1fd588e896a3e99b5d271d07663736d7d 100644
--- a/epicsarchiver/retrieval/archiver_retrieval.py
+++ b/epicsarchiver/retrieval/archiver_retrieval.py
@@ -1,16 +1,20 @@
 """Archiver Retrieval methods."""
 
+from __future__ import annotations
+
 import datetime
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 import pandas as pd
 from dateutil import parser
-from requests import Response
 
 from epicsarchiver.common.base_archiver import BaseArchiverAppliance
 from epicsarchiver.retrieval.archive_event import ArchiveEvent, dataframe_from_events
 from epicsarchiver.retrieval.pb import parse_pb_data
 
+if TYPE_CHECKING:
+    from requests import Response
+
 
 def format_date(date_or_str: datetime.datetime | str) -> str:
     """Return a string representing the date and time in ISO 8601 format.
diff --git a/epicsarchiver/retrieval/pb.py b/epicsarchiver/retrieval/pb.py
index 7b4f83e231d59cb1e3977822707f8ce7391adb09..6fc7476cc88d0a5872c4d6411d462d35b6deaa32 100644
--- a/epicsarchiver/retrieval/pb.py
+++ b/epicsarchiver/retrieval/pb.py
@@ -27,7 +27,7 @@ import collections
 import logging as log
 from collections import OrderedDict
 from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Union
 
 import pandas as pd
 from pandas import Timestamp
@@ -87,26 +87,26 @@ PB_REPLACEMENTS_UNESCAPING = collections.OrderedDict([
     (ESC_BYTE + b"\x01", ESC_BYTE),
 ])
 
-EeScalarEvent = (
-    ee.ScalarString
-    | ee.ScalarShort
-    | ee.ScalarFloat
-    | ee.ScalarEnum
-    | ee.ScalarByte
-    | ee.ScalarInt
-    | ee.ScalarDouble
-)
-EeVectorEvent = (
-    ee.VectorString
-    | ee.VectorShort
-    | ee.VectorFloat
-    | ee.VectorEnum
-    | ee.VectorChar
-    | ee.VectorInt
-    | ee.VectorDouble
-    | ee.V4GenericBytes
-)
-EeEvent = EeScalarEvent | EeVectorEvent
+EeScalarEvent = Union[
+    ee.ScalarString,
+    ee.ScalarShort,
+    ee.ScalarFloat,
+    ee.ScalarEnum,
+    ee.ScalarByte,
+    ee.ScalarInt,
+    ee.ScalarDouble,
+]
+EeVectorEvent = Union[
+    ee.VectorString,
+    ee.VectorShort,
+    ee.VectorFloat,
+    ee.VectorEnum,
+    ee.VectorChar,
+    ee.VectorInt,
+    ee.VectorDouble,
+    ee.V4GenericBytes,
+]
+EeEvent = Union[EeScalarEvent, EeVectorEvent]
 
 
 def unescape_bytes(byte_seq: bytes) -> bytes:
@@ -244,13 +244,15 @@ def _event_from_line(line: bytes, pv: str, year: int, event_type: int) -> Archiv
     val = event.val
     if isinstance(
         event,
-        ee.VectorDouble
-        | ee.VectorEnum
-        | ee.VectorFloat
-        | ee.VectorInt
-        | ee.VectorShort
-        | ee.VectorString,
-    ):
+        (
+            ee.VectorDouble,
+            ee.VectorEnum,
+            ee.VectorFloat,
+            ee.VectorInt,
+            ee.VectorShort,
+            ee.VectorString,
+        ),
+    ):  # NOTE: purposefully not including all Vector types here
         vector_val = list(val)
         val = vector_val
     return ArchiveEvent(
diff --git a/epicsarchiver/statistics/_external_stats.py b/epicsarchiver/statistics/_external_stats.py
index 38f591d4874b459c9666c25073cdbf39e73eca70..5ecd4328bd37d01d06db953785d5dbca29fdd668 100644
--- a/epicsarchiver/statistics/_external_stats.py
+++ b/epicsarchiver/statistics/_external_stats.py
@@ -1,13 +1,13 @@
+from __future__ import annotations
+
 import asyncio
 import logging
 from os import listdir
-from pathlib import Path
+from typing import TYPE_CHECKING
 
 from numpy import mean
 
-from epicsarchiver.epicsarchiver import ArchiverAppliance
 from epicsarchiver.mgmt.archive_files import get_pvs_from_files
-from epicsarchiver.statistics.channelfinder import ChannelFinder
 from epicsarchiver.statistics.gitlab import Gitlab
 from epicsarchiver.statistics.stat_responses import (
     UNKNOWN_IOC,
@@ -17,6 +17,12 @@ from epicsarchiver.statistics.stat_responses import (
     NoConfigResponse,
 )
 
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from epicsarchiver.epicsarchiver import ArchiverAppliance
+    from epicsarchiver.statistics.channelfinder import ChannelFinder
+
 LOG: logging.Logger = logging.getLogger(__name__)
 
 
diff --git a/epicsarchiver/statistics/archiver_statistics.py b/epicsarchiver/statistics/archiver_statistics.py
index b3d417248fcab3ebf9c6664d4b37888c0a21db55..ecd4f95165e4b1599968db1c3ac7338f96a8a4dc 100644
--- a/epicsarchiver/statistics/archiver_statistics.py
+++ b/epicsarchiver/statistics/archiver_statistics.py
@@ -1,5 +1,7 @@
 """Archiver Statistics module."""
 
+from __future__ import annotations
+
 from epicsarchiver.common.base_archiver import BaseArchiverAppliance
 from epicsarchiver.statistics.stat_responses import (
     DisconnectedPVsResponse,
diff --git a/epicsarchiver/statistics/async_service.py b/epicsarchiver/statistics/async_service.py
index 85ecffa416c8128f2176fb79e2d7465726f50056..ac5ac6d2b0c837724fca8460c49d928336b3b0ac 100644
--- a/epicsarchiver/statistics/async_service.py
+++ b/epicsarchiver/statistics/async_service.py
@@ -1,13 +1,17 @@
 """Module to cover the ServiceClient for doing http calls."""
 
+from __future__ import annotations
+
 import asyncio
 import logging
 import urllib.parse
-from collections.abc import Mapping
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
 from aiohttp import ClientResponse, ClientSession
 
+if TYPE_CHECKING:
+    from collections.abc import Mapping
+
 LOG: logging.Logger = logging.getLogger(__name__)
 
 
diff --git a/epicsarchiver/statistics/channelfinder.py b/epicsarchiver/statistics/channelfinder.py
index 602848903666bfc8609a8c469d0e44ab5b09b6ea..95f704796506d5a96ebc5397be144d7a54b5543d 100644
--- a/epicsarchiver/statistics/channelfinder.py
+++ b/epicsarchiver/statistics/channelfinder.py
@@ -1,5 +1,7 @@
 """Minimal Channel Finder interface for calculating archiver statistics."""
 
+from __future__ import annotations
+
 import asyncio
 import logging
 from dataclasses import dataclass
@@ -26,7 +28,7 @@ class Channel:
     tags: list[str]
 
     @classmethod
-    def from_json(cls, json: dict[str, Any]) -> "Channel":
+    def from_json(cls, json: dict[str, Any]) -> Channel:
         """Convert from json direct from channel finder to a "Channel".
 
         Args:
diff --git a/epicsarchiver/statistics/command.py b/epicsarchiver/statistics/command.py
index 683d58bcd6a30ed52163ca1f2a36b51bccd9dcc7..9f01f8fe74a985028a488e33b123a201c8f05321 100644
--- a/epicsarchiver/statistics/command.py
+++ b/epicsarchiver/statistics/command.py
@@ -1,5 +1,7 @@
 """Command module."""
 
+from __future__ import annotations
+
 import logging
 from datetime import timedelta
 from pathlib import Path
@@ -136,7 +138,7 @@ def stats(  # noqa: PLR0917, PLR0913
         ChannelFinder(channelfinder_hostname) if channelfinder_hostname else None
     )
 
-    with output.open("w", encoding="locale") as out_file:
+    with output.open("w") as out_file:
         config = ReportConfig(
             query_limit=limit,
             time_minimum=timedelta(days=time_minimum),
diff --git a/epicsarchiver/statistics/pv_details.py b/epicsarchiver/statistics/pv_details.py
index 916a062bc40475153b8c643fd261b8a85b2b7758..1b7aac4380eea5f5c02d2c24d0d5bb55ce82caf5 100644
--- a/epicsarchiver/statistics/pv_details.py
+++ b/epicsarchiver/statistics/pv_details.py
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 from enum import Enum
+from typing import Dict
 
 from epicsarchiver.statistics.report import Stat
 from epicsarchiver.statistics.stat_responses import (
@@ -66,7 +67,7 @@ class DetailEnum(str, Enum):
         return None
 
 
-class Details(dict[DetailEnum, str]):
+class Details(Dict[DetailEnum, str]):
     """Representation of the response from the pvDetails endpoint in archiver."""
 
     @classmethod
@@ -115,69 +116,62 @@ class Details(dict[DetailEnum, str]):
             tuple[Stat, BaseStatResponse] | None: Output
         """
         pv_name = self[DetailEnum.PVName]
-        match detail_enum:
-            case DetailEnum.LostEventsTimestamp:
-                return (
-                    Stat.IncorrectTimestamp,
-                    DroppedPVResponse(
-                        pv_name, int(value), DroppedReason.IncorrectTimestamp
-                    ),
-                )
-
-            case DetailEnum.LostEventsType:
-                return (
-                    Stat.TypeChange,
-                    DroppedPVResponse(pv_name, int(value), DroppedReason.TypeChange),
-                )
-
-            case DetailEnum.LostEventsBuffer:
-                return (
-                    Stat.BufferOverflow,
-                    DroppedPVResponse(
-                        pv_name, int(value), DroppedReason.BufferOverflow
-                    ),
-                )
-
-            case DetailEnum.Connnected:
-                if value != "yes":
-                    return (
-                        Stat.DisconnectedPVs,
-                        DisconnectedPVsResponse(
-                            pv_name,
-                            self[DetailEnum.Hostname],
-                            parse_archiver_datetime(
-                                self[DetailEnum.LastLostConnection]
-                            ),
-                            self[DetailEnum.Instance],
-                            int(self[DetailEnum.CommandThread]),
-                            0,
-                            parse_archiver_datetime(self[DetailEnum.LastEvent]),
-                        ),
-                    )
-
-            case DetailEnum.LastEvent:
-                if value == "Never":
-                    return (
-                        Stat.SilentPVs,
-                        SilentPVsResponse(pv_name, self[DetailEnum.Instance], None),
-                    )
-
-            case DetailEnum.LostConnections:
-                return (
-                    Stat.LostConnection,
-                    LostConnectionsResponse(
-                        pv_name,
-                        ConnectionStatus.CurrentlyConnected
-                        if self[DetailEnum.Connnected] == "yes"
-                        else ConnectionStatus.NotCurrentlyConnected,
-                        self[DetailEnum.Instance],
-                        int(value),
-                    ),
-                )
-
-            case DetailEnum.MBStorageRate:
-                return (
-                    Stat.StorageRates,
-                    StorageRatesResponse(pv_name, float(value), None, None),
-                )
+        if detail_enum == DetailEnum.LostEventsTimestamp:
+            return (
+                Stat.IncorrectTimestamp,
+                DroppedPVResponse(
+                    pv_name, int(value), DroppedReason.IncorrectTimestamp
+                ),
+            )
+
+        if detail_enum == DetailEnum.LostEventsType:
+            return (
+                Stat.TypeChange,
+                DroppedPVResponse(pv_name, int(value), DroppedReason.TypeChange),
+            )
+
+        if detail_enum == DetailEnum.LostEventsBuffer:
+            return (
+                Stat.BufferOverflow,
+                DroppedPVResponse(pv_name, int(value), DroppedReason.BufferOverflow),
+            )
+
+        if detail_enum == DetailEnum.Connnected and value != "yes":
+            return (
+                Stat.DisconnectedPVs,
+                DisconnectedPVsResponse(
+                    pv_name,
+                    self[DetailEnum.Hostname],
+                    parse_archiver_datetime(self[DetailEnum.LastLostConnection]),
+                    self[DetailEnum.Instance],
+                    int(self[DetailEnum.CommandThread]),
+                    0,
+                    parse_archiver_datetime(self[DetailEnum.LastEvent]),
+                ),
+            )
+
+        if detail_enum == DetailEnum.LastEvent and value == "Never":
+            return (
+                Stat.SilentPVs,
+                SilentPVsResponse(pv_name, self[DetailEnum.Instance], None),
+            )
+
+        if detail_enum == DetailEnum.LostConnections:
+            return (
+                Stat.LostConnection,
+                LostConnectionsResponse(
+                    pv_name,
+                    ConnectionStatus.CurrentlyConnected
+                    if self[DetailEnum.Connnected] == "yes"
+                    else ConnectionStatus.NotCurrentlyConnected,
+                    self[DetailEnum.Instance],
+                    int(value),
+                ),
+            )
+
+        if detail_enum == DetailEnum.MBStorageRate:
+            return (
+                Stat.StorageRates,
+                StorageRatesResponse(pv_name, float(value), None, None),
+            )
         return None
diff --git a/epicsarchiver/statistics/report.py b/epicsarchiver/statistics/report.py
index 9b1950c508c17ba8ada53143aa2c4cadc62ad3ee..0935b016465e90aef18603b25a435f7b6d3e2609 100644
--- a/epicsarchiver/statistics/report.py
+++ b/epicsarchiver/statistics/report.py
@@ -17,29 +17,27 @@ Examples:
 
 """
 
+from __future__ import annotations
+
 import asyncio
 import csv
 import datetime
 import enum
 import logging
 import operator
-from collections.abc import Sequence
 from dataclasses import dataclass
 from datetime import timedelta
-from pathlib import Path
-from typing import IO
+from typing import IO, TYPE_CHECKING
 
 import pytz
 from rich.console import Console
 
-from epicsarchiver.epicsarchiver import ArchiverAppliance
 from epicsarchiver.statistics._external_stats import (
     filter_by_ioc,
     get_double_archived,
     get_iocs,
     get_not_configured,
 )
-from epicsarchiver.statistics.channelfinder import ChannelFinder
 from epicsarchiver.statistics.stat_responses import (
     UNKNOWN_IOC,
     BaseStatResponse,
@@ -47,6 +45,13 @@ from epicsarchiver.statistics.stat_responses import (
     Ioc,
 )
 
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+    from pathlib import Path
+
+    from epicsarchiver.epicsarchiver import ArchiverAppliance
+    from epicsarchiver.statistics.channelfinder import ChannelFinder
+
 LOG: logging.Logger = logging.getLogger(__name__)
 
 
@@ -104,99 +109,99 @@ class Stat(str, enum.Enum):
         LOG.info("Found %s satisfying stat %s", len(responses), self)
         return {r.pv_name: r for r in responses}
 
-    async def _get_responses(  # noqa: PLR0911
+    async def _get_responses(  # noqa: PLR0911, C901
         self,
         archiver: ArchiverAppliance,
         config: ReportConfig,
     ) -> Sequence[BaseStatResponse]:
         """Produce a list of PVs and stats."""
-        match self:
-            case Stat.BufferOverflow:
-                return [
-                    f
-                    for f in archiver.get_pvs_dropped(
-                        DroppedReason.BufferOverflow,
-                        limit=config.query_limit,
-                    )
-                    if f.events_dropped > config.events_dropped_minimum
-                ]
-
-            case Stat.TypeChange:
-                return archiver.get_pvs_dropped(
-                    DroppedReason.TypeChange,
+        if self == Stat.BufferOverflow:
+            return [
+                f
+                for f in archiver.get_pvs_dropped(
+                    DroppedReason.BufferOverflow,
                     limit=config.query_limit,
                 )
+                if f.events_dropped > config.events_dropped_minimum
+            ]
+
+        if self == Stat.TypeChange:
+            return archiver.get_pvs_dropped(
+                DroppedReason.TypeChange,
+                limit=config.query_limit,
+            )
+
+        if self == Stat.IncorrectTimestamp:
+            return [
+                f
+                for f in archiver.get_pvs_dropped(
+                    DroppedReason.IncorrectTimestamp,
+                    limit=config.query_limit,
+                )
+                if f.events_dropped > config.events_dropped_minimum
+            ]
+
+        if self == Stat.SlowChanging:
+            return [
+                f
+                for f in archiver.get_pvs_dropped(
+                    DroppedReason.SlowChanging,
+                    limit=None,
+                )
+                if f.events_dropped > config.events_dropped_minimum
+            ]
+
+        if self == Stat.DisconnectedPVs:
+            return [
+                ev
+                for ev in archiver.get_disconnected_pvs()
+                if Stat._is_greater_than_time_minimum(
+                    ev.connection_lost_at,
+                    config.time_minimum,
+                )
+            ]
+
+        if self == Stat.SilentPVs:
+            return [
+                ev
+                for ev in archiver.get_silent_pvs(limit=config.query_limit)
+                if Stat._is_greater_than_time_minimum(
+                    ev.last_known_event,
+                    config.time_minimum,
+                )
+            ]
 
-            case Stat.IncorrectTimestamp:
-                return [
-                    f
-                    for f in archiver.get_pvs_dropped(
-                        DroppedReason.IncorrectTimestamp,
-                        limit=config.query_limit,
-                    )
-                    if f.events_dropped > config.events_dropped_minimum
-                ]
-
-            case Stat.SlowChanging:
-                return [
-                    f
-                    for f in archiver.get_pvs_dropped(
-                        DroppedReason.SlowChanging,
-                        limit=None,
-                    )
-                    if f.events_dropped > config.events_dropped_minimum
-                ]
-
-            case Stat.DisconnectedPVs:
-                return [
-                    ev
-                    for ev in archiver.get_disconnected_pvs()
-                    if Stat._is_greater_than_time_minimum(
-                        ev.connection_lost_at,
-                        config.time_minimum,
-                    )
-                ]
-
-            case Stat.SilentPVs:
-                return [
-                    ev
-                    for ev in archiver.get_silent_pvs(limit=config.query_limit)
-                    if Stat._is_greater_than_time_minimum(
-                        ev.last_known_event,
-                        config.time_minimum,
-                    )
-                ]
-
-            case Stat.LostConnection:
-                return [
-                    el
-                    for el in archiver.get_lost_connections_pvs(
-                        limit=config.query_limit,
-                    )
-                    if el.lost_connections > config.connection_drops_minimum
-                ]
-
-            case Stat.StorageRates:
-                return [
-                    r
-                    for r in archiver.get_storage_rates(limit=config.query_limit)
-                    if r.mb_per_day > config.mb_per_day_minimum
-                ]
-
-            case Stat.DoubleArchived:
-                if config.other_archiver:
-                    return await get_double_archived(archiver, config.other_archiver)
-                return []
-
-            case Stat.NotConfigured:
-                if config.config_gitlab_repo:
-                    return await get_not_configured(
-                        archiver,
-                        config.channelfinder,
-                        config.config_gitlab_repo,
-                        config.ioc_name,
-                    )
-                return []
+        if self == Stat.LostConnection:
+            return [
+                el
+                for el in archiver.get_lost_connections_pvs(
+                    limit=config.query_limit,
+                )
+                if el.lost_connections > config.connection_drops_minimum
+            ]
+
+        if self == Stat.StorageRates:
+            return [
+                r
+                for r in archiver.get_storage_rates(limit=config.query_limit)
+                if r.mb_per_day > config.mb_per_day_minimum
+            ]
+
+        if self == Stat.DoubleArchived:
+            if config.other_archiver:
+                return await get_double_archived(archiver, config.other_archiver)
+            return []
+
+        if self == Stat.NotConfigured:
+            if config.config_gitlab_repo:
+                return await get_not_configured(
+                    archiver,
+                    config.channelfinder,
+                    config.config_gitlab_repo,
+                    config.ioc_name,
+                )
+            return []
+        return []
 
     async def generate_stats(
         self,
@@ -265,7 +270,7 @@ async def generate_all_stats(
     gather_all_stats = await asyncio.gather(*[
         stat.generate_stats(archiver, config) for stat in Stat
     ])
-    inverted_data = _invert_data(dict(zip(list(Stat), gather_all_stats, strict=True)))
+    inverted_data = _invert_data(dict(zip(list(Stat), gather_all_stats)))
     if config.channelfinder:
         return await _organise_by_ioc(
             inverted_data,
diff --git a/epicsarchiver/statistics/stat_responses.py b/epicsarchiver/statistics/stat_responses.py
index fa5cca8981d6405279b73297daac14b9b1f7c023..e1d5afe7f686ebeb2d8096eb2c3fbce31e462675 100644
--- a/epicsarchiver/statistics/stat_responses.py
+++ b/epicsarchiver/statistics/stat_responses.py
@@ -1,12 +1,16 @@
 """Data structures for the statistics endpoints from the archiver."""
 
+from __future__ import annotations
+
 import datetime
 import enum
 from dataclasses import dataclass
+from typing import TYPE_CHECKING
 
 import pytz
 
-from epicsarchiver.statistics.channelfinder import Channel
+if TYPE_CHECKING:
+    from epicsarchiver.statistics.channelfinder import Channel
 
 
 class DroppedReason(str, enum.Enum):
@@ -40,7 +44,7 @@ class DroppedPVResponse(BaseStatResponse):
         cls,
         json: dict[str, str],
         dropped_reason: DroppedReason,
-    ) -> "DroppedPVResponse":
+    ) -> DroppedPVResponse:
         """Convert to DroppedPVResponse from dictionary generated from json.
 
         Args:
@@ -122,7 +126,7 @@ class DisconnectedPVsResponse(BaseStatResponse):
     last_known_event: datetime.datetime | None
 
     @classmethod
-    def from_json(cls, json: dict[str, str]) -> "DisconnectedPVsResponse":
+    def from_json(cls, json: dict[str, str]) -> DisconnectedPVsResponse:
         """Response from the endpoint in getCurrentlyDisconnectedPVs."""
         return DisconnectedPVsResponse(
             json["pvName"],
@@ -166,7 +170,7 @@ class SilentPVsResponse(BaseStatResponse):
     last_known_event: datetime.datetime | None
 
     @classmethod
-    def from_json(cls, json: dict[str, str]) -> "SilentPVsResponse":
+    def from_json(cls, json: dict[str, str]) -> SilentPVsResponse:
         """Response from the endpoint in getSilentPVsReport."""
         return SilentPVsResponse(
             json["pvName"],
@@ -217,7 +221,7 @@ class LostConnectionsResponse(BaseStatResponse):
     lost_connections: int
 
     @classmethod
-    def from_json(cls, json: dict[str, str]) -> "LostConnectionsResponse":
+    def from_json(cls, json: dict[str, str]) -> LostConnectionsResponse:
         """Response from the endpoint in getLostConnectionsReport."""
         return LostConnectionsResponse(
             json["pvName"],
@@ -260,7 +264,7 @@ class StorageRatesResponse(BaseStatResponse):
     gb_per_year: float | None
 
     @classmethod
-    def from_json(cls, json: dict[str, str]) -> "StorageRatesResponse":
+    def from_json(cls, json: dict[str, str]) -> StorageRatesResponse:
         """Response from the endpoint in getStorageRateReport."""
         return StorageRatesResponse(
             json["pvName"],
@@ -302,7 +306,7 @@ class PausedPVResponse(BaseStatResponse):
     modification_time: str
 
     @classmethod
-    def from_json(cls, json: dict[str, str]) -> "PausedPVResponse":
+    def from_json(cls, json: dict[str, str]) -> PausedPVResponse:
         """Response from the endpoint in getPausedPVsReport."""
         return PausedPVResponse(
             json["pvName"],
@@ -368,7 +372,7 @@ class Ioc:
     name: str
 
     @classmethod
-    def from_channel(cls, channel: Channel) -> "Ioc":
+    def from_channel(cls, channel: Channel) -> Ioc:
         """Gets IOC info from a channel."""
         return Ioc(channel.properties["hostName"], channel.properties["iocName"])
 
diff --git a/hatch.toml b/hatch.toml
index 1bb6741e41df5a682236e53517f477287996599e..20fadbd05157c615d6329f74d8e5dcb374d3292c 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -35,7 +35,7 @@ jupyter-check = "hatch run jupyter:check"
 all = ["hatch fmt --check", "type-check", "cov", "jupyter-check", "docs-check"]
 
 [[envs.test.matrix]]
-python = ["3.10", "3.11", "3.12"]
+python = ["3.8", "3.9", "3.10", "3.11", "3.12"]
 
 [envs.types]
 extra-dependencies = [
diff --git a/pyproject.toml b/pyproject.toml
index 190555e2222aa5aec41106416305de13db886d40..37b6af89232c8eb7a815038679f61040ffbf2253 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "hatchling.build"
 name = "py-epicsarchiver"
 description = "A python script for managing and pulling data from the Archiver Appliance"
 readme = "README.md"
-requires-python = ">=3.10"
+requires-python = ">=3.8"
 keywords = []
 authors = [
   { name = "Sky Brewer", email = "sky.brewer@ess.eu" },
@@ -15,6 +15,8 @@ authors = [
 classifiers = [
   "Development Status :: 4 - Beta",
   "Programming Language :: Python",
+  "Programming Language :: Python :: 3.8",
+  "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
   "Programming Language :: Python :: 3.12",
@@ -41,7 +43,7 @@ Source = "https://gitlab.esss.lu.se/ics-software/py-epicsarchiver"
 
 [tool.ruff]
 extend = "ruff_defaults.toml"
-target-version = "py310"
+target-version = "py38"
 line-length = 88
 exclude = [
   "epicsarchiver/retrieval/EPICSEvent_pb2.py",
diff --git a/tests/mgmt/test_archiver_mgmt.py b/tests/mgmt/test_archiver_mgmt.py
index bc2139f21e6ddcd65fb79d07aaf55d0ae691a56f..022e77dfcc83d71b0db0b7c20ee944d3b16481a3 100644
--- a/tests/mgmt/test_archiver_mgmt.py
+++ b/tests/mgmt/test_archiver_mgmt.py
@@ -1,14 +1,19 @@
 """Tests for `epicsarchiver` package."""
 
+from __future__ import annotations
+
 import json
 import logging
-from pathlib import Path
+from typing import TYPE_CHECKING
 
 import pytest
 import responses
 
 from epicsarchiver.mgmt.archiver_mgmt import ArchiverMgmt, check_result
 
+if TYPE_CHECKING:
+    from pathlib import Path
+
 LOG: logging.Logger = logging.getLogger(__name__)
 
 
diff --git a/tests/retrieval/test_archiver_retrieval.py b/tests/retrieval/test_archiver_retrieval.py
index 20bf913bdb77c80e4788baac5ef8d2fa5d98fae3..9f4e2ed7a67f22250ea0c184dd779bbd6e2039ec 100644
--- a/tests/retrieval/test_archiver_retrieval.py
+++ b/tests/retrieval/test_archiver_retrieval.py
@@ -1,4 +1,4 @@
-from collections.abc import Sequence
+from typing import Sequence
 
 import pandas as pd
 import pytest
diff --git a/tests/statistics/test_pv_details.py b/tests/statistics/test_pv_details.py
index 98a0616d17c042244f985b8bb5548fe216cd38da..77c87e8375313322c7f60d6e6a68ed6bcbf1e0d3 100644
--- a/tests/statistics/test_pv_details.py
+++ b/tests/statistics/test_pv_details.py
@@ -15,7 +15,7 @@ LOG: logging.Logger = logging.getLogger(__name__)
 
 def test_convert_json_to_details_list() -> None:
     example_path = Path("tests/statistics/samples/example_pv_details.json")
-    with example_path.open(encoding="locale") as json_data:
+    with example_path.open() as json_data:
         data = json.load(json_data)
         result = Details.from_json(data)
         LOG.info(result)
diff --git a/tests/statistics/test_report.py b/tests/statistics/test_report.py
index fab7a9149c3f6dcd824630b6e58b7350df4e25dc..7f886aa88b5e9bc4d4540d94b36716cf2e6d55c8 100644
--- a/tests/statistics/test_report.py
+++ b/tests/statistics/test_report.py
@@ -1,10 +1,12 @@
+from __future__ import annotations
+
 import datetime
 from datetime import timedelta
+from typing import TYPE_CHECKING
 from unittest.mock import AsyncMock
 
 import pytest
 import pytz
-from pytest_mock import MockFixture
 
 from epicsarchiver.epicsarchiver import ArchiverAppliance
 from epicsarchiver.statistics.channelfinder import Channel, ChannelFinder
@@ -27,6 +29,9 @@ from epicsarchiver.statistics.stat_responses import (
     StorageRatesResponse,
 )
 
+if TYPE_CHECKING:
+    from pytest_mock import MockFixture
+
 expected_all_stats: dict[Stat, BaseStatResponse] = {
     Stat.BufferOverflow: DroppedPVResponse("MY:PV", 11, DroppedReason.BufferOverflow),
     Stat.TypeChange: DroppedPVResponse("MY:PV", 11, DroppedReason.TypeChange),
@@ -106,15 +111,14 @@ def mock_get_pvs_dropped(
     reason: DroppedReason,
     limit: int,  # noqa: ARG001
 ) -> list[BaseStatResponse]:
-    match reason:
-        case DroppedReason.BufferOverflow:
-            return [expected_all_stats[Stat.BufferOverflow]]
-        case DroppedReason.IncorrectTimestamp:
-            return [expected_all_stats[Stat.IncorrectTimestamp]]
-        case DroppedReason.TypeChange:
-            return [expected_all_stats[Stat.TypeChange]]
-        case DroppedReason.SlowChanging:
-            return [expected_all_stats[Stat.SlowChanging]]
+    if reason == DroppedReason.BufferOverflow:
+        return [expected_all_stats[Stat.BufferOverflow]]
+    if reason == DroppedReason.IncorrectTimestamp:
+        return [expected_all_stats[Stat.IncorrectTimestamp]]
+    if reason == DroppedReason.TypeChange:
+        return [expected_all_stats[Stat.TypeChange]]
+    if reason == DroppedReason.SlowChanging:
+        return [expected_all_stats[Stat.SlowChanging]]
     return []