diff --git a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert_2018a.py b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert.py
similarity index 92%
rename from src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert_2018a.py
rename to src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert.py
index 34aa3fe051127b8f9649d74dfc93fcda019dd431..11fc1adbff08f0e0122b620630051850a66a2912 100644
--- a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert_2018a.py
+++ b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert.py
@@ -20,6 +20,7 @@ from ..utils.netcdf_dimensions import NetcdfDimensions
 from ..utils.netcdf_keys import NetcdfKeys
 from ..utils.netcdf_parameters import NetcdfParameters
 from ..utils.netcdf_utils import NetcdfUtils
+from ..nshm import Nshm
 
 
 _ROOT_ATTRIBUTES: dict = {
@@ -35,14 +36,17 @@ _ROOT_ATTRIBUTES: dict = {
 _NETCDF_PATH_ENV = "NSHMP_NETCDF_FILE_PATH"
 
 
-class Convert2018A:
+class Convert:
     def __init__(self, inputs: ApplicationInputs, metadata: NetcdfMetadata):
         self.metadata = metadata
         self.inputs = inputs
-
-        hazard_parameters = NetcdfParameters().HazardParameters()
         nshm = self.metadata.database_info.nshm
         region = self.metadata.model_region
+
+        if nshm != Nshm.NSHM_2018A:
+            raise ValueError(f"NSHM [{nshm.value}] not supported")
+
+        # hazard_parameters = NetcdfParameters().HazardParameters()
         console.print(f"\n[blue]Converting {nshm.label}")
 
         self._imt_indices: dict[Imt, int] = self._set_imt_indices()
@@ -72,19 +76,17 @@ class Convert2018A:
         self._site_class_mask_array = np.zeros(
             [self._dimensions.lat.size, self._dimensions.lon.size], int
         )
-        # self._imt_mask_array = np.zeros([self._dimensions.lat.size, self._dimensions.lon.size], int)
-
-        self._data_array = np.full(
-            [
-                self._dimensions.site_class.size,
-                self._dimensions.imt.size,
-                self._dimensions.lat.size,
-                self._dimensions.lon.size,
-                self._dimensions.iml.size,
-            ],
-            hazard_parameters.fill_value,
-            float,
-        )
+        # self._data_array = np.full(
+        #     [
+        #         self._dimensions.site_class.size,
+        #         self._dimensions.imt.size,
+        #         self._dimensions.lat.size,
+        #         self._dimensions.lon.size,
+        #         self._dimensions.iml.size,
+        #     ],
+        #     hazard_parameters.fill_value,
+        #     float,
+        # )
 
         self._write_netcdf_file()
         self._root_group.close()
@@ -243,6 +245,15 @@ class Convert2018A:
         if not curves_file.exists():
             raise Exception(f"File ({curves_file}) not found")
 
+        data_array = np.full(
+            [
+                self._dimensions.lat.size,
+                self._dimensions.lon.size,
+                self._dimensions.iml.size,
+            ],
+            NetcdfParameters.HazardParameters().fill_value,
+            float,
+        )
         imls = self.metadata.imls.get(netcdf_info.imt)
         imt_dir = curves_file.parent
         imt_index = self._get_imt_index(imt=netcdf_info.imt)
@@ -278,11 +289,18 @@ class Convert2018A:
                     longitude, self._get_longitude_index(longitude=longitude)
                 )
 
-                self._data_array[
-                    site_class_index, imt_index, latitude_index, longitude_index, :
+                data_array[
+                    latitude_index, longitude_index, :
                 ] = values
+
+                # self._data_array[
+                #     site_class_index, imt_index, latitude_index, longitude_index, :
+                # ] = values
                 imt_mask_array[latitude_index, longitude_index] = 1
             self._site_class_mask_array += imt_mask_array
+            hazard_netcdf_var[
+                site_class_index, imt_index, :, :, :
+            ] = data_array
 
     def _set_imt_indices(self) -> dict[Imt, int]:
         imt_indices: dict[Imt, int] = dict()
@@ -370,5 +388,5 @@ class Convert2018A:
 
         grid_mask_netcdf_var, hazard_netcdf_var = self._create_netcdf_variables()
         self._write_hazard_data(hazard_netcdf_var=hazard_netcdf_var)
-        hazard_netcdf_var[:, :, :, :, :] = self._data_array
+        # hazard_netcdf_var[:, :, :, :, :] = self._data_array
         grid_mask_netcdf_var[:, :] = self._site_class_mask_array
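
Note on the convert.py change above: rather than materializing the full (site class, imt, lat, lon, iml) array in memory, a (lat, lon, iml) buffer is now filled per site class/IMT pair and assigned directly into the netCDF variable, so peak memory stays bounded by a single slice. A minimal, self-contained sketch of that write pattern follows; the file name, dimension sizes, and fill value are illustrative assumptions, not values from this project.

    # Sketch of the streaming write pattern used in convert.py above:
    # fill one (lat, lon, iml) buffer per (site class, IMT) pair and
    # assign it into the 5-D netCDF variable, rather than holding the
    # whole 5-D array in memory. Names, sizes, and the fill value are
    # illustrative assumptions only.
    import numpy as np
    from netCDF4 import Dataset

    n_site_class, n_imt, n_lat, n_lon, n_iml = 2, 3, 10, 10, 20
    fill_value = -1.0

    with Dataset("hazard_sketch.nc", "w") as root:
        for name, size in (("site_class", n_site_class), ("imt", n_imt),
                           ("lat", n_lat), ("lon", n_lon), ("iml", n_iml)):
            root.createDimension(name, size)
        hazard_var = root.createVariable(
            "hazard", "f8", ("site_class", "imt", "lat", "lon", "iml"),
            fill_value=fill_value,
        )

        for sc_index in range(n_site_class):
            for imt_index in range(n_imt):
                # Per-slice buffer, analogous to data_array in the diff.
                data_array = np.full((n_lat, n_lon, n_iml), fill_value, float)
                # ... populate data_array from the curve files here ...
                hazard_var[sc_index, imt_index, :, :, :] = data_array
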
diff --git a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/preprocess.py b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/preprocess.py
index 22c6e49064f0123e9d8f93a45e5d4e75c3c88117..2f3f0dff908a14a72ec7d888631641321accf5e6 100644
--- a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/preprocess.py
+++ b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/preprocess.py
@@ -2,6 +2,7 @@ from concurrent.futures import Future, ThreadPoolExecutor
 from pathlib import Path
 
 import numpy as np
+from rich.progress import track
 
 from ..database.database_info import DatabaseInfo, DataInfo, NetcdfInfo, NetcdfMetadata
 from ..geo.location import Locations
@@ -30,25 +31,23 @@ class Preprocess:
 
     def _preprocess(self, database_info: DatabaseInfo) -> NetcdfMetadata:
         console.print(f"Preprocessing {database_info.nshm.label}", style="blue")
-        status_msg = f"[bold green]Parsing {database_info.nshm.label} files ..."
+        status_msg = f"[bold green]Parsing {database_info.nshm.label} files"
         futures: list[Future[DataInfo]] = []
-
-        with console.status(status_msg, spinner="pong") as status:
-            with ThreadPoolExecutor() as executor:
-                for _curve_file in database_info.curve_files:
-                    curve_file: Path = _curve_file
-                    future = executor.submit(
-                        DataPathParser.parse_path,
-                        database_info=database_info,
-                        curve_file=curve_file,
-                    )
-                    futures.append(future)
-        status.stop()
-
         data_info_list: list[DataInfo] = []
-        for _future in futures:
-            future: Future[DataInfo] = _future
-            data_info_list.append(future.result(timeout=30))
+
+        with ThreadPoolExecutor() as executor:
+            for _curve_file in database_info.curve_files:
+                curve_file: Path = _curve_file
+                future = executor.submit(
+                    DataPathParser.parse_path,
+                    database_info=database_info,
+                    curve_file=curve_file,
+                )
+                futures.append(future)
+
+            for _future in track(futures, description=status_msg):
+                future: Future[DataInfo] = _future
+                data_info_list.append(future.result(timeout=120))
 
         console.print("Preprocessing done", style="bold yellow")
         return self._condense_data_info(database_info=database_info, data_info_list=data_info_list)
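
For reference, the preprocess.py change swaps the console.status spinner for a rich progress bar that advances as each parsed file's result is collected. A small self-contained sketch of that pattern, with a placeholder parse function and file list standing in for DataPathParser.parse_path and the real curve files:

    # Submit parsing work to a thread pool, then gather results with a
    # rich progress bar. parse_file and curve_files are placeholders.
    from concurrent.futures import Future, ThreadPoolExecutor
    from pathlib import Path

    from rich.progress import track

    def parse_file(curve_file: Path) -> str:
        return curve_file.stem  # stand-in for the real parser

    curve_files = [Path(f"curves_{i}.csv") for i in range(5)]
    results: list[str] = []

    with ThreadPoolExecutor() as executor:
        futures: list[Future[str]] = [
            executor.submit(parse_file, curve_file) for curve_file in curve_files
        ]
        # track() renders a progress bar as each future is awaited;
        # the timeout bounds how long any single result may take.
        for future in track(futures, description="Parsing files"):
            results.append(future.result(timeout=120))
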