diff --git a/.gitlab/Dockerfile b/.gitlab/Dockerfile
index 92e3544eaec87be95526a95d516bb0021c7f3150..b53d05de8940267e1a11da238118554816e1256f 100644
--- a/.gitlab/Dockerfile
+++ b/.gitlab/Dockerfile
@@ -34,9 +34,9 @@ RUN yum install -y epel-release dnf-plugins-core \
 WORKDIR /python
 
 RUN curl -sS  "${PYTHON_URL}" | tar xvz --strip-components=1 \
-    && ./configure --enable-optimizations \
-    && make \
-    && make install \
+    && ./configure --enable-optimizations --silent \
+    && make --silent \
+    && make install --silent \
     && curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3
 
 WORKDIR /
diff --git a/Dockerfile b/Dockerfile
index 012cfc5895222d4e49ea529b6a24ab5802a5efe5..8eadf738b859a7b2c23e8043d229c238d585d9da 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -59,6 +59,8 @@ ENV PROJECT ${project}
 ENV NETCDF_FILE ${netcdf_file}
 
 ENV PYTHON_URL="https://www.python.org/ftp/python/3.9.2/Python-3.9.2.tgz"
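+# pip reads PIP_CERT as its --cert option; reuse the image CA bundle for TLS verification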
+ENV PIP_CERT=${SSL_CERT_FILE}
 
 RUN yum install -y epel-release dnf-plugins-core \
     && yum config-manager --set-enabled powertools \
@@ -74,10 +76,10 @@ RUN yum install -y epel-release dnf-plugins-core \
 
 WORKDIR /python
 
-RUN curl -sS  "${PYTHON_URL}" | tar xvz --strip-components=1 \
-    && ./configure --enable-optimizations \
-    && make \
-    && make install \
+RUN curl -sS "${PYTHON_URL}" | tar xz --strip-components=1 \
+    && ./configure --enable-optimizations --silent \
+    && make --silent \
+    && make install --silent \
     && curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3
 
 WORKDIR ${workdir}
@@ -87,7 +89,7 @@ COPY src/main/python src/main/python
 COPY src/main/resources src/main/resources
 COPY poetry.lock .
 COPY pyproject.toml .
-COPY bash-entrypoint.sh .
+COPY docker-entrypoint.sh .
 
 EXPOSE 8080
 ENTRYPOINT [ "bash", "docker-entrypoint.sh" ]
diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh
index 6a765a641e6a98c54b5368f15d638a13cf3c4c4d..731524116144963e6cd3542a68791ed2c7efe5dc 100644
--- a/docker-entrypoint.sh
+++ b/docker-entrypoint.sh
@@ -7,7 +7,12 @@ PROJECT="${PROJECT:-nshmp-netcdf}";
 
 # TODO: Remove option to install database in container when deployed to AWS
 if [ "${INSTALL_DATABASE}" == "true" ]; then
-  "${HOME}"/.poetry/bin/poetry install;
+  export REQUESTS_CA_BUNDLE="${SSL_CERT_FILE}";
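+  # get-poetry.py installs Poetry under ~/.poetry; put its bin directory on PATH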
+  export PATH="$HOME/.poetry/bin:$PATH";
+
+  poetry install;
 
   case "${NSHM}" in
     "CONUS_2018A")
diff --git a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/__main__.py b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/__main__.py
index 47fbfcb60b798758bb281ef4dcd7b4a479d15622..628d42d4d93e8cadc8b2fbc97fda855460d2e1dc 100644
--- a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/__main__.py
+++ b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/__main__.py
@@ -10,7 +10,7 @@ Supported NSHMs for download and conversion:
 from rich.traceback import install
 
 from .application_inputs import ApplicationInputs
-from .converters.convert_2018a import Convert2018A
+from .converters.convert import Convert
 from .converters.preprocess import Preprocess
 from .database.database import Database
 from .parsers.args_parser import parser
@@ -38,6 +38,6 @@ def run():
             if not inputs.download_only:
                 for database_info in database.database_info:
                     preprocess = Preprocess(database_info=database_info)
-                    Convert2018A(inputs=inputs, metadata=preprocess.netcdf_metadata)
+                    Convert(inputs=inputs, metadata=preprocess.netcdf_metadata)
     except Exception as error:
         raise error
diff --git a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert.py b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert.py
index 11fc1adbff08f0e0122b620630051850a66a2912..c89f39243b0f2a23c906e0b39b561a886da5b77f 100644
--- a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert.py
+++ b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/convert.py
@@ -7,6 +7,11 @@ from dataclasses import dataclass
 from datetime import datetime
 from pathlib import Path
 from typing import Union
 
 import netCDF4 as netcdf
 import numpy as np
+from rich.progress import (
+    BarColumn,
+    Progress,
+    TimeElapsedColumn,
+)
@@ -46,7 +51,6 @@ class Convert:
         if (nshm != Nshm.NSHM_2018A):
             raise ValueError(f"NSHM [{self.metadata.database_info.nshm.value}] not supported")
 
-        # hazard_parameters = NetcdfParameters().HazardParameters()
         console.print(f"\n[blue]Converting {nshm.label}")
 
         self._imt_indices: dict[Imt, int] = self._set_imt_indices()
@@ -76,18 +80,13 @@
         self._site_class_mask_array = np.zeros(
             [self._dimensions.lat.size, self._dimensions.lon.size], int
         )
-        # self._data_array = np.full(
-        #     [
-        #         self._dimensions.site_class.size,
-        #         self._dimensions.imt.size,
-        #         self._dimensions.lat.size,
-        #         self._dimensions.lon.size,
-        #         self._dimensions.iml.size,
-        #     ],
-        #     hazard_parameters.fill_value,
-        #     float,
-        # )
-
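+        # Shared progress bar (description, bar, percentage, elapsed time) driven by _get_hazard_data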
+        self._progress = Progress(
+            "[progress.description]{task.description}",
+            BarColumn(),
+            "[progress.percentage]{task.percentage:>3.0f}%",
+            TimeElapsedColumn()
+        )
         self._write_netcdf_file()
         self._root_group.close()
         self._clean_ascii()
@@ -171,25 +170,25 @@
 
     def _get_hazard_data(
         self,
-        hazard_netcdf_var: netcdf.Variable,
-        netcdf_info: list[NetcdfInfo],
+        hazard_netcdf_var: netcdf.Variable
     ):
         futures: list[Future] = []
-
-        with ThreadPoolExecutor() as executor:
-            for index, _info in enumerate(netcdf_info):
-
-                info: NetcdfInfo = _info
-                futures.append(
-                    executor.submit(
-                        self._read_curves_file,
-                        hazard_netcdf_var=hazard_netcdf_var,
-                        netcdf_info=info,
+        status_msg = f"[bold green]Converting {self.metadata.database_info.nshm.label} files"
+
+        with self._progress:
+            with ThreadPoolExecutor() as executor:
+                for _info in self.metadata.netcdf_info:
+                    info: NetcdfInfo = _info
+                    futures.append(
+                        executor.submit(
+                            self._read_curves_file,
+                            hazard_netcdf_var=hazard_netcdf_var,
+                            netcdf_info=info,
+                        )
                     )
-                )
-
-        for future in futures:
-            future.result(timeout=120)
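+                # Block on each future in submission order, advancing the bar as results complete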
+                for future in self._progress.track(futures, description=status_msg):
+                    future.result(timeout=120)
 
     def _get_imt_index(self, imt: Imt):
         return self._imt_indices.get(imt)
@@ -366,15 +365,6 @@
             enum_dict=site_class_dict,
         )
 
-    def _write_hazard_data(self, hazard_netcdf_var: netcdf.Variable):
-        status_msg = f"[bold green]Converting {self.metadata.database_info.nshm.label} files ..."
-        with console.status(status_msg, spinner="pong") as status:
-            self._get_hazard_data(
-                netcdf_info=self.metadata.netcdf_info,
-                hazard_netcdf_var=hazard_netcdf_var,
-            )
-        status.stop()
-
     def _write_netcdf_file(self):
         self._root_group.setncatts(_ROOT_ATTRIBUTES)
         self._dataset_group.description = self.metadata.database_info.description
@@ -387,6 +377,5 @@
             )
 
         grid_mask_netcdf_var, hazard_netcdf_var = self._create_netcdf_variables()
-        self._write_hazard_data(hazard_netcdf_var=hazard_netcdf_var)
-        # hazard_netcdf_var[:, :, :, :, :] = self._data_array
+        self._get_hazard_data(hazard_netcdf_var=hazard_netcdf_var)
         grid_mask_netcdf_var[:, :] = self._site_class_mask_array
diff --git a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/preprocess.py b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/preprocess.py
index 2f3f0dff908a14a72ec7d888631641321accf5e6..d7fbdbc977dc6d6a9132d0df1fe17d475bf33b2c 100644
--- a/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/preprocess.py
+++ b/src/main/python/gov/usgs/earthquake/nshmp/netcdf/converters/preprocess.py
@@ -2,7 +2,11 @@ from concurrent.futures import Future, ThreadPoolExecutor
 from pathlib import Path
 
 import numpy as np
-from rich.progress import track
+from rich.progress import (
+    BarColumn,
+    Progress,
+    TimeElapsedColumn,
+)
 
 from ..database.database_info import DatabaseInfo, DataInfo, NetcdfInfo, NetcdfMetadata
 from ..geo.location import Locations
@@ -35,19 +39,28 @@
         futures: list[Future[DataInfo]] = []
         data_info_list: list[DataInfo] = []
 
-        with ThreadPoolExecutor() as executor:
-            for _curve_file in database_info.curve_files:
-                curve_file: Path = _curve_file
-                future = executor.submit(
-                    DataPathParser.parse_path,
-                    database_info=database_info,
-                    curve_file=curve_file,
-                )
-                futures.append(future)
-
-            for _future in track(futures, description=status_msg):
-                future: Future[DataInfo] = _future
-                data_info_list.append(future.result(timeout=120))
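+        # Explicit Progress display (replacing rich.progress.track) so the bar includes an elapsed-time column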
+        progress = Progress(
+            "[progress.description]{task.description}",
+            BarColumn(),
+            "[progress.percentage]{task.percentage:>3.0f}%",
+            TimeElapsedColumn()
+        )
+
+        with progress:
+            with ThreadPoolExecutor() as executor:
+                for _curve_file in database_info.curve_files:
+                    curve_file: Path = _curve_file
+                    future = executor.submit(
+                        DataPathParser.parse_path,
+                        database_info=database_info,
+                        curve_file=curve_file,
+                    )
+                    futures.append(future)
+
+                for _future in progress.track(futures, description=status_msg):
+                    future: Future[DataInfo] = _future
+                    data_info_list.append(future.result(timeout=120))
 
         console.print("Preprocessing done", style="bold yellow")
         return self._condense_data_info(database_info=database_info, data_info_list=data_info_list)