From eee99051bf28ad6cd7179007dbce98d172760554 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Fri, 8 Nov 2024 15:52:08 -0700
Subject: [PATCH 01/16] add docker-compose and some scripts

---
 localdev/cleanup_docker.sh  |  5 +++++
 localdev/docker-compose.yml | 43 +++++++++++++++++++++++++++++++++++++
 localdev/restart_web.sh     |  6 ++++++
 localdev/start_services.sh  | 13 +++++++++++
 4 files changed, 67 insertions(+)
 create mode 100755 localdev/cleanup_docker.sh
 create mode 100644 localdev/docker-compose.yml
 create mode 100755 localdev/restart_web.sh
 create mode 100755 localdev/start_services.sh

diff --git a/localdev/cleanup_docker.sh b/localdev/cleanup_docker.sh
new file mode 100755
index 00000000..15736cce
--- /dev/null
+++ b/localdev/cleanup_docker.sh
@@ -0,0 +1,5 @@
+docker stop $(docker ps -aq)
+
+docker rm -vf $(docker ps -aq)
+
+docker rmi -f $(docker images -aq)
diff --git a/localdev/docker-compose.yml b/localdev/docker-compose.yml
new file mode 100644
index 00000000..2c71ff77
--- /dev/null
+++ b/localdev/docker-compose.yml
@@ -0,0 +1,43 @@
+version: '3'
+services:
+  database:
+    image: mysql:8.0.30
+    ports:
+      - '3306:3306'
+    environment:
+      - MYSQL_ROOT_PASSWORD=$MYSQL_ROOT_PASSWORD
+      - MYSQL_DATABASE=geomag_operations
+    volumes:
+      - $INIT_SQL_PATH:/docker-entrypoint-initdb.d/init.sql
+    platform: linux/amd64
+
+  edge:
+    build: $EDGE_PATH
+    ports:
+      - '7981:7981'
+      - '2060:2060'
+      - '7974:7974'
+      - '2061:2061'
+    volumes:
+      - $EDGE_DATA_PATH:/data
+  
+  web:
+    build:
+      context: ../.
+      dockerfile: ./Dockerfile
+    ports:
+      - '8000:8000'
+    extra_hosts:
+      - host.docker.internal:host-gateway
+    environment:
+      - DATA_HOST=host.docker.internal
+      - DATABASE_URL=$DATABASE_URL
+      - OPENID_CLIENT_ID=$OPENID_CLIENT_ID
+      - OPENID_CLIENT_SECRET=$OPENID_CLIENT_SECRET
+      - OPENID_METADATA_URL=$OPENID_METADATA_URL
+      - SECRET_KEY=$SECRET_KEY
+      - SECRET_SALT=$SECRET_SALT
+      - WEBSERVICE=true
+    volumes:
+      - $GEOMAG_ALGORITHMS_PATH:/data
+    restart: always
diff --git a/localdev/restart_web.sh b/localdev/restart_web.sh
new file mode 100755
index 00000000..8f0d69f4
--- /dev/null
+++ b/localdev/restart_web.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -ex
+
+docker-compose build web
+
+docker-compose up -d web
diff --git a/localdev/start_services.sh b/localdev/start_services.sh
new file mode 100755
index 00000000..78d8fc1d
--- /dev/null
+++ b/localdev/start_services.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -ex
+
+docker-compose build edge database
+
+docker-compose up -d edge database
+
+# let database initialize
+sleep 10;
+
+docker-compose build web
+
+docker-compose up -d web
-- 
GitLab


From 907a592dbe327bd4264ac4792d3fd53feb5f3082 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Tue, 12 Nov 2024 09:55:59 -0700
Subject: [PATCH 02/16] some cleanup and added a readme

---
 localdev/docker-compose.yml   |  4 +--
 localdev/init.sql             | 39 +++++++++++++++++++++++++
 localdev/local_development.md | 54 +++++++++++++++++++++++++++++++++++
 localdev/start_services.sh    |  2 +-
 4 files changed, 95 insertions(+), 4 deletions(-)
 create mode 100644 localdev/init.sql
 create mode 100644 localdev/local_development.md

diff --git a/localdev/docker-compose.yml b/localdev/docker-compose.yml
index 2c71ff77..1b18a11d 100644
--- a/localdev/docker-compose.yml
+++ b/localdev/docker-compose.yml
@@ -8,7 +8,7 @@ services:
       - MYSQL_ROOT_PASSWORD=$MYSQL_ROOT_PASSWORD
       - MYSQL_DATABASE=geomag_operations
     volumes:
-      - $INIT_SQL_PATH:/docker-entrypoint-initdb.d/init.sql
+      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
     platform: linux/amd64
 
   edge:
@@ -38,6 +38,4 @@ services:
       - SECRET_KEY=$SECRET_KEY
       - SECRET_SALT=$SECRET_SALT
       - WEBSERVICE=true
-    volumes:
-      - $GEOMAG_ALGORITHMS_PATH:/data
     restart: always
diff --git a/localdev/init.sql b/localdev/init.sql
new file mode 100644
index 00000000..5cf5bb6a
--- /dev/null
+++ b/localdev/init.sql
@@ -0,0 +1,39 @@
+CREATE DATABASE IF NOT EXISTS geomag_operations;
+CREATE TABLE metadata (
+    id int NOT NULL AUTO_INCREMENT,
+    created_by varchar(255),
+    created_time TIMESTAMP,
+    updated_by varchar(255),
+    updated_time TIMESTAMP,
+    starttime TIMESTAMP,
+    endtime TIMESTAMP,
+    network varchar(255),
+    station varchar(255),
+    channel varchar(255),
+    location varchar(255),
+    category varchar(255),
+    priority int,
+    data_valid boolean,
+    status varchar(255),
+    metadata JSON,
+    comment TEXT,
+    review_comment TEXT,
+    PRIMARY KEY (id)
+);
+
+CREATE INDEX index_category_time ON metadata (
+    category,
+    starttime,
+    endtime
+);
+
+CREATE TABLE session (
+    id int NOT NULL AUTO_INCREMENT,
+    session_id varchar(255),
+    data TEXT,
+    updated TIMESTAMP,
+    PRIMARY KEY (id)
+);
+
+INSERT INTO metadata (category, created_by, network, station, priority, data_valid, metadata) 
+VALUES ('instrument', 'test_metadata.py', 'NT', 'BDT', 1, true, '{"type": "FGE","channels": {"U": [{ "channel": "U_Volt", "offset": 0, "scale": 313.2 }],"V": [{ "channel": "V_Volt", "offset": 0, "scale": 312.3 }],"W": [{ "channel": "W_Volt", "offset": 0, "scale": 312.0 }]},"electronics": {"serial": "E0542","x-scale": 313.2,"y-scale": 312.3,"z-scale": 312.0,"temperature-scale": 0.01},"sensor": {"serial": "S0419","x-constant": 36958,"y-constant": 36849,"z-constant": 36811}}');
diff --git a/localdev/local_development.md b/localdev/local_development.md
new file mode 100644
index 00000000..1e70afe2
--- /dev/null
+++ b/localdev/local_development.md
@@ -0,0 +1,54 @@
+# Local Development
+
+## Background
+This docker-compose can replace the docker-compose in geomag-stacks. This docker-compose uses the existing
+Dockerfile to create a `localdev-web` container. Ideally, this will more closely mimic what is happening in
+production. This docker-compose is also creating a mysql container and initializing the metadata and session
+table. It is also creating a `localdev-edge` container using the geomag-edge Dockerfile in geomag-stacks. 
+geomag-edge was not moved out of geomag-stacks out of an abudance of caution to prevent internal information
+from being revealed.
+
+## Prerequisites
+For mac development:
+1. Download colima using homebrew.
+2. Start colima and edit virtual machine sizes using `colima start --edit`. Increase the number of CPUs to 16 and the memory to 16.
+3. Add the following env vars to your .bashrc or .zshrc. Replace the EDGE_PATH and 
+EDGE_DATA_PATH with the absolute path to your geomag-stack. See the instructions in metadata_webservice.md for instructions on creating the OPENID_CLIENT_ID and
+OPENID_CLIENT_SECRET. The rest of the values can be found in the geomag-stack docker-compose. Run the source command to pick up the new values (ex `source ./bashrc`).
+```
+export EDGE_PATH="/Users/{user}/geomag-stack/geomag-edge"
+export EDGE_DATA_PATH="/Users/{user}/geomag-stack/edge-data"
+export OPENID_CLIENT_ID=""
+export OPENID_CLIENT_SECRET=""
+export DATABASE_URL=""
+export OPENID_METADATA_URL=""
+export SECRET_KEY=""
+export SECRET_SALT=""
+export MYSQL_ROOT_PASSWORD=""
+```
+
+## Use
+To simply start build and run all the containers, make sure you are in the /localdev directory and run the following command.
+```
+docker-compose up
+```
+
+For more granual control, there are a few helpful scripts in /localdev that you can run from root or from the folder. 
+
+`start_services.sh`: Use this command to build and run all the services.
+
+`restart_web.sh`: Use this command to just pick up changes in the webservice code 
+without having to rebuild the database and edge containers.
+
+`cleanup_docker.sh`: Use this command to stop all docker containers, remove them, 
+and remove the images.
+
+## Helpful Docker Commands
+`docker ps` shows the running containers. 
+
+`docker ps -a` shows all the containers, even ones that have exited. This is helpful to see logs for containers that have unexpectedly exited.
+
+`docker logs -f {CONTAINER ID or NAME}` shows the logs. The `-f` continues streaming the new output.
+
+`docker exec -it {CONTAINER ID} /bin/bash` execs into the container. This can be helpful for exploring the mysql db. After you exec into the mysql container, run `mysql -u root -p` in the container. Input the password you set to MYSQL_ROOT_PASSWORD. Now
+you can use SQL to explore the db.
diff --git a/localdev/start_services.sh b/localdev/start_services.sh
index 78d8fc1d..faf2cb26 100755
--- a/localdev/start_services.sh
+++ b/localdev/start_services.sh
@@ -6,7 +6,7 @@ docker-compose build edge database
 docker-compose up -d edge database
 
 # let database initialize
-sleep 10;
+sleep 15;
 
 docker-compose build web
 
-- 
GitLab


From 16e5ad1159ee291a980c9b10b65c9b7b3a24394b Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Tue, 12 Nov 2024 10:00:18 -0700
Subject: [PATCH 03/16] edit readme

---
 localdev/local_development.md | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/localdev/local_development.md b/localdev/local_development.md
index 1e70afe2..63f5b919 100644
--- a/localdev/local_development.md
+++ b/localdev/local_development.md
@@ -2,10 +2,9 @@
 
 ## Background
 This docker-compose can replace the docker-compose in geomag-stacks. This docker-compose uses the existing
-Dockerfile to create a `localdev-web` container. Ideally, this will more closely mimic what is happening in
-production. This docker-compose is also creating a mysql container and initializing the metadata and session
-table. It is also creating a `localdev-edge` container using the geomag-edge Dockerfile in geomag-stacks. 
-geomag-edge was not moved out of geomag-stacks out of an abudance of caution to prevent internal information
+Dockerfile to create a `localdev-web-1` container. Ideally, this will more closely mimic what is happening in
+production. This docker-compose is also creating a mysql container called `localdev-database-1` and initializing the metadata and session tables. It is also creating a `localdev-edge-1` container using the geomag-edge Dockerfile in geomag-stacks. 
+The geomag-edge folder was not moved out of geomag-stacks out of an abudance of caution to prevent internal information
 from being revealed.
 
 ## Prerequisites
@@ -14,7 +13,7 @@ For mac development:
 2. Start colima and edit virtual machine sizes using `colima start --edit`. Increase the number of CPUs to 16 and the memory to 16.
 3. Add the following env vars to your .bashrc or .zshrc. Replace the EDGE_PATH and 
 EDGE_DATA_PATH with the absolute path to your geomag-stack. See the instructions in metadata_webservice.md for instructions on creating the OPENID_CLIENT_ID and
-OPENID_CLIENT_SECRET. The rest of the values can be found in the geomag-stack docker-compose. Run the source command to pick up the new values (ex `source ./bashrc`).
+OPENID_CLIENT_SECRET. The rest of the values can be found in the geomag-stack docker-compose. Run the source command to pick up the new values (ex `source ~/.bashrc`).
 ```
 export EDGE_PATH="/Users/{user}/geomag-stack/geomag-edge"
 export EDGE_DATA_PATH="/Users/{user}/geomag-stack/edge-data"
-- 
GitLab


From 6a2f7e699ae649bb6b028fed9b91f52df6bc7c62 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Tue, 12 Nov 2024 11:43:45 -0700
Subject: [PATCH 04/16] updated some docs

---
 docs/install.md               |  2 +-
 docs/metadata_webservice.md   | 55 +----------------------------------
 localdev/local_development.md |  5 +++-
 3 files changed, 6 insertions(+), 56 deletions(-)

diff --git a/docs/install.md b/docs/install.md
index 1bc684aa..7e5e1fd7 100644
--- a/docs/install.md
+++ b/docs/install.md
@@ -1,7 +1,7 @@
 # Installation
 This document describes installation instructions for users. For those that wish to modify or develop custom code within **geomag-algorithms**, or prefer to install the Docker container, please see the following documents:
 > - [Develop](./develop.md) provides installation instruction for developers.
-> - [Docker](./install_docker.md) describes Docker container installation and usage.
+> - [Docker](../localdev/local_development.md) describes Docker container installation and usage.
 
 ## Requirements
 
diff --git a/docs/metadata_webservice.md b/docs/metadata_webservice.md
index 9c0b5141..d29dca4c 100644
--- a/docs/metadata_webservice.md
+++ b/docs/metadata_webservice.md
@@ -1,41 +1,5 @@
 # Running the Metadata Webservice Locally
 
-## Run mysql in a container (for local development)
-
-```
-docker run --rm --name mysql-db -e MYSQL_ROOT_PASSWORD=password -p 3306:3306 mysql:5.7
-```
-
-This exposes port 3306 so python can connect locally. When running the webservice in a container, container links should be used so the container can access the database container.
-
-## Set up schema in database
-
-> This is only needed the first time the database is created. Volume mounts can make this more persistent.
-
-```
-export DATABASE_URL=mysql://root:password@localhost/geomag_operations
-```
-
-### Create mysql database
-```
-docker exec -it mysql-db mysql -uroot -ppassword
-```
-> Inside mysql container:
-```
-CREATE DATABASE geomag_operations;
-exit
-```
-
-```
-poetry run python create_db.py
-```
-
-### Add some testing data (depends on DATABASE_URL environment set above).
-
-```
-poetry run python test_metadata.py
-```
-
 ## Set up OpenID application in code.usgs.gov.
 
 - Under your account, go to settings
@@ -52,21 +16,4 @@ poetry run python test_metadata.py
 
   Scopes: `openid`, `profile`, `email`
 
-## Start webservice
-
-- Export variables used for authentication:
-
-```
-export DATABASE_URL=mysql://root:password@localhost/geomag_operations
-export OPENID_CLIENT_ID={Application ID}
-export OPENID_CLIENT_SECRET={Secret}
-export OPENID_METADATA_URL=https://code.usgs.gov/.well-known/openid-configuration
-export SECRET_KEY=changeme
-export SECRET_SALT=salt
-```
-
-- Run app
-
-```
-poetry run uvicorn geomagio.api:app
-```
+[Docker](../localdev/local_development.md) describes Docker container installation and usage.
diff --git a/localdev/local_development.md b/localdev/local_development.md
index 63f5b919..1a905d36 100644
--- a/localdev/local_development.md
+++ b/localdev/local_development.md
@@ -32,7 +32,7 @@ To simply start build and run all the containers, make sure you are in the /loca
 docker-compose up
 ```
 
-For more granual control, there are a few helpful scripts in /localdev that you can run from root or from the folder. 
+For more granual control, there are a few helpful scripts in /localdev that you can run from the folder. 
 
 `start_services.sh`: Use this command to build and run all the services.
 
@@ -51,3 +51,6 @@ and remove the images.
 
 `docker exec -it {CONTAINER ID} /bin/bash` execs into the container. This can be helpful for exploring the mysql db. After you exec into the mysql container, run `mysql -u root -p` in the container. Input the password you set to MYSQL_ROOT_PASSWORD. Now
 you can use SQL to explore the db.
+
+## Running the CLI locally
+Exec into the `localdev-web-1` container using
\ No newline at end of file
-- 
GitLab


From 10599ee7ae1d866db4ec5bccd3a7cadfbc318b1a Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Tue, 12 Nov 2024 11:45:57 -0700
Subject: [PATCH 05/16] more updates

---
 docs/metadata_webservice.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/docs/metadata_webservice.md b/docs/metadata_webservice.md
index d29dca4c..23600254 100644
--- a/docs/metadata_webservice.md
+++ b/docs/metadata_webservice.md
@@ -16,4 +16,5 @@
 
   Scopes: `openid`, `profile`, `email`
 
-[Docker](../localdev/local_development.md) describes Docker container installation and usage.
+## Running with Docker
+[local_development.md](../localdev/local_development.md) describes Docker container installation and usage.
-- 
GitLab


From 05c8ad65fb0b30ec49baaaba51658ac8c4d457f4 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Tue, 19 Nov 2024 14:44:33 -0700
Subject: [PATCH 06/16] trying to add reload

---
 localdev/docker-compose.yml   | 5 ++++-
 localdev/local_development.md | 4 ++--
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/localdev/docker-compose.yml b/localdev/docker-compose.yml
index 1b18a11d..5ffc161e 100644
--- a/localdev/docker-compose.yml
+++ b/localdev/docker-compose.yml
@@ -25,6 +25,7 @@ services:
     build:
       context: ../.
       dockerfile: ./Dockerfile
+    command: bash -c "cd /data && python create_db.py && python test_metadata.py && uvicorn --reload geomagio.api:app --host 0.0.0.0 --workers 2"
     ports:
       - '8000:8000'
     extra_hosts:
@@ -37,5 +38,7 @@ services:
       - OPENID_METADATA_URL=$OPENID_METADATA_URL
       - SECRET_KEY=$SECRET_KEY
       - SECRET_SALT=$SECRET_SALT
-      - WEBSERVICE=true
+      - WEBSERVICE=false
+    volumes:
+      - ../.:/data
     restart: always
diff --git a/localdev/local_development.md b/localdev/local_development.md
index 1a905d36..5f884c6c 100644
--- a/localdev/local_development.md
+++ b/localdev/local_development.md
@@ -52,5 +52,5 @@ and remove the images.
 `docker exec -it {CONTAINER ID} /bin/bash` execs into the container. This can be helpful for exploring the mysql db. After you exec into the mysql container, run `mysql -u root -p` in the container. Input the password you set to MYSQL_ROOT_PASSWORD. Now
 you can use SQL to explore the db.
 
-## Running the CLI locally
-Exec into the `localdev-web-1` container using
\ No newline at end of file
+## Helpful Tips
+Sometimes the mysql container exits immediately with code 139 due to insufficient memory. Restarting colima resolves this issue.
-- 
GitLab


From d83bf6293c0c84dae646d7f4593d366634eb194d Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Tue, 19 Nov 2024 15:38:47 -0700
Subject: [PATCH 07/16] cleanup for mr

---
 localdev/cleanup_docker.sh    |  5 -----
 localdev/init.sql             | 38 -----------------------------------
 localdev/local_development.md | 12 -----------
 localdev/restart_web.sh       |  6 ------
 localdev/start_services.sh    | 13 ------------
 5 files changed, 74 deletions(-)
 delete mode 100755 localdev/cleanup_docker.sh
 delete mode 100755 localdev/restart_web.sh
 delete mode 100755 localdev/start_services.sh

diff --git a/localdev/cleanup_docker.sh b/localdev/cleanup_docker.sh
deleted file mode 100755
index 15736cce..00000000
--- a/localdev/cleanup_docker.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-docker stop $(docker ps -aq)
-
-docker rm -vf $(docker ps -aq)
-
-docker rmi -f $(docker images -aq)
diff --git a/localdev/init.sql b/localdev/init.sql
index 5cf5bb6a..da36a61e 100644
--- a/localdev/init.sql
+++ b/localdev/init.sql
@@ -1,39 +1 @@
 CREATE DATABASE IF NOT EXISTS geomag_operations;
-CREATE TABLE metadata (
-    id int NOT NULL AUTO_INCREMENT,
-    created_by varchar(255),
-    created_time TIMESTAMP,
-    updated_by varchar(255),
-    updated_time TIMESTAMP,
-    starttime TIMESTAMP,
-    endtime TIMESTAMP,
-    network varchar(255),
-    station varchar(255),
-    channel varchar(255),
-    location varchar(255),
-    category varchar(255),
-    priority int,
-    data_valid boolean,
-    status varchar(255),
-    metadata JSON,
-    comment TEXT,
-    review_comment TEXT,
-    PRIMARY KEY (id)
-);
-
-CREATE INDEX index_category_time ON metadata (
-    category,
-    starttime,
-    endtime
-);
-
-CREATE TABLE session (
-    id int NOT NULL AUTO_INCREMENT,
-    session_id varchar(255),
-    data TEXT,
-    updated TIMESTAMP,
-    PRIMARY KEY (id)
-);
-
-INSERT INTO metadata (category, created_by, network, station, priority, data_valid, metadata) 
-VALUES ('instrument', 'test_metadata.py', 'NT', 'BDT', 1, true, '{"type": "FGE","channels": {"U": [{ "channel": "U_Volt", "offset": 0, "scale": 313.2 }],"V": [{ "channel": "V_Volt", "offset": 0, "scale": 312.3 }],"W": [{ "channel": "W_Volt", "offset": 0, "scale": 312.0 }]},"electronics": {"serial": "E0542","x-scale": 313.2,"y-scale": 312.3,"z-scale": 312.0,"temperature-scale": 0.01},"sensor": {"serial": "S0419","x-constant": 36958,"y-constant": 36849,"z-constant": 36811}}');
diff --git a/localdev/local_development.md b/localdev/local_development.md
index 5f884c6c..e3e216d9 100644
--- a/localdev/local_development.md
+++ b/localdev/local_development.md
@@ -32,23 +32,11 @@ To simply start build and run all the containers, make sure you are in the /loca
 docker-compose up
 ```
 
-For more granual control, there are a few helpful scripts in /localdev that you can run from the folder. 
-
-`start_services.sh`: Use this command to build and run all the services.
-
-`restart_web.sh`: Use this command to just pick up changes in the webservice code 
-without having to rebuild the database and edge containers.
-
-`cleanup_docker.sh`: Use this command to stop all docker containers, remove them, 
-and remove the images.
-
 ## Helpful Docker Commands
 `docker ps` shows the running containers. 
 
 `docker ps -a` shows all the containers, even ones that have exited. This is helpful to see logs for containers that have unexpectedly exited.
 
-`docker logs -f {CONTAINER ID or NAME}` shows the logs. The `-f` continues streaming the new output.
-
 `docker exec -it {CONTAINER ID} /bin/bash` execs into the container. This can be helpful for exploring the mysql db. After you exec into the mysql container, run `mysql -u root -p` in the container. Input the password you set to MYSQL_ROOT_PASSWORD. Now
 you can use SQL to explore the db.
 
diff --git a/localdev/restart_web.sh b/localdev/restart_web.sh
deleted file mode 100755
index 8f0d69f4..00000000
--- a/localdev/restart_web.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-set -ex
-
-docker-compose build web
-
-docker-compose up -d web
diff --git a/localdev/start_services.sh b/localdev/start_services.sh
deleted file mode 100755
index faf2cb26..00000000
--- a/localdev/start_services.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-set -ex
-
-docker-compose build edge database
-
-docker-compose up -d edge database
-
-# let database initialize
-sleep 15;
-
-docker-compose build web
-
-docker-compose up -d web
-- 
GitLab


From e3f1bf8b46f567f75737c6916d561962bb55a7b8 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Wed, 20 Nov 2024 11:41:57 -0700
Subject: [PATCH 08/16] updated readme and clarified some points on the
 local_development docs

---
 README.md                     | 29 +----------------------------
 localdev/local_development.md | 13 +++++++------
 2 files changed, 8 insertions(+), 34 deletions(-)

diff --git a/README.md b/README.md
index 9de81eed..436c9247 100644
--- a/README.md
+++ b/README.md
@@ -81,34 +81,7 @@ output_factory.write_file(
 
 ### Docker
 
-Docker is the simplest install option.
-
-1. Create and start a new container
-
-   named `geomagio`,
-   listening on local port `8000`,
-   from the image `usgs/geomag-algorithms` on docker hub
-
-   ```
-   docker run -dit --name geomagio -p 8000:8000 usgs/geomag-algorithms
-   ```
-
-2. Use the running container
-
-- Run the `geomag.py` command line interface:
-
-  > To work with files outside the container,
-  > use a volume mount when starting the container
-
-  ```
-  docker exec -it geomagio geomag.py
-  ```
-
-- Or, to run an interactive python prompt:
-
-  ```
-  docker exec -it geomagio python
-  ```
+Docker is the simplest install option. Find instructions [here](./localdev/local_development.md).
 
 ## Algorithms
 
diff --git a/localdev/local_development.md b/localdev/local_development.md
index e3e216d9..ec84029c 100644
--- a/localdev/local_development.md
+++ b/localdev/local_development.md
@@ -1,11 +1,7 @@
 # Local Development
 
 ## Background
-This docker-compose can replace the docker-compose in geomag-stacks. This docker-compose uses the existing
-Dockerfile to create a `localdev-web-1` container. Ideally, this will more closely mimic what is happening in
-production. This docker-compose is also creating a mysql container called `localdev-database-1` and initializing the metadata and session tables. It is also creating a `localdev-edge-1` container using the geomag-edge Dockerfile in geomag-stacks. 
-The geomag-edge folder was not moved out of geomag-stacks out of an abudance of caution to prevent internal information
-from being revealed.
+This workflow can replace the docker-compose in geomag-stacks. This docker-compose uses the existing Dockerfile to create a `localdev-web-1` container, instead of pulling down the latest geomag-algorithms image. However, it is initializing the mysql database the same way it is done in geomag-stacks. This docker-compose is also creating a mysql container called `localdev-database-1` and an edge container called `localdev-edge-1` using the geomag-edge Dockerfile in geomag-stacks. The geomag-edge folder was not moved out of geomag-stacks out of an abundance of caution to prevent internal information from being revealed.
 
 ## Prerequisites
 For mac development:
@@ -27,11 +23,16 @@ export MYSQL_ROOT_PASSWORD=""
 ```
 
 ## Use
-To simply start build and run all the containers, make sure you are in the /localdev directory and run the following command.
+To build and run all the containers, make sure you are in the /localdev directory and run the following command. This includes a reload command for the web container. It will restart the web container when changes are made locally.
 ```
 docker-compose up
 ```
 
+If you're not developing locally, use the following command to run the containers in the background. Reload will not work this way.
+```
docker-compose up -d
+```
+
 ## Helpful Docker Commands
 `docker ps` shows the running containers. 
 
-- 
GitLab


From 745c243fc6dcf8bfdec24d6ca780075fad52a6fc Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Fri, 22 Nov 2024 09:00:31 -0700
Subject: [PATCH 09/16] working test bones

---
 geomagio/api/db/MetadataDatabaseFactory.py | 13 +++++--
 geomagio/metadata/Metadata.py              | 33 ++++++++++++----
 test/db/MetadataDatabaseFactory_test.py    | 45 ++++++++++++++++++++++
 3 files changed, 81 insertions(+), 10 deletions(-)
 create mode 100644 test/db/MetadataDatabaseFactory_test.py

diff --git a/geomagio/api/db/MetadataDatabaseFactory.py b/geomagio/api/db/MetadataDatabaseFactory.py
index fe538302..7e7fb9ab 100644
--- a/geomagio/api/db/MetadataDatabaseFactory.py
+++ b/geomagio/api/db/MetadataDatabaseFactory.py
@@ -12,14 +12,21 @@ from .metadata_table import metadata as metadata_table
 
 class MetadataDatabaseFactory(object):
     def __init__(self, database: Database):
+        print("init")
         self.database = database
 
     async def create_metadata(self, meta: Metadata) -> Metadata:
+        print(1)
         query = metadata_table.insert()
+        print(2)
         meta.status = meta.status or "new"
-        values = meta.datetime_dict(exclude={"id", "metadata_id"}, exclude_none=True)
+        print(3)
+        values = meta.model_dump(exclude={"id", "metadata_id"}, exclude_none=True)
+        print(4)
         query = query.values(**values)
+        print(5)
         meta.id = await self.database.execute(query)
+        print(6)
         return meta
 
     async def get_metadata(
@@ -118,7 +125,7 @@ class MetadataDatabaseFactory(object):
             # write current record to metadata history table
             original_metadata = await self.get_metadata_by_id(id=meta.id)
             original_metadata.metadata_id = original_metadata.id
-            values = original_metadata.datetime_dict(exclude={"id"}, exclude_none=True)
+            values = original_metadata.model_dump(exclude={"id"}, exclude_none=True)
             query = metadata_history.insert()
             query = query.values(**values)
             original_metadata.id = await self.database.execute(query)
@@ -126,7 +133,7 @@ class MetadataDatabaseFactory(object):
             meta.updated_by = updated_by
             meta.updated_time = UTCDateTime()
             query = metadata_table.update().where(metadata_table.c.id == meta.id)
-            values = meta.datetime_dict(exclude={"id", "metadata_id"})
+            values = meta.model_dump(exclude={"id", "metadata_id"})
             query = query.values(**values)
             await self.database.execute(query)
             return await self.get_metadata_by_id(id=meta.id)
diff --git a/geomagio/metadata/Metadata.py b/geomagio/metadata/Metadata.py
index 770f0fd3..7cae0430 100644
--- a/geomagio/metadata/Metadata.py
+++ b/geomagio/metadata/Metadata.py
@@ -2,7 +2,7 @@ from datetime import timezone
 from typing import Dict, Optional
 
 from obspy import UTCDateTime
-from pydantic import field_validator, BaseModel
+from pydantic import field_validator, field_serializer, BaseModel
 
 from .MetadataCategory import MetadataCategory
 from ..pydantic_utcdatetime import CustomUTCDateTimeType
@@ -79,12 +79,31 @@ class Metadata(BaseModel):
     # metadata status indicator
     status: Optional[str] = None
 
-    def datetime_dict(self, **kwargs):
-        values = self.model_dump(**kwargs)
-        for key in ["created_time", "updated_time", "starttime", "endtime"]:
-            if key in values and values[key] is not None:
-                values[key] = values[key].datetime.replace(tzinfo=timezone.utc)
-        return values
+    # serialize pydantic CustomUTCDateTimeType type into UTCDateTime for model_dump() and make sure
+    # the timezone is in utc
+    @field_serializer("created_time")
+    def serialize_created_time(self, created_time: UTCDateTime):
+        if self.created_time is not None:
+            self.created_time = self.created_time.datetime.replace(tzinfo=timezone.utc)
+        return self.created_time
+
+    @field_serializer("updated_time")
+    def serialize_updated_time(self, updated_time: UTCDateTime):
+        if self.updated_time is not None:
+            self.updated_time = self.updated_time.datetime.replace(tzinfo=timezone.utc)
+        return self.updated_time
+    
+    @field_serializer("starttime")
+    def serialize_starttime(self, starttime: UTCDateTime):
+        if self.starttime is not None:
+            self.starttime = self.starttime.datetime.replace(tzinfo=timezone.utc)
+        return self.starttime
+    
+    @field_serializer("endtime")
+    def serialize_endtime(self, endtime: UTCDateTime):
+        if self.endtime is not None:
+            self.endtime = self.endtime.datetime.replace(tzinfo=timezone.utc)
+        return self.endtime
 
     @field_validator("created_time")
     @classmethod
diff --git a/test/db/MetadataDatabaseFactory_test.py b/test/db/MetadataDatabaseFactory_test.py
new file mode 100644
index 00000000..272304ea
--- /dev/null
+++ b/test/db/MetadataDatabaseFactory_test.py
@@ -0,0 +1,45 @@
+import unittest
+from unittest.mock import patch
+
+from geomagio.api.db import MetadataDatabaseFactory
+from geomagio.metadata import Metadata, MetadataCategory
+from geomagio.api.db.metadata_table import metadata as metadata_table
+
+class TestMetadataDatabaseFactoryClass(unittest.IsolatedAsyncioTestCase):
+
+    @patch('databases.Database.connect')
+    async def test_create_metadata(self, mock_connect_to_db):
+        mock_connection = mock_connect_to_db.return_value
+        mock_connection.execute.return_value = {"id": 1}
+
+        test_data = Metadata(
+            category=MetadataCategory.INSTRUMENT,
+            created_by="test_metadata.py",
+            network="NT",
+            station="BDT",
+            metadata={
+                "type": "FGE",
+                "channels": {
+                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+                },
+                "electronics": {
+                    "serial": "E0542",
+                    "x-scale": 313.2,
+                    "y-scale": 312.3,
+                    "z-scale": 312.0,
+                    "temperature-scale": 0.01,
+                },
+                "sensor": {
+                    "serial": "S0419",
+                    "x-constant": 36958,
+                    "y-constant": 36849,
+                    "z-constant": 36811,
+                },
+            },
+        )
+        
+        await MetadataDatabaseFactory(database=mock_connection).create_metadata(test_data)
+
+        mock_connection.execute.assert_called_once()
-- 
GitLab


From 78e08c1ee03de0e561757a12dddf9350dffb7e29 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Fri, 22 Nov 2024 11:15:37 -0700
Subject: [PATCH 10/16] working test

---
 geomagio/api/db/MetadataDatabaseFactory.py |  1 -
 test/db/MetadataDatabaseFactory_test.py    | 58 ++++++++++++++++++----
 2 files changed, 48 insertions(+), 11 deletions(-)

diff --git a/geomagio/api/db/MetadataDatabaseFactory.py b/geomagio/api/db/MetadataDatabaseFactory.py
index 7e7fb9ab..19463248 100644
--- a/geomagio/api/db/MetadataDatabaseFactory.py
+++ b/geomagio/api/db/MetadataDatabaseFactory.py
@@ -12,7 +12,6 @@ from .metadata_table import metadata as metadata_table
 
 class MetadataDatabaseFactory(object):
     def __init__(self, database: Database):
-        print("init")
         self.database = database
 
     async def create_metadata(self, meta: Metadata) -> Metadata:
diff --git a/test/db/MetadataDatabaseFactory_test.py b/test/db/MetadataDatabaseFactory_test.py
index 272304ea..6a707280 100644
--- a/test/db/MetadataDatabaseFactory_test.py
+++ b/test/db/MetadataDatabaseFactory_test.py
@@ -1,17 +1,15 @@
 import unittest
-from unittest.mock import patch
+from unittest.mock import AsyncMock, patch, MagicMock
+from databases import Database
 
 from geomagio.api.db import MetadataDatabaseFactory
 from geomagio.metadata import Metadata, MetadataCategory
 from geomagio.api.db.metadata_table import metadata as metadata_table
 
-class TestMetadataDatabaseFactoryClass(unittest.IsolatedAsyncioTestCase):
-
-    @patch('databases.Database.connect')
-    async def test_create_metadata(self, mock_connect_to_db):
-        mock_connection = mock_connect_to_db.return_value
-        mock_connection.execute.return_value = {"id": 1}
+class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
+    @patch("databases.Database.execute", new_callable=AsyncMock)
+    async def test_create_metadata(self, mock_execute):
         test_data = Metadata(
             category=MetadataCategory.INSTRUMENT,
             created_by="test_metadata.py",
@@ -39,7 +37,47 @@ class TestMetadataDatabaseFactoryClass(unittest.IsolatedAsyncioTestCase):
                 },
             },
         )
-        
-        await MetadataDatabaseFactory(database=mock_connection).create_metadata(test_data)
 
-        mock_connection.execute.assert_called_once()
+        db = Database("sqlite:///:memory:")
+
+        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
+
+        expected_values = {
+            "category": "instrument",
+            "created_by": "test_metadata.py",
+            "network": "NT",
+            "station": "BDT",
+            "metadata": {
+                "type": "FGE",
+                "channels": {
+                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+                },
+                "electronics": {
+                    "serial": "E0542",
+                    "x-scale": 313.2,
+                    "y-scale": 312.3,
+                    "z-scale": 312.0,
+                    "temperature-scale": 0.01,
+                },
+                "sensor": {
+                    "serial": "S0419",
+                    "x-constant": 36958,
+                    "y-constant": 36849,
+                    "z-constant": 36811,
+                },
+            },
+            "data_valid": True,
+            "priority": 1,
+            "status": "new"
+        }
+
+        expected_insert = metadata_table.insert().values(**expected_values)
+
+        mock_execute.assert_called_once()
+        mock_params = mock_execute.call_args.args[0].compile().params
+
+        expected_params = expected_insert.compile().params
+
+        assert mock_params == expected_params
-- 
GitLab


From 43f46b26fadb70e29e9f39922d3447b48942c50a Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Fri, 22 Nov 2024 15:21:39 -0700
Subject: [PATCH 11/16] wip

---
 geomagio/api/db/MetadataDatabaseFactory.py |   6 -
 geomagio/metadata/Metadata.py              |  11 +-
 test/db/MetadataDatabaseFactory_test.py    | 168 ++++++++++++++++++++-
 3 files changed, 164 insertions(+), 21 deletions(-)

diff --git a/geomagio/api/db/MetadataDatabaseFactory.py b/geomagio/api/db/MetadataDatabaseFactory.py
index 19463248..b2274e8c 100644
--- a/geomagio/api/db/MetadataDatabaseFactory.py
+++ b/geomagio/api/db/MetadataDatabaseFactory.py
@@ -15,17 +15,11 @@ class MetadataDatabaseFactory(object):
         self.database = database
 
     async def create_metadata(self, meta: Metadata) -> Metadata:
-        print(1)
         query = metadata_table.insert()
-        print(2)
         meta.status = meta.status or "new"
-        print(3)
         values = meta.model_dump(exclude={"id", "metadata_id"}, exclude_none=True)
-        print(4)
         query = query.values(**values)
-        print(5)
         meta.id = await self.database.execute(query)
-        print(6)
         return meta
 
     async def get_metadata(
diff --git a/geomagio/metadata/Metadata.py b/geomagio/metadata/Metadata.py
index 7cae0430..53f605f0 100644
--- a/geomagio/metadata/Metadata.py
+++ b/geomagio/metadata/Metadata.py
@@ -2,7 +2,7 @@ from datetime import timezone
 from typing import Dict, Optional
 
 from obspy import UTCDateTime
-from pydantic import field_validator, field_serializer, BaseModel
+from pydantic import field_validator, field_serializer, BaseModel, Field
 
 from .MetadataCategory import MetadataCategory
 from ..pydantic_utcdatetime import CustomUTCDateTimeType
@@ -52,7 +52,9 @@ class Metadata(BaseModel):
     metadata_id: Optional[int] = None
     # author
     created_by: Optional[str] = None
-    created_time: Optional[CustomUTCDateTimeType] = None
+    created_time: CustomUTCDateTimeType = Field(
+        default_factory=lambda: UTCDateTime()
+    )
     # editor
     updated_by: Optional[str] = None
     updated_time: Optional[CustomUTCDateTimeType] = None
@@ -104,8 +106,3 @@ class Metadata(BaseModel):
         if self.endtime is not None:
             self.endtime = self.endtime.datetime.replace(tzinfo=timezone.utc)
         return self.endtime
-
-    @field_validator("created_time")
-    @classmethod
-    def set_default_created_time(cls, created_time: UTCDateTime = None) -> UTCDateTime:
-        return created_time or UTCDateTime()
diff --git a/test/db/MetadataDatabaseFactory_test.py b/test/db/MetadataDatabaseFactory_test.py
index 6a707280..d7705001 100644
--- a/test/db/MetadataDatabaseFactory_test.py
+++ b/test/db/MetadataDatabaseFactory_test.py
@@ -1,15 +1,17 @@
+import datetime
 import unittest
-from unittest.mock import AsyncMock, patch, MagicMock
+from unittest.mock import AsyncMock, patch
 from databases import Database
 
+from obspy import UTCDateTime
+
 from geomagio.api.db import MetadataDatabaseFactory
 from geomagio.metadata import Metadata, MetadataCategory
-from geomagio.api.db.metadata_table import metadata as metadata_table
 
 class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
     @patch("databases.Database.execute", new_callable=AsyncMock)
-    async def test_create_metadata(self, mock_execute):
+    async def test_create_metadata_defaults(self, mock_execute):
         test_data = Metadata(
             category=MetadataCategory.INSTRUMENT,
             created_by="test_metadata.py",
@@ -42,6 +44,7 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
         await MetadataDatabaseFactory(database=db).create_metadata(test_data)
 
+        # assert data_valid, priority, and status are set to the correct defaults
         expected_values = {
             "category": "instrument",
             "created_by": "test_metadata.py",
@@ -73,11 +76,160 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
             "status": "new"
         }
 
-        expected_insert = metadata_table.insert().values(**expected_values)
-
         mock_execute.assert_called_once()
-        mock_params = mock_execute.call_args.args[0].compile().params
+        called_params = mock_execute.call_args.args[0].compile().params
+
+        assert called_params == expected_values
+
+    # @patch("databases.Database.execute", new_callable=AsyncMock)
+    # async def test_create_metadata_with_ids(self, mock_execute):
+    #     now = datetime.datetime.now(tz=datetime.timezone.utc)
+    #     test_data = Metadata(
+    #         id=1234,
+    #         created_time=now,
+    #         metadata_id=5678,
+    #         category=MetadataCategory.INSTRUMENT,
+    #         created_by="test_metadata.py",
+    #         network="NT",
+    #         station="BDT",
+    #         metadata={
+    #             "type": "FGE",
+    #             "channels": {
+    #                 "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+    #                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+    #                 "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+    #             },
+    #             "electronics": {
+    #                 "serial": "E0542",
+    #                 "x-scale": 313.2,
+    #                 "y-scale": 312.3,
+    #                 "z-scale": 312.0,
+    #                 "temperature-scale": 0.01,
+    #             },
+    #             "sensor": {
+    #                 "serial": "S0419",
+    #                 "x-constant": 36958,
+    #                 "y-constant": 36849,
+    #                 "z-constant": 36811,
+    #             },
+    #         },
+    #     )
+
+    #     db = Database("sqlite:///:memory:")
+
+    #     await MetadataDatabaseFactory(database=db).create_metadata(test_data)
+
+    #     # assert id is removed
+    #     expected_values = {
+    #         "created_time": now,
+    #         "category": "instrument",
+    #         "created_by": "test_metadata.py",
+    #         "network": "NT",
+    #         "station": "BDT",
+    #         "metadata": {
+    #             "type": "FGE",
+    #             "channels": {
+    #                 "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+    #                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+    #                 "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+    #             },
+    #             "electronics": {
+    #                 "serial": "E0542",
+    #                 "x-scale": 313.2,
+    #                 "y-scale": 312.3,
+    #                 "z-scale": 312.0,
+    #                 "temperature-scale": 0.01,
+    #             },
+    #             "sensor": {
+    #                 "serial": "S0419",
+    #                 "x-constant": 36958,
+    #                 "y-constant": 36849,
+    #                 "z-constant": 36811,
+    #             },
+    #         },
+    #         "data_valid": True,
+    #         "priority": 1,
+    #         "status": "new"
+    #     }
+
+    #     mock_execute.assert_called_once()
+    #     called_params = mock_execute.call_args.args[0].compile().params
+
+    #     assert called_params == expected_values
+
+    # @patch("databases.Database.execute", new_callable=AsyncMock)
+    # async def test_create_metadata_with_metadata_id(self, mock_execute):
+    #     test_data = Metadata(
+    #         metadata_id=5678,
+    #         category=MetadataCategory.INSTRUMENT,
+    #         created_by="test_metadata.py",
+    #         network="NT",
+    #         station="BDT",
+    #         metadata={
+    #             "type": "FGE",
+    #             "channels": {
+    #                 "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+    #                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+    #                 "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+    #             },
+    #             "electronics": {
+    #                 "serial": "E0542",
+    #                 "x-scale": 313.2,
+    #                 "y-scale": 312.3,
+    #                 "z-scale": 312.0,
+    #                 "temperature-scale": 0.01,
+    #             },
+    #             "sensor": {
+    #                 "serial": "S0419",
+    #                 "x-constant": 36958,
+    #                 "y-constant": 36849,
+    #                 "z-constant": 36811,
+    #             },
+    #         },
+    #     )
+
+    #     db = Database("sqlite:///:memory:")
+
+    #     await MetadataDatabaseFactory(database=db).create_metadata(test_data)
+
+    #     # assert metadata_id is removed on values
+    #     expected_values = {
+    #         "metadata_id": 5678,
+    #         "category": "instrument",
+    #         "created_by": "test_metadata.py",
+    #         "network": "NT",
+    #         "station": "BDT",
+    #         "metadata": {
+    #             "type": "FGE",
+    #             "channels": {
+    #                 "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+    #                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+    #                 "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+    #             },
+    #             "electronics": {
+    #                 "serial": "E0542",
+    #                 "x-scale": 313.2,
+    #                 "y-scale": 312.3,
+    #                 "z-scale": 312.0,
+    #                 "temperature-scale": 0.01,
+    #             },
+    #             "sensor": {
+    #                 "serial": "S0419",
+    #                 "x-constant": 36958,
+    #                 "y-constant": 36849,
+    #                 "z-constant": 36811,
+    #             },
+    #         },
+    #         "data_valid": True,
+    #         "priority": 1,
+    #         "status": "new"
+    #     }
+
+    #     expected_insert = metadata_table.insert().values(**expected_values)
+    #     expected_params = expected_insert.compile().params
+
 
-        expected_params = expected_insert.compile().params
+    #     mock_execute.assert_called_once()
+    #     called_params = mock_execute.call_args.args[0].compile().params
 
-        assert mock_params == expected_params
+    #     assert called_params == expected_params
\ No newline at end of file
-- 
GitLab


From 7319e1594ad4d80659af100d6beb5057a8c4f2f9 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Mon, 25 Nov 2024 10:41:21 -0700
Subject: [PATCH 12/16] working tests and locally

---
 geomagio/api/secure/metadata.py         |   4 +-
 geomagio/metadata/Metadata.py           |  38 ++-
 geomagio/pydantic_utcdatetime.py        |   6 +
 test/db/MetadataDatabaseFactory_test.py | 409 +++++++++++++++---------
 test/pydantic_utcdatetime_test.py       |   8 +-
 5 files changed, 290 insertions(+), 175 deletions(-)

diff --git a/geomagio/api/secure/metadata.py b/geomagio/api/secure/metadata.py
index 235ca442..404b262b 100644
--- a/geomagio/api/secure/metadata.py
+++ b/geomagio/api/secure/metadata.py
@@ -72,7 +72,9 @@ async def create_metadata(
     metadata = await MetadataDatabaseFactory(database=database).create_metadata(
         meta=metadata
     )
-    return Response(metadata.json(), status_code=201, media_type="application/json")
+    return Response(
+        metadata.model_dump_json(), status_code=201, media_type="application/json"
+    )
 
 
 @router.get(
diff --git a/geomagio/metadata/Metadata.py b/geomagio/metadata/Metadata.py
index 53f605f0..e1a7bd18 100644
--- a/geomagio/metadata/Metadata.py
+++ b/geomagio/metadata/Metadata.py
@@ -2,7 +2,7 @@ from datetime import timezone
 from typing import Dict, Optional
 
 from obspy import UTCDateTime
-from pydantic import field_validator, field_serializer, BaseModel, Field
+from pydantic import field_serializer, BaseModel, Field
 
 from .MetadataCategory import MetadataCategory
 from ..pydantic_utcdatetime import CustomUTCDateTimeType
@@ -52,9 +52,7 @@ class Metadata(BaseModel):
     metadata_id: Optional[int] = None
     # author
     created_by: Optional[str] = None
-    created_time: CustomUTCDateTimeType = Field(
-        default_factory=lambda: UTCDateTime()
-    )
+    created_time: CustomUTCDateTimeType = Field(default_factory=lambda: UTCDateTime())
     # editor
     updated_by: Optional[str] = None
     updated_time: Optional[CustomUTCDateTimeType] = None
@@ -81,28 +79,28 @@ class Metadata(BaseModel):
     # metadata status indicator
     status: Optional[str] = None
 
-    # serialize pydantic CustomUTCDateTimeType type into UTCDateTime for model_dump() and make sure
-    # the timezone is in utc
+    # instructions for model_dump() to serialize pydantic CustomUTCDateTimeType into aware datetime.datetime type
+    # sqlalchemy is expecting aware datetime.datetime, not the string model_dump() creates by default
     @field_serializer("created_time")
     def serialize_created_time(self, created_time: UTCDateTime):
-        if self.created_time is not None:
-            self.created_time = self.created_time.datetime.replace(tzinfo=timezone.utc)
-        return self.created_time
+        if created_time is not None:
+            created_time = created_time.datetime.replace(tzinfo=timezone.utc)
+        return created_time
 
     @field_serializer("updated_time")
     def serialize_updated_time(self, updated_time: UTCDateTime):
-        if self.updated_time is not None:
-            self.updated_time = self.updated_time.datetime.replace(tzinfo=timezone.utc)
-        return self.updated_time
-    
+        if updated_time is not None:
+            updated_time = updated_time.datetime.replace(tzinfo=timezone.utc)
+        return updated_time
+
     @field_serializer("starttime")
     def serialize_starttime(self, starttime: UTCDateTime):
-        if self.starttime is not None:
-            self.starttime = self.starttime.datetime.replace(tzinfo=timezone.utc)
-        return self.starttime
-    
+        if starttime is not None:
+            starttime = starttime.datetime.replace(tzinfo=timezone.utc)
+        return starttime
+
     @field_serializer("endtime")
     def serialize_endtime(self, endtime: UTCDateTime):
-        if self.endtime is not None:
-            self.endtime = self.endtime.datetime.replace(tzinfo=timezone.utc)
-        return self.endtime
+        if endtime is not None:
+            endtime = endtime.datetime.replace(tzinfo=timezone.utc)
+        return endtime
diff --git a/geomagio/pydantic_utcdatetime.py b/geomagio/pydantic_utcdatetime.py
index d60950de..862cd138 100644
--- a/geomagio/pydantic_utcdatetime.py
+++ b/geomagio/pydantic_utcdatetime.py
@@ -2,6 +2,8 @@
 CustomUTCDateTimeType should be used in place of UTCDateTime on pydantic models.
 """
 
+import datetime
+from dateutil import tz
 from obspy import UTCDateTime
 from pydantic_core import CoreSchema, core_schema
 from typing import Annotated, Any
@@ -22,6 +24,10 @@ class CustomUTCDateTimeValidator:
         _handler: GetCoreSchemaHandler,
     ) -> CoreSchema:
         def UTCDateTime_validator(value: Any):
+            # if the user inputs an unaware datetime.datetime, make it aware
+            if isinstance(value, datetime.datetime):
+                if value.tzinfo is not tz.tzutc():
+                    value = value.replace(tzinfo=tz.tzutc())
             try:
                 time = UTCDateTime(value)
             except:
diff --git a/test/db/MetadataDatabaseFactory_test.py b/test/db/MetadataDatabaseFactory_test.py
index d7705001..045d297e 100644
--- a/test/db/MetadataDatabaseFactory_test.py
+++ b/test/db/MetadataDatabaseFactory_test.py
@@ -1,5 +1,6 @@
 import datetime
 import unittest
+from dateutil import tz
 from unittest.mock import AsyncMock, patch
 from databases import Database
 
@@ -8,11 +9,14 @@ from obspy import UTCDateTime
 from geomagio.api.db import MetadataDatabaseFactory
 from geomagio.metadata import Metadata, MetadataCategory
 
+
 class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
     @patch("databases.Database.execute", new_callable=AsyncMock)
     async def test_create_metadata_defaults(self, mock_execute):
+        now = UTCDateTime()
         test_data = Metadata(
+            created_time=now,
             category=MetadataCategory.INSTRUMENT,
             created_by="test_metadata.py",
             network="NT",
@@ -46,6 +50,101 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
         # assert data_valid, priority, and status are set to the correct defaults
         expected_values = {
+            "created_time": datetime.datetime(
+                year=now.year,
+                month=now.month,
+                day=now.day,
+                hour=now.hour,
+                minute=now.minute,
+                second=now.second,
+                microsecond=now.microsecond,
+                tzinfo=tz.tzutc(),
+            ),
+            "category": "instrument",
+            "created_by": "test_metadata.py",
+            "network": "NT",
+            "station": "BDT",
+            "metadata": {
+                "type": "FGE",
+                "channels": {
+                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+                },
+                "electronics": {
+                    "serial": "E0542",
+                    "x-scale": 313.2,
+                    "y-scale": 312.3,
+                    "z-scale": 312.0,
+                    "temperature-scale": 0.01,
+                },
+                "sensor": {
+                    "serial": "S0419",
+                    "x-constant": 36958,
+                    "y-constant": 36849,
+                    "z-constant": 36811,
+                },
+            },
+            "data_valid": True,
+            "priority": 1,
+            "status": "new",
+        }
+
+        mock_execute.assert_called_once()
+        called_params = mock_execute.call_args.args[0].compile().params
+
+        assert called_params == expected_values
+
+    @patch("databases.Database.execute", new_callable=AsyncMock)
+    async def test_create_metadata_with_ids(self, mock_execute):
+        now = UTCDateTime()
+        test_data = Metadata(
+            id=1234,
+            created_time=now,
+            metadata_id=5678,
+            category=MetadataCategory.INSTRUMENT,
+            created_by="test_metadata.py",
+            network="NT",
+            station="BDT",
+            metadata={
+                "type": "FGE",
+                "channels": {
+                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+                },
+                "electronics": {
+                    "serial": "E0542",
+                    "x-scale": 313.2,
+                    "y-scale": 312.3,
+                    "z-scale": 312.0,
+                    "temperature-scale": 0.01,
+                },
+                "sensor": {
+                    "serial": "S0419",
+                    "x-constant": 36958,
+                    "y-constant": 36849,
+                    "z-constant": 36811,
+                },
+            },
+        )
+
+        db = Database("sqlite:///:memory:")
+
+        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
+
+        # assert id and metadata_id are removed
+        expected_values = {
+            "created_time": datetime.datetime(
+                year=now.year,
+                month=now.month,
+                day=now.day,
+                hour=now.hour,
+                minute=now.minute,
+                second=now.second,
+                microsecond=now.microsecond,
+                tzinfo=tz.tzutc(),
+            ),
             "category": "instrument",
             "created_by": "test_metadata.py",
             "network": "NT",
@@ -73,7 +172,7 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
             },
             "data_valid": True,
             "priority": 1,
-            "status": "new"
+            "status": "new",
         }
 
         mock_execute.assert_called_once()
@@ -81,155 +180,159 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
         assert called_params == expected_values
 
-    # @patch("databases.Database.execute", new_callable=AsyncMock)
-    # async def test_create_metadata_with_ids(self, mock_execute):
-    #     now = datetime.datetime.now(tz=datetime.timezone.utc)
-    #     test_data = Metadata(
-    #         id=1234,
-    #         created_time=now,
-    #         metadata_id=5678,
-    #         category=MetadataCategory.INSTRUMENT,
-    #         created_by="test_metadata.py",
-    #         network="NT",
-    #         station="BDT",
-    #         metadata={
-    #             "type": "FGE",
-    #             "channels": {
-    #                 "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-    #                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-    #                 "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-    #             },
-    #             "electronics": {
-    #                 "serial": "E0542",
-    #                 "x-scale": 313.2,
-    #                 "y-scale": 312.3,
-    #                 "z-scale": 312.0,
-    #                 "temperature-scale": 0.01,
-    #             },
-    #             "sensor": {
-    #                 "serial": "S0419",
-    #                 "x-constant": 36958,
-    #                 "y-constant": 36849,
-    #                 "z-constant": 36811,
-    #             },
-    #         },
-    #     )
-
-    #     db = Database("sqlite:///:memory:")
-
-    #     await MetadataDatabaseFactory(database=db).create_metadata(test_data)
-
-    #     # assert id is removed
-    #     expected_values = {
-    #         "created_time": now,
-    #         "category": "instrument",
-    #         "created_by": "test_metadata.py",
-    #         "network": "NT",
-    #         "station": "BDT",
-    #         "metadata": {
-    #             "type": "FGE",
-    #             "channels": {
-    #                 "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-    #                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-    #                 "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-    #             },
-    #             "electronics": {
-    #                 "serial": "E0542",
-    #                 "x-scale": 313.2,
-    #                 "y-scale": 312.3,
-    #                 "z-scale": 312.0,
-    #                 "temperature-scale": 0.01,
-    #             },
-    #             "sensor": {
-    #                 "serial": "S0419",
-    #                 "x-constant": 36958,
-    #                 "y-constant": 36849,
-    #                 "z-constant": 36811,
-    #             },
-    #         },
-    #         "data_valid": True,
-    #         "priority": 1,
-    #         "status": "new"
-    #     }
-
-    #     mock_execute.assert_called_once()
-    #     called_params = mock_execute.call_args.args[0].compile().params
-
-    #     assert called_params == expected_values
-
-    # @patch("databases.Database.execute", new_callable=AsyncMock)
-    # async def test_create_metadata_with_metadata_id(self, mock_execute):
-    #     test_data = Metadata(
-    #         metadata_id=5678,
-    #         category=MetadataCategory.INSTRUMENT,
-    #         created_by="test_metadata.py",
-    #         network="NT",
-    #         station="BDT",
-    #         metadata={
-    #             "type": "FGE",
-    #             "channels": {
-    #                 "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-    #                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-    #                 "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-    #             },
-    #             "electronics": {
-    #                 "serial": "E0542",
-    #                 "x-scale": 313.2,
-    #                 "y-scale": 312.3,
-    #                 "z-scale": 312.0,
-    #                 "temperature-scale": 0.01,
-    #             },
-    #             "sensor": {
-    #                 "serial": "S0419",
-    #                 "x-constant": 36958,
-    #                 "y-constant": 36849,
-    #                 "z-constant": 36811,
-    #             },
-    #         },
-    #     )
-
-    #     db = Database("sqlite:///:memory:")
-
-    #     await MetadataDatabaseFactory(database=db).create_metadata(test_data)
-
-    #     # assert metadata_id is removed on values
-    #     expected_values = {
-    #         "metadata_id": 5678,
-    #         "category": "instrument",
-    #         "created_by": "test_metadata.py",
-    #         "network": "NT",
-    #         "station": "BDT",
-    #         "metadata": {
-    #             "type": "FGE",
-    #             "channels": {
-    #                 "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-    #                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-    #                 "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-    #             },
-    #             "electronics": {
-    #                 "serial": "E0542",
-    #                 "x-scale": 313.2,
-    #                 "y-scale": 312.3,
-    #                 "z-scale": 312.0,
-    #                 "temperature-scale": 0.01,
-    #             },
-    #             "sensor": {
-    #                 "serial": "S0419",
-    #                 "x-constant": 36958,
-    #                 "y-constant": 36849,
-    #                 "z-constant": 36811,
-    #             },
-    #         },
-    #         "data_valid": True,
-    #         "priority": 1,
-    #         "status": "new"
-    #     }
-
-    #     expected_insert = metadata_table.insert().values(**expected_values)
-    #     expected_params = expected_insert.compile().params
-
-
-    #     mock_execute.assert_called_once()
-    #     called_params = mock_execute.call_args.args[0].compile().params
-
-    #     assert called_params == expected_params
\ No newline at end of file
+    @patch("databases.Database.execute", new_callable=AsyncMock)
+    async def test_create_metadata_without_created_time(self, mock_execute):
+        test_data = Metadata(
+            metadata_id=5678,
+            category=MetadataCategory.INSTRUMENT,
+            created_by="test_metadata.py",
+            network="NT",
+            station="BDT",
+            metadata={
+                "type": "FGE",
+                "channels": {
+                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+                },
+                "electronics": {
+                    "serial": "E0542",
+                    "x-scale": 313.2,
+                    "y-scale": 312.3,
+                    "z-scale": 312.0,
+                    "temperature-scale": 0.01,
+                },
+                "sensor": {
+                    "serial": "S0419",
+                    "x-constant": 36958,
+                    "y-constant": 36849,
+                    "z-constant": 36811,
+                },
+            },
+        )
+
+        db = Database("sqlite:///:memory:")
+
+        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
+
+        mock_execute.assert_called_once()
+        called_params = mock_execute.call_args.args[0].compile().params
+
+        assert called_params["created_time"] is not None
+
+    @patch("databases.Database.execute", new_callable=AsyncMock)
+    async def test_create_metadata_with_starttime_and_endtime(self, mock_execute):
+        now = UTCDateTime()
+        t = UTCDateTime(2020, 1, 3, 17, 24, 40)
+        test_data = Metadata(
+            created_by="test_metadata.py",
+            created_time=now,
+            starttime=t,
+            endtime=t,
+            network="NT",
+            station="BOU",
+            channel=None,
+            location=None,
+            category=MetadataCategory.READING,
+            priority=1,
+            data_valid=True,
+            metadata={},
+        )
+
+        db = Database("sqlite:///:memory:")
+
+        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
+
+        # assert starttime and endtime are strings of expected UTCDateTime
+        expected_values = {
+            "category": "reading",
+            "created_time": datetime.datetime(
+                year=now.year,
+                month=now.month,
+                day=now.day,
+                hour=now.hour,
+                minute=now.minute,
+                second=now.second,
+                microsecond=now.microsecond,
+                tzinfo=tz.tzutc(),
+            ),
+            "created_by": "test_metadata.py",
+            "starttime": datetime.datetime(
+                year=t.year,
+                month=t.month,
+                day=t.day,
+                hour=t.hour,
+                minute=t.minute,
+                second=t.second,
+                microsecond=t.microsecond,
+                tzinfo=tz.tzutc(),
+            ),
+            "endtime": datetime.datetime(
+                year=t.year,
+                month=t.month,
+                day=t.day,
+                hour=t.hour,
+                minute=t.minute,
+                second=t.second,
+                microsecond=t.microsecond,
+                tzinfo=tz.tzutc(),
+            ),
+            "network": "NT",
+            "station": "BOU",
+            "metadata": {},
+            "data_valid": True,
+            "priority": 1,
+            "status": "new",
+        }
+
+        mock_execute.assert_called_once()
+        called_params = mock_execute.call_args.args[0].compile().params
+
+        assert called_params == expected_values
+
+    @patch("databases.Database.execute", new_callable=AsyncMock)
+    async def test_create_metadata_with_times_as_datetime(self, mock_execute):
+        # assert datetime is aware if not explicitly set by the user
+        s = datetime.datetime(2020, 1, 3, 17, 24, 40)
+        e = datetime.datetime(2020, 1, 3, 17, 24, 40, tzinfo=tz.tzutc())
+        test_data = Metadata(
+            created_by="test_metadata.py",
+            starttime=s,
+            endtime=e,
+            network="NT",
+            station="BOU",
+            channel=None,
+            location=None,
+            category=MetadataCategory.READING,
+            priority=1,
+            data_valid=True,
+            metadata={},
+        )
+
+        db = Database("sqlite:///:memory:")
+
+        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
+
+        mock_execute.assert_called_once()
+        called_params = mock_execute.call_args.args[0].compile().params
+
+        assert called_params["starttime"] == datetime.datetime(
+            year=s.year,
+            month=s.month,
+            day=s.day,
+            hour=s.hour,
+            minute=s.minute,
+            second=s.second,
+            microsecond=s.microsecond,
+            tzinfo=tz.tzutc(),
+        )
+        assert called_params["endtime"] == datetime.datetime(
+            year=e.year,
+            month=e.month,
+            day=e.day,
+            hour=e.hour,
+            minute=e.minute,
+            second=e.second,
+            microsecond=e.microsecond,
+            tzinfo=tz.tzutc(),
+        )
diff --git a/test/pydantic_utcdatetime_test.py b/test/pydantic_utcdatetime_test.py
index 120bbabc..ef81615a 100644
--- a/test/pydantic_utcdatetime_test.py
+++ b/test/pydantic_utcdatetime_test.py
@@ -17,12 +17,18 @@ def test_UTCDateTime_string():
     assert_equal(t.starttime, UTCDateTime(2024, 11, 5, 0, 0))
 
 
-def test_UTCDateTime_timestamp():
+def test_UTCDateTime_datetime():
     t = TimeClass(starttime=datetime.datetime(2024, 11, 5, tzinfo=tz.tzutc()))
 
     assert_equal(t.starttime, UTCDateTime(2024, 11, 5, 0, 0))
 
 
+def test_UTCDateTime_datetime_unaware():
+    t = TimeClass(starttime=datetime.datetime(2024, 11, 5))
+
+    assert_equal(t.starttime, UTCDateTime(2024, 11, 5, 0, 0))
+
+
 def test_UTCDateTime_unix_timestamp():
     t = TimeClass(starttime=1730764800)
 
-- 
GitLab


From 9c38f3a09d78b31e59a0f8b0b0fad074dcdba2b9 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Mon, 2 Dec 2024 09:43:46 -0700
Subject: [PATCH 13/16] merging in master

---
 geomagio/metadata/Metadata.py           |   9 +-
 test/db/MetadataDatabaseFactory_test.py | 143 ++++++++++++++++++++----
 2 files changed, 131 insertions(+), 21 deletions(-)

diff --git a/geomagio/metadata/Metadata.py b/geomagio/metadata/Metadata.py
index e1a7bd18..bfc6a35f 100644
--- a/geomagio/metadata/Metadata.py
+++ b/geomagio/metadata/Metadata.py
@@ -2,7 +2,7 @@ from datetime import timezone
 from typing import Dict, Optional
 
 from obspy import UTCDateTime
-from pydantic import field_serializer, BaseModel, Field
+from pydantic import field_validator, field_serializer, BaseModel, Field
 
 from .MetadataCategory import MetadataCategory
 from ..pydantic_utcdatetime import CustomUTCDateTimeType
@@ -52,7 +52,7 @@ class Metadata(BaseModel):
     metadata_id: Optional[int] = None
     # author
     created_by: Optional[str] = None
-    created_time: CustomUTCDateTimeType = Field(default_factory=lambda: UTCDateTime())
+    created_time: Optional[CustomUTCDateTimeType] = None
     # editor
     updated_by: Optional[str] = None
     updated_time: Optional[CustomUTCDateTimeType] = None
@@ -104,3 +104,8 @@ class Metadata(BaseModel):
         if endtime is not None:
             endtime = endtime.datetime.replace(tzinfo=timezone.utc)
         return endtime
+
+    @field_validator("created_time")
+    @classmethod
+    def set_default_created_time(cls, created_time: UTCDateTime = None) -> UTCDateTime:
+        return created_time or UTCDateTime()
diff --git a/test/db/MetadataDatabaseFactory_test.py b/test/db/MetadataDatabaseFactory_test.py
index 045d297e..61e60299 100644
--- a/test/db/MetadataDatabaseFactory_test.py
+++ b/test/db/MetadataDatabaseFactory_test.py
@@ -7,16 +7,14 @@ from databases import Database
 from obspy import UTCDateTime
 
 from geomagio.api.db import MetadataDatabaseFactory
-from geomagio.metadata import Metadata, MetadataCategory
+from geomagio.metadata import Metadata, MetadataCategory, MetadataQuery
 
 
 class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
     @patch("databases.Database.execute", new_callable=AsyncMock)
     async def test_create_metadata_defaults(self, mock_execute):
-        now = UTCDateTime()
         test_data = Metadata(
-            created_time=now,
             category=MetadataCategory.INSTRUMENT,
             created_by="test_metadata.py",
             network="NT",
@@ -50,16 +48,6 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
         # assert data_valid, priority, and status are set to the correct defaults
         expected_values = {
-            "created_time": datetime.datetime(
-                year=now.year,
-                month=now.month,
-                day=now.day,
-                hour=now.hour,
-                minute=now.minute,
-                second=now.second,
-                microsecond=now.microsecond,
-                tzinfo=tz.tzutc(),
-            ),
             "category": "instrument",
             "created_by": "test_metadata.py",
             "network": "NT",
@@ -96,12 +84,10 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
         assert called_params == expected_values
 
     @patch("databases.Database.execute", new_callable=AsyncMock)
-    async def test_create_metadata_with_ids(self, mock_execute):
+    async def test_create_metadata_created_time(self, mock_execute):
         now = UTCDateTime()
         test_data = Metadata(
-            id=1234,
             created_time=now,
-            metadata_id=5678,
             category=MetadataCategory.INSTRUMENT,
             created_by="test_metadata.py",
             network="NT",
@@ -133,7 +119,7 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
         await MetadataDatabaseFactory(database=db).create_metadata(test_data)
 
-        # assert id and metadata_id are removed
+        # assert created_time is serialized to the expected datetime
         expected_values = {
             "created_time": datetime.datetime(
                 year=now.year,
@@ -181,8 +167,9 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
         assert called_params == expected_values
 
     @patch("databases.Database.execute", new_callable=AsyncMock)
-    async def test_create_metadata_without_created_time(self, mock_execute):
+    async def test_create_metadata_with_ids(self, mock_execute):
         test_data = Metadata(
+            id=1234,
             metadata_id=5678,
             category=MetadataCategory.INSTRUMENT,
             created_by="test_metadata.py",
@@ -215,10 +202,42 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
 
         await MetadataDatabaseFactory(database=db).create_metadata(test_data)
 
+        # assert id and metadata_id are removed
+        expected_values = {
+            "category": "instrument",
+            "created_by": "test_metadata.py",
+            "network": "NT",
+            "station": "BDT",
+            "metadata": {
+                "type": "FGE",
+                "channels": {
+                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+                },
+                "electronics": {
+                    "serial": "E0542",
+                    "x-scale": 313.2,
+                    "y-scale": 312.3,
+                    "z-scale": 312.0,
+                    "temperature-scale": 0.01,
+                },
+                "sensor": {
+                    "serial": "S0419",
+                    "x-constant": 36958,
+                    "y-constant": 36849,
+                    "z-constant": 36811,
+                },
+            },
+            "data_valid": True,
+            "priority": 1,
+            "status": "new",
+        }
+
         mock_execute.assert_called_once()
         called_params = mock_execute.call_args.args[0].compile().params
 
-        assert called_params["created_time"] is not None
+        assert called_params == expected_values
 
     @patch("databases.Database.execute", new_callable=AsyncMock)
     async def test_create_metadata_with_starttime_and_endtime(self, mock_execute):
@@ -336,3 +355,89 @@ class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
             microsecond=e.microsecond,
             tzinfo=tz.tzutc(),
         )
+
+    @patch("databases.Database.execute", new_callable=AsyncMock)
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_update_metadata_defaults(self, mock_fetch_all, mock_execute):
+        test_data = Metadata(
+            category=MetadataCategory.INSTRUMENT,
+            network="NT",
+            station="BDT",
+            metadata={
+                "type": "FGE",
+                "channels": {
+                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+                },
+                "electronics": {
+                    "serial": "E0542",
+                    "x-scale": 313.2,
+                    "y-scale": 312.3,
+                    "z-scale": 312.0,
+                    "temperature-scale": 0.01,
+                },
+                "sensor": {
+                    "serial": "S0419",
+                    "x-constant": 36958,
+                    "y-constant": 36849,
+                    "z-constant": 36811,
+                },
+            },
+        )
+
+        db = Database("sqlite:///:memory:")
+        yesterday = datetime.datetime(2024, 11, 1, 8, 15, tzinfo=tz.tzutc())
+
+        mock_fetch_all.return_value = (
+            {
+                "id": 1234,
+                "created_time": yesterday,
+                "category": "instrument",
+                "network": "NT",
+                "station": "BDT",
+                "metadata": {
+                    "foo": "bar",
+                },
+            },
+        )
+
+        await MetadataDatabaseFactory(database=db).update_metadata(
+            meta=test_data, updated_by="test_user"
+        )
+
+        assert mock_fetch_all.call_count == 2
+        assert mock_execute.call_count == 2
+
+        first_called_params = mock_execute.call_args_list[0].args[0].compile().params
+        second_called_params = mock_execute.call_args_list[1].args[0].compile().params
+
+        assert first_called_params["metadata_id"] == 1234
+        assert first_called_params["created_time"] == yesterday
+        assert first_called_params["metadata"] == {"foo": "bar"}
+
+        assert second_called_params["updated_by"] == "test_user"
+        assert second_called_params["updated_time"] is not None
+        assert second_called_params["metadata"] == test_data.metadata
+
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata(self, mock_fetch_all):
+        test_query = MetadataQuery(
+            category=MetadataCategory.INSTRUMENT,
+            station="BSL",
+            starttime=UTCDateTime(2020, 1, 20),
+        )
+
+        db = Database("sqlite:///:memory:")
+
+        await MetadataDatabaseFactory(database=db).get_metadata(params=test_query)
+
+        mock_fetch_all.assert_called_once()
+
+        called_params = mock_fetch_all.call_args.args[0].compile().params
+
+        assert called_params["category_1"] == "instrument"
+        assert called_params["station_1"] == "BSL"
+        assert called_params["endtime_1"] == datetime.datetime(
+            2020, 1, 20, tzinfo=tz.tzutc()
+        )
-- 
GitLab


From 2b0dc605999f63d3d379d5a38004a16a5358b001 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Mon, 2 Dec 2024 10:13:19 -0700
Subject: [PATCH 14/16] move edge container info here

---
 localdev/docker-compose.yml               |   6 +-
 localdev/geomag-edge/Dockerfile           |  45 ++++++++++
 localdev/geomag-edge/LICENSE.md           |  23 +++++
 localdev/geomag-edge/README.md            |  36 ++++++++
 localdev/geomag-edge/docker-configure.sh  | 103 ++++++++++++++++++++++
 localdev/geomag-edge/docker-entrypoint.sh |  40 +++++++++
 6 files changed, 250 insertions(+), 3 deletions(-)
 create mode 100644 localdev/geomag-edge/Dockerfile
 create mode 100644 localdev/geomag-edge/LICENSE.md
 create mode 100644 localdev/geomag-edge/README.md
 create mode 100644 localdev/geomag-edge/docker-configure.sh
 create mode 100644 localdev/geomag-edge/docker-entrypoint.sh

diff --git a/localdev/docker-compose.yml b/localdev/docker-compose.yml
index 5ffc161e..9d936edf 100644
--- a/localdev/docker-compose.yml
+++ b/localdev/docker-compose.yml
@@ -1,7 +1,7 @@
 version: '3'
 services:
   database:
-    image: mysql:8.0.30
+    image: mysql:5.7
     ports:
       - '3306:3306'
     environment:
@@ -12,14 +12,14 @@ services:
     platform: linux/amd64
 
   edge:
-    build: $EDGE_PATH
+    build: ./geomag-edge
     ports:
       - '7981:7981'
       - '2060:2060'
       - '7974:7974'
       - '2061:2061'
     volumes:
-      - $EDGE_DATA_PATH:/data
+      - ./edge-data:/data
   
   web:
     build:
diff --git a/localdev/geomag-edge/Dockerfile b/localdev/geomag-edge/Dockerfile
new file mode 100644
index 00000000..d75e1abd
--- /dev/null
+++ b/localdev/geomag-edge/Dockerfile
@@ -0,0 +1,45 @@
+# dockerfile to build basic edge/cwb image
+
+FROM usgs/java:8
+
+LABEL maintainer="Jeremy Fee <jmfee@usgs.gov>"
+
+# set up vdl account
+# NOTE: USGS' public ftp server was decommissioned at the end of FY22.
+#       (i.e., ftp://hazards.cr.usgs.gov/CWBQuery/EdgeCWBRelease.tar.gz)
+#       As of January 2023, there is an experimental method to retrieve
+#       EdgeCWBRelease.tar.gz via a Gitlab package registry, which is
+#       what is used below (i.e., curl -O https://code.usgs.gov/...).
+#       -EJR
+RUN yum install -y net-tools sudo \
+    && mkdir /TEMP \
+    && cd /TEMP \
+    && curl -O https://code.usgs.gov/ghsc/neic/edgecwb/edgecwbgroup/edgecwbfiles/-/package_files/7388/download \
+    && mv -f download EdgeCWBRelease.tar.gz \
+    # && curl -O ftp://hazards.cr.usgs.gov/CWBQuery/EdgeCWBRelease.tar.gz \
+    && zcat EdgeCWBRelease.tar.gz | tar xf - \
+    && tar -xvzf EdgeCWBRelease.tar.gz \
+    && tar xf scripts_release.tar \
+    && cd scripts/INSTALL \
+    && ./addAccount vdl \
+    && sudo -u vdl cp /TEMP/EdgeCWBRelease.tar.gz ~vdl \
+    && cd ~vdl \
+    && sudo -u vdl /TEMP/scripts/installCWBRelease.bash \
+    && cp ~vdl/NoDB/DB/*.txt ~vdl/DB/. \
+    && rm -rf /TEMP \
+    && echo 'vdl - memlock 1024' >> /etc/security/limits.conf \
+    && echo 'vdl - stack 81920' >> /etc/security/limits.conf \
+    && echo 'vdl - nproc 20480' >> /etc/security/limits.conf \
+    && echo 'vdl - nofile 8192' >> /etc/security/limits.conf \
+    && mkdir /data \
+    && chown vdl:vdl /data /home/vdl
+
+# add docker entrypoint and configuration scripts
+COPY docker-configure.sh docker-entrypoint.sh /home/vdl/
+RUN chmod a+x /home/vdl/docker-entrypoint.sh /home/vdl/docker-configure.sh
+
+
+USER vdl
+EXPOSE 2060 2061 7974 7981
+WORKDIR /home/vdl
+CMD [ "./docker-entrypoint.sh" ]
diff --git a/localdev/geomag-edge/LICENSE.md b/localdev/geomag-edge/LICENSE.md
new file mode 100644
index 00000000..58fc160d
--- /dev/null
+++ b/localdev/geomag-edge/LICENSE.md
@@ -0,0 +1,23 @@
+Unless otherwise noted, This software is in the public domain because it
+contains materials that originally came from the United States Geological
+Survey, an agency of the United States Department of Interior. For more
+information, see the official USGS copyright policy at
+http://www.usgs.gov/visual-id/credit_usgs.html#copyright
+
+
+Disclaimers
+-----------
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+Information provided by this software may be preliminary or provisional and is
+subject to revision. It is being provided to meet the need for timely best
+science. The information has not received final approval by the U.S. Geological
+Survey (USGS) and is provided on the condition that neither the USGS nor the
+U.S. Government shall be held liable for any damages resulting from the
+authorized or unauthorized use of the information.
diff --git a/localdev/geomag-edge/README.md b/localdev/geomag-edge/README.md
new file mode 100644
index 00000000..ce11a39b
--- /dev/null
+++ b/localdev/geomag-edge/README.md
@@ -0,0 +1,36 @@
+geomag-edge
+=============
+
+A lightweight configuration of an EDGE/CWB node in a container.
+
+
+Running the Docker image
+------------------------
+A version of the image created by this project is available on docker hub.
+```
+docker run -d --name localedge -p 2060:2060 usgs/geomag-edge
+```
+
+
+Building the Docker image
+-------------------------
+From the root of the project, run:
+```
+docker build -t usgs/geomag-edge:latest .
+```
+
+
+Related Projects
+--------------------
+
+- [EDGE/CWB wiki](https://github.com/usgs/edgecwb/wiki)
+
+The Dockerfile downloads the latest release of EDGE/CWB.
+
+- https://github.com/usgs/geomag-algorithms
+
+  Python library to process timeseries data, that can read from and write to an EDGE process.
+
+- https://github.com/usgs/geomag-edge-ws
+
+  JSON and IAGA2002 web service for data stored in EDGE.
diff --git a/localdev/geomag-edge/docker-configure.sh b/localdev/geomag-edge/docker-configure.sh
new file mode 100644
index 00000000..b26054d2
--- /dev/null
+++ b/localdev/geomag-edge/docker-configure.sh
@@ -0,0 +1,103 @@
+#! /bin/bash
+
+
+# only run configuration the first time
+RUN_ONCE=ran-docker-configure
+if [ -f "${RUN_ONCE}" ]; then
+  echo "## Already configured";
+  exit;
+else
+  touch "${RUN_ONCE}"
+fi
+
+
+################################################################################
+# configuration/defaults
+
+# files to write
+EDGE_CONFIG=edge.config
+EDGE_PROP=edge.prop
+EDGEMOM_SETUP=edgemom.setup
+QUERYMOM_SETUP=querymom.setup
+
+# figure out container ip address
+IP=$(ifconfig | sed -En 's/127.0.0.1//;s/.*inet (addr:)?(([0-9]*\.){3}[0-9]*).*/\2/p')
+
+
+################################################################################
+# write edge.config
+echo "## Writing ${EDGE_CONFIG}"
+cat > "${EDGE_CONFIG}" <<-DONE
+Host=$(hostname)
+HostIP=$IP
+PublicIP=$IP
+DONE
+
+
+################################################################################
+# write edge.prop
+echo "## Writing ${EDGE_PROP}"
+cat > "${EDGE_PROP}" <<-DONE
+Station=DOCKER
+Network=DOCKER
+Node=1
+Instance=1
+
+DBServer=NoDB
+StatusDBServer=NoDB
+MetaDBServer=NoDB
+MySQLServer=NoDB
+AlarmIP=localhost
+StatusServer=localhost
+
+ndatapath=1
+daysize=3001
+extendsize=2000
+ebqsize=20000
+nday=10000000
+datapath=/data/
+logfilepath=/home/vdl/log/
+
+emailTo=
+SMTPFrom=
+SNWServer=
+SNWPort=0
+DONE
+
+
+################################################################################
+# write edgemom.setup
+
+echo "## Writing ${EDGEMOM_SETUP}"
+cat > "${EDGEMOM_SETUP}" <<-DONE
+Mom:EdgeMom:-empty >>edgemom
+Echn:EdgeChannelServer:-nodb >>echn
+# Load:MiniSeedServer:-nohydra -noudpchan -p 7965 >>load
+Replace:MiniSeedServer:-nohydra -noudpchan -p 7974 >>replace
+RawInput:RawInputServer:-nohydra -p 7981 -rsend 100 >>rawinput
+DONE
+
+
+################################################################################
+# write querymom.setup
+
+echo "## Writing ${QUERYMOM_SETUP}"
+cat > "${QUERYMOM_SETUP}" <<-DONE
+Mom:EdgeMom:-empty >>querymom
+Echn:EdgeChannelServer:-nodb >>echnqm
+QS:gov.usgs.cwbquery.EdgeQueryServer:-allowrestricted -mdsport 0 >>queryserver
+CWBWS:gov.usgs.anss.waveserver.CWBWaveServer:-allowrestricted -daysback 10000 -maxthreads 500 -mdsport 0 -nodb -p 2060 -nofilter -queryall >>cwbws
+DLQS:gov.usgs.cwbquery.DataLinkToQueryServer:-empty >>dlqs
+QSC:gov.usgs.cwbquery.QuerySpanCollection:-d 86400 -bands * >>qsc
+DONE
+
+
+################################################################################
+# Have EDGE process config files
+# NOTE: this overwrites the files above and so is commented out
+#
+#echo "## Running NoDBConfig"
+#java -cp ~vdl/bin/EdgeCWB.jar gov.usgs.anss.edgemom.NoDBConfig \
+#    -config "${EDGE_CONFIG}" \
+#    -once \
+#    -prop "${EDGE_PROP}"
diff --git a/localdev/geomag-edge/docker-entrypoint.sh b/localdev/geomag-edge/docker-entrypoint.sh
new file mode 100644
index 00000000..89462850
--- /dev/null
+++ b/localdev/geomag-edge/docker-entrypoint.sh
@@ -0,0 +1,40 @@
+#! /bin/bash
+
+
+# bash normally ignores SIGTERM
+_term () {
+  echo '## Caught SIGTERM, stopping edge processes'
+  kill -TERM "$alarm" "$edgemom" "$querymom"
+}
+trap _term SIGTERM
+
+
+################################################################################
+# configure environment
+
+source ~vdl/.bashrc
+
+echo '## Running docker-configure'
+./docker-configure.sh
+
+
+################################################################################
+# start edge processes
+
+echo '## Starting alarm'
+java -jar bin/EdgeCWB.jar alarm '^' 128 -alarm -noaction -nocfg -nodb &
+alarm=$!
+
+echo '## Starting edgemom'
+java -jar bin/EdgeCWB.jar edgemom '1#1' 499 -max -f edgemom.setup &
+edgemom=$!
+
+echo '## Starting querymom'
+java -jar bin/EdgeCWB.jar querymom '1#1' 161 -f querymom.setup &
+querymom=$!
+
+
+################################################################################
+# wait for edge processes to exit
+
+wait "$alarm" "$edgemom" "$querymom"
-- 
GitLab


From 06b21cb1de8fa0bbe381bca6407abf5a5bf8b899 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Mon, 2 Dec 2024 10:52:33 -0700
Subject: [PATCH 15/16] added cleanup script and cleaned up docs

---
 localdev/cleanup-docker.sh    | 12 ++++++++++++
 localdev/docker-compose.yml   |  4 ++--
 localdev/local_development.md | 15 ++++-----------
 3 files changed, 18 insertions(+), 13 deletions(-)
 create mode 100755 localdev/cleanup-docker.sh

diff --git a/localdev/cleanup-docker.sh b/localdev/cleanup-docker.sh
new file mode 100755
index 00000000..1c245406
--- /dev/null
+++ b/localdev/cleanup-docker.sh
@@ -0,0 +1,12 @@
+#!/bin/sh -e
+
+# script to completely clean up all docker resources
+docker stop $(docker ps -aq)
+
+docker rm -vf $(docker ps -aq)
+
+docker rmi -f $(docker images -aq)
+
+# this command will ask whether you are sure you want to continue. the benefit of this command is to free up space by removing cached objects,
+# but it does mean that the next time you run docker-compose up, it will take longer.
+docker system prune --volumes
diff --git a/localdev/docker-compose.yml b/localdev/docker-compose.yml
index 9d936edf..d68cee8c 100644
--- a/localdev/docker-compose.yml
+++ b/localdev/docker-compose.yml
@@ -32,10 +32,10 @@ services:
       - host.docker.internal:host-gateway
     environment:
       - DATA_HOST=host.docker.internal
-      - DATABASE_URL=$DATABASE_URL
       - OPENID_CLIENT_ID=$OPENID_CLIENT_ID
       - OPENID_CLIENT_SECRET=$OPENID_CLIENT_SECRET
-      - OPENID_METADATA_URL=$OPENID_METADATA_URL
+      - DATABASE_URL=mysql://root:password@host.docker.internal:3306/geomag_operations
+      - OPENID_METADATA_URL=https://code.usgs.gov/.well-known/openid-configuration
       - SECRET_KEY=$SECRET_KEY
       - SECRET_SALT=$SECRET_SALT
       - WEBSERVICE=false
diff --git a/localdev/local_development.md b/localdev/local_development.md
index ec84029c..ae659a92 100644
--- a/localdev/local_development.md
+++ b/localdev/local_development.md
@@ -1,22 +1,16 @@
 # Local Development
 
 ## Background
-This workflow can replace the docker-compose in geomag-stacks. This docker-compose uses the existing Dockerfile to create a `localdev-web-1` container, instead of pulling down the latest geomag-algorithms image. However, it is initializing the mysql database the same way it is done in geomag-stacks. This docker-compose is also creating a mysql container called `localdev-database-1` an edge container called `localdev-edge-1` using the geomag-edge Dockerfile in geomag-stacks. The geomag-edge folder was not moved out of geomag-stacks out of an abudance of caution to prevent internal information from being revealed.
+This docker-compose uses the main Dockerfile to create a `localdev-web-1` container and creates a mysql container called `localdev-database-1` and an edge container called `localdev-edge-1`.
 
 ## Prerequisites
 For mac development:
 1. Download colima using homebrew.
 2. Start colima and edit virtual machine sizes using `colima start --edit`. Increase the number of CPUs to 16 and the memory to 16.
-3. Add the following env vars to your .bashrc or .zshrc. Replace the EDGE_PATH and 
-EDGE_DATA_PATH with the absolute path to your geomag-stack. See the instructions in metadata_webservice.md for instructions on creating the OPENID_CLIENT_ID and
-OPENID_CLIENT_SECRET. The rest of the values can be found in the geomag-stack docker-compose. Run the source command to pick up the new values (ex `source ~/.bashrc`).
+3. Add the following env vars to your .bashrc or .zshrc or an .env file. See the instructions [here](../docs/metadata_webservice.md) for creating the OPENID_CLIENT_ID and OPENID_CLIENT_SECRET. The rest of the values can be set to strings of your choice. If you set the values in your .bashrc or .zshrc, run the source command to pick up the new values (ex `source ~/.bashrc`).
 ```
-export EDGE_PATH="/Users/{user}/geomag-stack/geomag-edge"
-export EDGE_DATA_PATH="/Users/{user}/geomag-stack/edge-data"
 export OPENID_CLIENT_ID=""
 export OPENID_CLIENT_SECRET=""
-export DATABASE_URL=""
-export OPENID_METADATA_URL=""
 export SECRET_KEY=""
 export SECRET_SALT=""
 export MYSQL_ROOT_PASSWORD=""
@@ -38,8 +32,7 @@ docker-compose up -d
 
 `docker ps -a` shows all the containers, even ones that have exited. This is helpful to see logs for containers that have unexpectedly exited.
 
-`docker exec -it {CONTAINER ID} /bin/bash` execs into the container. This can be helpful for exploring the mysql db. After you exec into the mysql container, run `mysql -u root -p` in the container. Input the password you set to MYSQL_ROOT_PASSWORD. Now
-you can use SQL to explore the db.
+`docker exec -it {CONTAINER ID} /bin/bash` execs into the container. This can be helpful for exploring the mysql db. After you exec into the mysql container, run `mysql -u root -p` in the container. Input the password you set to MYSQL_ROOT_PASSWORD. Now you can use SQL to explore the db.
 
 ## Helpful Tips
-Sometimes the mysql container exits immediately with code 139 due to insufficient memory. Restarting colima resolves this issue.
+Sometimes the mysql container exits immediately with code 139 due to insufficient memory. You can clean up your docker resources using the `cleanup-docker.sh` script in this folder. Restarting colima can also resolve this issue.
-- 
GitLab


From 49825c5eb8cd235ae8bc8c2149a23792c6deb6fd Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Wed, 4 Dec 2024 12:43:30 -0700
Subject: [PATCH 16/16] made requested documentation changes

---
 README.md                   | 29 ++++++++++++++++++-
 docs/develop.md             |  3 ++
 docs/install.md             |  2 +-
 docs/metadata_webservice.md | 56 +++++++++++++++++++++++++++++++++++--
 4 files changed, 86 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 436c9247..9de81eed 100644
--- a/README.md
+++ b/README.md
@@ -81,7 +81,34 @@ output_factory.write_file(
 
 ### Docker
 
-Docker is the simplest install option. Find instructions [here](./localdev/local_development.md).
+Docker is the simplest install option.
+
+1. Create and start a new container
+
+   named `geomagio`,
+   listening on local port `8000`,
+   from the image `usgs/geomag-algorithms` on docker hub
+
+   ```
+   docker run -dit --name geomagio -p 8000:8000 usgs/geomag-algorithms
+   ```
+
+2. Use the running container
+
+- Run the `geomag.py` command line interface:
+
+  > To work with files outside the container,
+  > use a volume mount when starting the container
+
+  ```
+  docker exec -it geomagio geomag.py
+  ```
+
+- Or, to run an interactive python prompt:
+
+  ```
+  docker exec -it geomagio python
+  ```
 
 ## Algorithms
 
diff --git a/docs/develop.md b/docs/develop.md
index 932d5ed7..b9985919 100644
--- a/docs/develop.md
+++ b/docs/develop.md
@@ -111,3 +111,6 @@ https://black.readthedocs.io/en/stable/the_black_code_style
   Resolve any rebase conflicts.
   If you have already pushed this branch to your fork, you *may* need to force push
   because branch history has changed.
+
+## Local Development
+After you get everything installed, you can find further instructions on local development [here](../localdev/local_development.md).
diff --git a/docs/install.md b/docs/install.md
index 7e5e1fd7..1bc684aa 100644
--- a/docs/install.md
+++ b/docs/install.md
@@ -1,7 +1,7 @@
 # Installation
 This document describes installation instructions for users. For those that wish to modify or develop custom code within **geomag-algorithms**, or prefer to install the Docker container, please see the following documents:
 > - [Develop](./develop.md) provides installation instruction for developers.
-> - [Docker](../localdev/local_development.md) describes Docker container installation and usage.
+> - [Docker](./install_docker.md) describes Docker container installation and usage.
 
 ## Requirements
 
diff --git a/docs/metadata_webservice.md b/docs/metadata_webservice.md
index 23600254..9c0b5141 100644
--- a/docs/metadata_webservice.md
+++ b/docs/metadata_webservice.md
@@ -1,5 +1,41 @@
 # Running the Metadata Webservice Locally
 
+## Run mysql in a container (for local development)
+
+```
+docker run --rm --name mysql-db -e MYSQL_ROOT_PASSWORD=password -p 3306:3306 mysql:5.7
+```
+
+This exposes port 3306 so python can connect locally. When the webservice itself runs in a container, use container links (or a shared docker network) so it can reach the database container.
+
+## Set up schema in database
+
+> This is only needed the first time the database is created. Volume mounts can make this more persistent.
+
+```
+export DATABASE_URL=mysql://root:password@localhost/geomag_operations
+```
+
+### Create mysql database
+```
+docker exec -it mysql-db mysql -uroot -ppassword
+```
+> Inside mysql container:
+```
+CREATE DATABASE geomag_operations;
+exit
+```
+
+```
+poetry run python create_db.py
+```
+
+### Add some testing data (depends on the DATABASE_URL environment variable set above).
+
+```
+poetry run python test_metadata.py
+```
+
 ## Set up OpenID application in code.usgs.gov.
 
 - Under your account, go to settings
@@ -16,5 +52,21 @@
 
   Scopes: `openid`, `profile`, `email`
 
-## Running with Docker
-[local_development.md](../localdev/local_development.md) describes Docker container installation and usage.
+## Start webservice
+
+- Export variables used for authentication:
+
+```
+export DATABASE_URL=mysql://root:password@localhost/geomag_operations
+export OPENID_CLIENT_ID={Application ID}
+export OPENID_CLIENT_SECRET={Secret}
+export OPENID_METADATA_URL=https://code.usgs.gov/.well-known/openid-configuration
+export SECRET_KEY=changeme
+export SECRET_SALT=salt
+```
+
+- Run app
+
+```
+poetry run uvicorn geomagio.api:app
+```
-- 
GitLab