Please consider responding to a brief user needs survey.

Responding to review. Not tested.

parent 16dcdcdf
This diff is collapsed.
......@@ -378,7 +378,6 @@ def report_fc_values(df, field_defs, xmlfile, fill=-99999):
if not found:
extra_flds += [fld]
# Initialize values
attrxmlstr = """<attr><attrlabl>{}</attrlabl><attrdef>{}</attrdef><attrdefs>{}</attrdefs><attrdomv><rdom><rdommin>{}</rdommin><rdommax>{}</rdommax><attrunit>{}</attrunit></rdom></attrdomv></attr>"""
na = ''
fills = ''
eainfo = ''
......@@ -389,8 +388,9 @@ def report_fc_values(df, field_defs, xmlfile, fill=-99999):
if fld not in field_defs:
continue
ptscol = df[fld]
attr_vals = field_defs[fld]
# Range domain fields
if not 'type' in field_defs[fld].keys():
if not 'type' in attr_vals.keys():
if any(ptscol.isnull()):
na = 'Nulls present'
ptscol = ptscol[~ptscol.isnull()]
......@@ -420,15 +420,43 @@ def report_fc_values(df, field_defs, xmlfile, fill=-99999):
mx = 'xxx'
pass
print('{:_<20}{:_>20} | {:_<20} {:_<15}{:_>10}'.format(fld, mn, mx, fills, na))
eainfo += attrxmlstr.format(fld, field_defs[fld]['def'], field_defs[fld]['defs'], mn, mx, field_defs[fld]['unit'])
if not 'unit' in attr_vals:
attrxmlstr_nounit = """<attr>
<attrlabl>{}</attrlabl>
<attrdef>{}</attrdef>
<attrdefs>{}</attrdefs>
<attrdomv>
<rdom>
<rdommin>{}</rdommin>
<rdommax>{}</rdommax>
</rdom>
</attrdomv>
</attr>
"""
eainfo += attrxmlstr_nounit.format(fld, attr_vals['def'], attr_vals['defs'], mn, mx)
else:
attrxmlstr = """<attr>
<attrlabl>{}</attrlabl>
<attrdef>{}</attrdef>
<attrdefs>{}</attrdefs>
<attrdomv>
<rdom>
<rdommin>{}</rdommin>
<rdommax>{}</rdommax>
<attrunit>{}</attrunit>
</rdom>
</attrdomv>
</attr>
"""
eainfo += attrxmlstr.format(fld, attr_vals['def'], attr_vals['defs'], mn, mx, attr_vals['unit'])
# Enumerated domain fields
elif field_defs[fld]['type'] == 'enumerated':
elif attr_vals['type'] == 'enumerated':
# for val in ptscol.unique():
# eavals += enum_xml.format(val, field_defs['enum_domain'][val], field_defs['producer'])
print('{:.<20} {:}'.format(fld, ' | '.join((x) for x in sorted(ptscol.astype(str).unique()))))
eainfo += '<attr>xxx{}xxx</attr>'.format(fld)
# Unique domain fields
elif field_defs[fld]['type'] == 'unique':
elif attr_vals['type'] == 'unique':
print('{:.<20} {:}'.format(fld, 'type = UNIQUE'))
eainfo += '<attr>xxx{}xxx</attr>'.format(fld)
......
......@@ -174,7 +174,18 @@
"\n",
"# Copy feature class to dataframe.\n",
"trans_df = fwa.FCtoDF(extendedTrans, id_fld=tID_fld, extra_fields=extra_fields)\n",
"\n",
"# Set capitalization of fields to match expected\n",
"colrename = {}\n",
"for f in sorted_pt_flds:\n",
" for c in trans_df.columns:\n",
" if f.lower() == c.lower():\n",
" colrename[c] = f\n",
"trans_df.rename(index=str, columns=colrename, inplace=True)\n",
"\n",
"# Set DD_ID, MHW, and Azimuth fields\n",
"trans_df['DD_ID'] = trans_df[tID_fld] + sitevals['id_init_val']\n",
"trans_df['MHW'] = sitevals['MHW']\n",
"trans_df.drop('Azimuth', axis=1, inplace=True, errors='ignore')\n",
"trans_df.rename_axis({\"BEARING\": \"Azimuth\"}, axis=1, inplace=True)\n",
"\n",
......@@ -191,6 +202,21 @@
"trans_df.head()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"colrename = {}\n",
"for f in sorted_pt_flds:\n",
" for c in trans_df.columns:\n",
" if f.lower() == c.lower():\n",
" colrename[c] = f\n",
"\n",
"trans_df.rename(index=str, columns=colrename, inplace=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
......@@ -858,6 +884,55 @@
"pts_df.sample(5)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Change number of significant digits\n",
"\n",
"- Elevations: 2 significant digits \n",
"- Lengths: 1 (lidar has 15-30 cm resolution in vertical and 1-m-ish in horizontal I believe)\n",
"- UTM coords: 2\n",
"- lat longs: 6\n",
"- LRR: 2\n",
"\n",
"Would be nice if fill value was always integer (not -99999.0 etc.)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Round fields to given significant digits\n",
"fprecision = {# UTM coordinates\n",
" 'seg_x':2, 'seg_y':2, 'SL_x':2, 'SL_y':2, 'DL_x':2, 'DL_y':2,\n",
" 'DH_x':2, 'DH_y':2, 'DL_snapX':2, 'DL_snapY':2, 'DH_snapX':2, 'DH_snapY':2, \n",
" 'Arm_x':2, 'Arm_y':2,\n",
" # Geographic coordinates\n",
" 'seg_lon':6, 'seg_lat':6,\n",
" # Elevations\n",
" 'ptZ':2, 'ptZmhw':2, 'DL_z':2, 'DL_zmhw':2, 'DH_z':2, 'DH_zmhw':2, \n",
" 'Arm_z':2, 'Arm_zmhw':2, 'uBH':2, 'mean_Zmhw':2, 'max_Zmhw':2,\n",
" # Lengths\n",
" 'Dist_Seg':1, 'Dist_MHWbay':1, 'DistSegDH':1, 'DistSegDL':1, 'DistSegArm':1,\n",
" 'DistDH':1, 'DistDL':1, 'DistArm':1,'Dist2Inlet':1, 'WidthPart':1, 'WidthLand':1, 'WidthFull':1, 'uBW':1,\n",
" # LRR\n",
" 'LRR':2,\n",
" # IDs\n",
" 'SplitSort':0, 'sort_ID':0, 'TRANSECTID':0, 'TRANSORDER':0, 'DD_ID':0,\n",
" # Other\n",
" 'Azimuth':1,'Bslope':4,'ptSlp':4,\n",
" # Coded classifications\n",
" 'GeoSet':0, 'SubType':0, 'VegDens':0, 'VegType':0,\n",
" 'Construction':0, 'Development':0, 'Nourishment':0}\n",
"pts_df = pts_df.round(fprecision)\n",
"\n",
"# Set GeoSet, SubType, VegDens, VegType fields to int32 dtypes\n",
"pts_df = pts_df.astype({'GeoSet':'int32', 'SubType':'int32', 'VegDens':'int32', 'VegType':'int32'})"
]
},
{
"cell_type": "markdown",
"metadata": {},
......@@ -1044,7 +1119,7 @@
"outputs": [],
"source": [
"# Create transect file with only ID values and geometry to publish.\n",
"trans_flds = ['TRANSECTID', 'TRANSORDER', 'DD_ID']\n",
"trans_flds = ['TRANSECTID', 'TRANSORDER', 'DD_ID', 'MHW']\n",
"for i, f in enumerate(trans_flds):\n",
" for c in trans_df.columns:\n",
" if f.lower() == c.lower():\n",
......@@ -1077,7 +1152,7 @@
"outputs": [],
"source": [
"# Create transect FC with fill values - Join values from trans_df to the transect FC as a new file.\n",
"trans_fc = fwa.JoinDFtoFC(trans_df, extendedTrans, tID_fld, out_fc=trans_name+'_fill')\n",
"trans_fc = fwa.JoinDFtoFC_2(trans_df, extendedTrans, tID_fld, out_fc=trans_name+'_fill')\n",
"\n",
"# Create transect FC with null values\n",
"fwa.CopyFCandReplaceValues(trans_fc, fill, None, out_fc=trans_name+'_null', out_dir=home)\n",
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment