Skip to content

Commit

Permalink
Api (#125)
Browse files Browse the repository at this point in the history
* Added functions to load existing StreamCat ftp csv tables to the StreamCat API database

* fix to populate tables function

* Added function to list published / unpublished metrics in API

* Updated delete table function and control table

* updates to API

* updates for StreamCat_API.py and ControlTable

* updated control table

* adjusting metric metadata info

* channel width additions
  • Loading branch information
mhweber authored Mar 24, 2023
1 parent 8c7a3ab commit 63b5b63
Show file tree
Hide file tree
Showing 2 changed files with 135 additions and 4 deletions.
6 changes: 3 additions & 3 deletions ControlTable_StreamCat.csv
Original file line number Diff line number Diff line change
Expand Up @@ -144,8 +144,8 @@ MTBS_2015,Continuous,MTBS_2015,none,MTBS_2015.tif,,MTBS,Mean,100,0,0,0,,Yes,,Yes
MTBS_2016,Continuous,MTBS_2016,none,MTBS_2016.tif,,MTBS,Mean,100,0,0,0,,Yes,,Yes
MTBS_2017,Continuous,MTBS_2017,none,MTBS_2017.tif,,MTBS,Mean,100,0,0,0,,Yes,,Yes
MTBS_2018,Continuous,MTBS_2018,none,MTBS_2018.tif,,MTBS,Mean,100,0,0,0,,Yes,,Yes
MTBS_Severity_1984,Categorical,lookup/MTBS_severity1984_lookup.csv,none,MTBS_Severity_1984.tif,,MTBS_Severity_1984,Percent,1,0,0,0,,Yes,,Yes
MTBS_Severity_1985,Categorical,lookup/MTBS_severity1985_lookup.csv,none,MTBS_Severity_1985.tif,,MTBS_Severity_1985,Percent,1,0,0,0,,Yes,,Yes
MTBS_Severity_1984,Categorical,lookup/MTBS_severity1984_lookup.csv,none,MTBS_Severity_1984.tif,,MTBS_Severity_1984,Percent,1,0,0,0,,Yes,,
MTBS_Severity_1985,Categorical,lookup/MTBS_severity1985_lookup.csv,none,MTBS_Severity_1985.tif,,MTBS_Severity_1985,Percent,1,0,0,0,,Yes,,
MTBS_Severity_1986,Categorical,lookup/MTBS_severity1986_lookup.csv,none,MTBS_Severity_1986.tif,,MTBS_Severity_1986,Percent,1,0,0,0,,Yes,,
MTBS_Severity_1987,Categorical,lookup/MTBS_severity1987_lookup.csv,none,MTBS_Severity_1987.tif,,MTBS_Severity_1987,Percent,1,0,0,0,,Yes,,
MTBS_Severity_1988,Categorical,lookup/MTBS_severity1988_lookup.csv,none,MTBS_Severity_1988.tif,,MTBS_Severity_1988,Percent,1,0,0,0,,Yes,,
Expand Down Expand Up @@ -293,7 +293,7 @@ RdDens_RipBuf100,Continuous,RdDens,Rp100,roadden.tif,,RoadDensityRipBuf100,Mean,
RdCrs,Continuous,RdCrs,none,rdstcrs.tif,SlpWtd,RoadStreamCrossings,Density,0.03,0,0,0,,Yes,,Yes
RockN,Continuous,RockN,none,RockN_USA_USGSproj_1km_kgkm2.tif,,RockN,Mean,1,0,0,0,,Yes,,Yes
Runoff,Continuous,Runoff,none,runoff.tif,,Runoff,Mean,1,0,0,0,,Yes,,Yes
Septic,Continuous,Septic,none,septic1990.tif,,Septic,Mean,0.001,0,0,0,,Yes,,Yes
Septic,Continuous,Septic,none,septic1990.tif,,Septic,Mean,1,0,0,0,,Yes,,Yes
Clay,Continuous,Clay,none,clay.tif,,STATSGO_Set1,Mean,0.01,0,0,0,,Yes,,Yes
Sand,Continuous,Sand,none,sand.tif,,STATSGO_Set1,Mean,0.01,0,0,0,,Yes,,Yes
Om,Continuous,Om,none,om.tif,,STATSGO_Set2,Mean,0.01,0,0,0,,Yes,,Yes
Expand Down
133 changes: 132 additions & 1 deletion StreamCat_API.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
from datetime import datetime as dt
import csv
import urllib3
from io import StringIO
from bs4 import BeautifulSoup

def DBtablesAsDF(config_file):
Expand All @@ -39,6 +40,7 @@ def DBtablesAsDF(config_file):
df=pd.DataFrame.from_records(json_object)
return df


def ViewDBtable(config_file, table):
"""
__author__ = "Marc Weber <[email protected]>"
Expand All @@ -59,8 +61,34 @@ def ViewDBtable(config_file, table):
headers=config.defaults(),
verify=False
)
#json_object = json.loads(r.text)
#df=pd.DataFrame.from_records(json_object)
#return df
pprint(json.loads(r.text))

def ViewMetadatatable(config_file, table):
    """
    __author__ = "Marc Weber <[email protected]>"
               "Rick Debbout <[email protected]>"
    Return the variable (metric) metadata for a specific StreamCat API
    database table as a pandas data frame, using a config file that
    contains db url, username, password, and server.

    Arguments
    ---------
    config_file : configuration file with db configuration parameters
    table : database table name

    Returns
    -------
    pandas.DataFrame of the table's variable_info records
    """
    config = configparser.ConfigParser()
    config.read(config_file)
    r = requests.get(
        f"{config['server']['URL']}/StreamCat/admin/manage/tables/{table}/variable_info",
        headers=config.defaults(),
        verify=False,
    )
    # Fail loudly on HTTP errors rather than silently parsing an error
    # page as if it were CSV.
    r.raise_for_status()
    # The endpoint returns CSV text; drop undecodable bytes since source
    # metadata may contain non-utf8 (e.g. cp1252) characters.
    df = pd.read_csv(StringIO(r.content.decode("utf8", errors="ignore")))
    return df


def DeleteDBtable(config_file, table, just_data=False):
"""
Expand Down Expand Up @@ -145,6 +173,7 @@ def PopulateDBtable(config_file, table, file_loc, temp_file):

counter=0
for file in files:
print(file)
infile = file_loc + '/' + file
df = pd.read_csv(infile)
counter+=len(df)
Expand Down Expand Up @@ -224,15 +253,45 @@ def MissingAPImetrics(config_file):
return(published, not_published)


def UpdateMetricMetadata(config_file, table, infile, temp_file):
    """
    __author__ = "Marc Weber <[email protected]>"
    Modify metric information in the StreamCat API database,
    using a config file that contains db url, username, password,
    and server. You must provide a fresh list of all variable
    info entries for this resource, as it clears out the
    existing list and substitutes a new one.

    Arguments
    ---------
    config_file : configuration file with db configuration parameters
    table : name of table metadata to load
    infile : pandas data frame (e.g. from ViewMetadatatable) holding the
             complete, updated set of variable_info records
    temp_file : path of a scratch csv file used to stage the upload

    Returns
    -------
    requests.Response returned by the PUT request
    """
    config = configparser.ConfigParser()
    config.read(config_file)
    requests.urllib3.disable_warnings()
    # Stage the data frame as csv, then stream that file as the PUT body.
    infile.to_csv(temp_file, index=False)
    # Context manager guarantees the handle is closed; the original code
    # leaked the open file object for the life of the process.
    with open(temp_file, "rb") as filedata:
        response = requests.put(
            f"{config['server']['URL']}/StreamCat/admin/manage/tables/{table}/variable_info",
            headers=config.defaults(),
            verify=False,
            data=filedata,
        )
    return response

###############

# Define config file
config_file='E:/GitProjects/NARS/NARS/api/api_config.ini'
# config_file='E:/GitProjects/NARS/NARS/api/api_config_postgres.ini'

# List tables
test = DBtablesAsDF(config_file)
test.head()
test.tail()
test['DSNAME'][0:20]
test['DSNAME'][21:40]
test['DSNAME'][41:60]
test['DSNAME'][61:70]
test['DSNAME'][61:80]
test['DSNAME'][81:120]
# View a particular table
Expand All @@ -244,6 +303,7 @@ def MissingAPImetrics(config_file):
# Delete a table
DeleteDBtable(config_file, table, just_data =True)
# DeleteDBtable(config_file, table, just_data =False)

# Create a table
test = CreateDBtable(config_file, table_params)
print(test)
Expand All @@ -255,10 +315,17 @@ def MissingAPImetrics(config_file):
# table='GeoChemPhys2'
table='ImperviousSurfacesRipBuf100'
table='RoadDensityRipBuf100'
table='Dams'
table='predicted_channel_widths_depths'
# file_loc='O:/PRIV/CPHEA/PESD/COR/CORFILES/Geospatial_Library_Projects/StreamCat/FTP_Staging/HydroRegions'
file_loc='O:/PRIV/CPHEA/PESD/COR/CORFILES/Geospatial_Library_Projects/StreamCat/predicted-values/widths-depths_v2'
temp_file='E:/WorkingData/junk.csv'

table='MTBS'
table='WWTP'
file_loc='O:/PRIV/CPHEA/PESD/COR/CORFILES/Geospatial_Library_Projects/StreamCat/FTP_Staging/HydroRegions'
temp_file='E:/WorkingData/junk2.csv'

LoadTime = dt.now()
PopulateDBtable(config_file, table, file_loc, temp_file)
print("Table load complete in : " + str(dt.now() - LoadTime))
Expand All @@ -277,6 +344,51 @@ def MissingAPImetrics(config_file):
published, unpublished = MissingAPImetrics(config_file)


# View metadata for a table
table = 'WWTP'
df = ViewMetadatatable(config_file, table)
df.loc[df.METRIC_NAME == 'WWTPAllDens[AOI]', 'METRIC_UNITS'] = "number/ km2"

# Read in .csv file of metadata
table='predicted_channel_widths_depths'
met = pd.read_csv('O:/PRIV/CPHEA/PESD/COR/CORFILES/Geospatial_Library_Projects/StreamCat/MetaData/StreamCatMetrics.csv', encoding='cp1252')
met = met.loc[met['final_table'] == table]
met.columns = met.columns.str.upper()
met = met[df.columns]
temp_file='E:/WorkingData/junk.csv'
UpdateMetricMetadata(config_file, table, met, temp_file)

# Update metadata for a table
# make any adjustments to metrics in table and update
df['SOURCE_URL'].values[0]
df['SOURCE_URL'] = 'https://nadp.slh.wisc.edu/maps-data/ntn-gradient-maps/'
df['SOURCE_URL'].values[0]

# View metadata for a table
table = 'RefStreamTempPred'
df = ViewMetadatatable(config_file, table)

# Update metadata for a table
# make any adjustments to metrics in table and update
df['SOURCE_URL'].values[0]
df['SOURCE_URL'] = 'https://enviroatlas.epa.gov/enviroatlas/DataFactSheets/pdf/Supplemental/PotentialWetlandArea.pdf'
df['SOURCE_URL'].values[0]
temp_file='E:/WorkingData/junk.csv'
UpdateMetricMetadata(config_file, table, df, temp_file)




# Table definition for CreateDBtable: predicted channel width/depth metrics.
# Fixes display-name typo "Predicted Bankfull Widthy" -> "Predicted Bankfull Width".
table_params = {
    "name": "predicted_channel_widths_depths",
    "metrics": [
        {"name": "wetted_width_m", "display_name": "Predicted wetted width"},
        {"name": "thalweg_depth_cm", "display_name": "Predicted Thalweg Depth"},
        {"name": "bankfull_width_m", "display_name": "Predicted Bankfull Width"},
        {"name": "bankfull_depth_m", "display_name": "Predicted Bankfull Depth"},
    ],
    "columns": [
        {"name": "CatPctFull", "type": "number"},
        {"name": "WsPctFull", "type": "number"},
        {"name": "wetted_width_m", "type": "number"},
        {"name": "thalweg_depth_cm", "type": "number"},
        {"name": "bankfull_width_m", "type": "number"},
        {"name": "bankfull_depth_m", "type": "number"},
    ],
}


table_params = {"name": "AgMidHiSlopes2011",
"metrics":[{"name": "PctAg2011Slp10Cat", "display_name": "Percent of Agriculture on 10% Slope"},
{"name": "PctAg2011Slp20Cat", "display_name": "Percent of Agriculture on 20% Slope"}],
Expand Down Expand Up @@ -314,6 +426,24 @@ def MissingAPImetrics(config_file):
{"name": "WWTPAllDensCat", "type": "number"},{"name": "WWTPAllDensWs","type": "number"},]}



# Table definition for CreateDBtable: MTBS burn-severity class percentages, 1984.
# Fixes column name "PctNoFireCat1984Cat" (doubled "Cat") -> "PctNoFire1984Cat",
# consistent with its watershed partner "PctNoFire1984Ws" and every other
# Cat/Ws column pair below.
table_params = {
    "name": "MTBS_Severity_1984",
    "metrics": [
        {"name": "pctnofire1984", "display_name": "Percent No Fire Burn Class For Year"},
        {"name": "pctundsev1984", "display_name": "Percent Underburned to Low Burn Severity Class For Year"},
        {"name": "pctlowsev1984", "display_name": "Percent Low Burn Severity Class For Year"},
        {"name": "pctmodsev1984", "display_name": "Percent Moderate Burn Severity Class For Year"},
        {"name": "pcthighsev1984", "display_name": "Percent High Burn Severity Class For Year"},
        {"name": "pctincvegresp1984", "display_name": "Percent Increased Greenness and Veg Response Class For Year"},
        {"name": "pctnonprocmask1984", "display_name": "Percent Non Processing Mask Class For Year"},
    ],
    "columns": [
        {"name": "CatPctFull", "type": "number"},
        {"name": "WsPctFull", "type": "number"},
        {"name": "PctNoFire1984Cat", "type": "number"},
        {"name": "PctNoFire1984Ws", "type": "number"},
        {"name": "PctUndSev1984Cat", "type": "number"},
        {"name": "PctUndSev1984Ws", "type": "number"},
        {"name": "PctLowSev1984Cat", "type": "number"},
        {"name": "PctLowSev1984Ws", "type": "number"},
        {"name": "PctModSev1984Cat", "type": "number"},
        {"name": "PctModSev1984Ws", "type": "number"},
        {"name": "PctHighSev1984Cat", "type": "number"},
        {"name": "PctHighSev1984Ws", "type": "number"},
        {"name": "PctIncVegResp1984Cat", "type": "number"},
        {"name": "PctIncVegResp1984Ws", "type": "number"},
        {"name": "PctNonProcMask1984Cat", "type": "number"},
        {"name": "PctNonProcMask1984Ws", "type": "number"},
    ],
}

table_params = {"name": "MTBS_Severity_2018",
"metrics":[{"name": "pctnofire2018", "display_name": "Percent No Fire Burn Class For Year"},
{"name": "pctundsev2018", "display_name": "Percent Underburned to Low Burn Severity Class For Year"},
Expand Down Expand Up @@ -474,4 +604,5 @@ def MissingAPImetrics(config_file):
{"name": "PctUrbLo2019CatRp100", "type": "number"},{"name": "PctUrbLo2019WsRp100","type": "number"},
{"name": "PctUrbMd2019CatRp100", "type": "number"},{"name": "PctUrbMd2019WsRp100","type": "number"},
{"name": "PctUrbOp2019CatRp100", "type": "number"},{"name": "PctUrbOp2019WsRp100","type": "number"},
{"name": "PctWdWet2019CatRp100", "type": "number"},{"name": "PctWdWet2019WsRp100","type": "number"}]}
{"name": "PctWdWet2019CatRp100", "type": "number"},{"name": "PctWdWet2019WsRp100","type": "number"}]}

0 comments on commit 63b5b63

Please sign in to comment.