-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathLDMpedons_Extract_LDMPedons_from_SDA.py
287 lines (212 loc) · 10.5 KB
/
LDMpedons_Extract_LDMPedons_from_SDA.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
#-------------------------------------------------------------------------------
# Name: Extract_LDM_Pedons_from_SDA.py
# Purpose: Download NCSS Lab Data Mart Pedons from Soil Data Access
# Compliance process.
# Author: Adolfo.Diaz
# GIS Specialist
# National Soil Survey Center
# USDA - NRCS
# e-mail: [email protected]
# phone: 608.662.4422 ext. 216
# ==========================================================================================
# Updated 2/12/2021 - Adolfo Diaz
# - Hydric Classification Presence was added as an additional optional interpretation.
# - Added the simple soils geometry as a layer.
# - Added 'datetime' as a library to be imported; not sure why it was functioning
# correctly without it.
# - replaced the reserved python keyword 'property' with soilProperty
# - slightly updated the metadata description
# ==========================================================================================
# Updated 4/7/2021 - Adolfo Diaz
# - Changed Hydric Rating interpretation from dominant condition to component frequency.
# This uses Jason Nemecek's approach
# - Added URL to access Ecological Site Descriptions in EDIT database
# - Added SSURGO Metadata for spatial and tabular version to final output.
#-------------------------------------------------------------------------------
# ==============================================================================================================================
def AddMsgAndPrint(msg, severity=0):
    """Print a message to the console and forward it to the ArcGIS geoprocessor.

    Parameters
        msg      - message text to report
        severity - 0 = informational message (default)
                   1 = warning
                   2 = error (prefixed with a blank line)

    Never raises: messaging is best-effort.  If arcpy is unavailable (e.g. the
    script is run outside ArcGIS) the geoprocessor call fails silently and only
    the console print occurs.
    """
    try:
        print(msg)

        # Add a geoprocessing message (in case this is run as a tool)
        if severity == 0:
            arcpy.AddMessage(msg)
        elif severity == 1:
            arcpy.AddWarning(msg)
        elif severity == 2:
            arcpy.AddError("\n" + msg)

    # Narrowed from a bare 'except:' so Ctrl-C / SystemExit still propagate;
    # anything else (NameError when arcpy is missing, etc.) stays best-effort.
    except Exception:
        pass
# ==============================================================================================================================
def errorMsg():
    """Report the currently-handled exception via AddMsgAndPrint.

    Formats the active traceback and emits the second and last formatted
    lines as an error (severity 2).  Messages containing 'exit' are treated
    as a deliberate abort and only a blank line is printed.

    Never raises: any failure while formatting falls through to a generic
    error message.
    """
    try:
        exc_type, exc_value, exc_traceback = sys.exc_info()

        # Format the traceback once and reuse it (the original formatted it
        # twice); index 1 is the caller frame line, -1 is the exception line.
        tbLines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        theMsg = "\t" + tbLines[1] + "\n\t" + tbLines[-1]

        if theMsg.find("exit") > -1:
            # Deliberate exit -- do not report it as an error
            AddMsgAndPrint("\n\n")
            pass
        else:
            AddMsgAndPrint(theMsg, 2)

    except:
        # Fixed message: it previously referenced a nonexistent
        # 'unHandledException' method.
        AddMsgAndPrint("Unhandled error in errorMsg method", 2)
        pass
# ==============================================================================================================================
def getSDATabularRequest(sqlQuery,layerPath):
    """Send an SQL query to Soil Data Access (SDA) and append the results to a layer.

    Parameters
        sqlQuery  - A valid SDA SQL statement in ascii format.
        layerPath - Directory path to an existing spatial layer or table where
                    the SDA results will be appended.  The field names returned
                    in the metadata portion of the JSON response are
                    automatically added to the layer.

    Returns
        True  if SDA results were successfully appended to the input layer.
        False otherwise, including HTTP errors (bad query, server timeout,
        no response from server) or a response without a 'Table' key.

    Notes
        Relies on module-level helpers addSSURGOpropertyFld() and
        lookupSSURGOFieldName() and on arcpy.  Assumes every mukey in
        layerPath has a matching row in the query results -- a missing key
        would raise inside the update cursor and be caught by the generic
        handler (returning False).  TODO confirm that assumption holds for
        all callers.
    """
    try:
        # uncomment next line to print the query to the console
        #arcpy.AddMessage(pQry.replace(">", ">").replace("<", "<"))

        # SDA tabular REST endpoint
        url = r'https://SDMDataAccess.sc.egov.usda.gov/Tabular/post.rest'

        # Build the request; JSON+COLUMNNAME+METADATA makes SDA return the
        # column names and column descriptions as the first two result rows.
        request = {}
        request["format"] = "JSON+COLUMNNAME+METADATA"
        request["query"] = sqlQuery

        # json.dumps = serialize obj (request dictionary) to a JSON formatted str
        data = json.dumps(request)

        # Because we are passing the "data" argument this is a POST request,
        # not a GET.  (ArcMap-era equivalent was urllib.Request/urllib.urlopen.)
        # ArcPro Request
        data = data.encode('ascii')
        response = urllib.request.urlopen(url,data)

        # read query results
        queryResults = response.read()

        # Convert the returned JSON string into a Python dictionary.
        qData = json.loads(queryResults)

        # "Table" holds the result rows; its absence signals a failed query
        if "Table" in qData:

            # list of lists: row 0 = column names, row 1 = column metadata,
            # remaining rows = data records
            queryData = qData["Table"]

            columnNames = queryData.pop(0)       # Isolate column names and remove from queryData
            columnInfo = queryData.pop(0)        # Isolate column info and remove from queryData
            mukeyIndex = columnNames.index('mukey')      # list index of where 'mukey' is found
            propertyIndex = len(columnNames) -1          # property of interest; normally the last column
            propertyFldName = columnNames[propertyIndex] # SSURGO field name of the property of interest

            # Add fields in columnNames to layerPath
            if not addSSURGOpropertyFld(layerPath, columnNames, columnInfo):
                return False

            # Get the expanded SSURGO field name (alias) of the property of interest
            fieldAlias = lookupSSURGOFieldName(propertyFldName,returnAlias=True)

            # Update the field alias if possible
            if fieldAlias:
                arcpy.AlterField_management(layerPath,propertyFldName,"#",fieldAlias)

            # Re-index queryData by mukey so the UpdateCursor below can look up
            # each feature's tabular record directly, e.g.
            # '455428': [u'MN161','L110E','Lester-Ridgeton complex, 18 to 25 percent slopes','B']
            propertyDict = dict()

            for item in queryData:
                propertyDict[item[mukeyIndex]] = item

            # columnNames e.g. [u'areasymbol', u'musym', u'muname', u'mukey', u'drainagecl']
            with arcpy.da.UpdateCursor(layerPath, columnNames) as cursor:
                for row in cursor:
                    mukey = row[mukeyIndex]

                    # Only the property of interest is written back; fields such
                    # as areasymbol, musym, etc. never change so are not updated.
                    propertyVal = propertyDict[mukey][propertyIndex]
                    row[propertyIndex] = propertyVal
                    cursor.updateRow(row)

            return True

        else:
            AddMsgAndPrint("Failed to get tabular data (getSDATabularRequest)",2)
            return False

    except socket.timeout as e:
        AddMsgAndPrint('Soil Data Access timeout error',2)
        return False

    except socket.error as e:
        AddMsgAndPrint('Socket error: ' + str(e),2)
        return False

    except HTTPError as e:
        AddMsgAndPrint('HTTP Error' + str(e),2)
        return False

    except URLError as e:
        AddMsgAndPrint('URL Error' + str(e),2)
        return False

    except:
        errorMsg()
        return False
if __name__ == '__main__':

    try:
        # Import urllib.request explicitly: a bare 'import urllib' does NOT
        # load the 'request' submodule -- the original code only worked via
        # the side effect of 'from urllib.request import Request'.
        # arcpy / metadata / datetime mirror the toolbox environment; this
        # ad-hoc block itself only exercises the SDA web request.
        import sys, os, time, urllib.request, json, traceback, socket, arcpy, datetime
        from arcpy import metadata as md
        from urllib.error import HTTPError, URLError

        # SDA custom query: serialize half of lab_physical_properties to JSON
        # (FOR JSON AUTO), capture it in @json and return it with its length.
        sqlQuery = "~DeclareVarchar(@json,max)~\n"\
        ";WITH src (n) AS\n"\
        "(\n"\
        " SELECT TOP 50 PERCENT *\n"\
        " FROM lab_physical_properties sc1\n"\
        " FOR JSON AUTO\n"\
        ")\n"\
        "SELECT @json = src.n\n"\
        "FROM src\n"\
        "SELECT @json, LEN(@json);"

        print(sqlQuery)

        # SDA tabular REST endpoint
        url = r'https://SDMDataAccess.sc.egov.usda.gov/Tabular/post.rest'

        # Create request using JSON, return data as JSON (no column metadata)
        request = {}
        request["format"] = "JSON"
        request["query"] = sqlQuery

        # json.dumps = serialize obj (request dictionary) to a JSON formatted str
        data = json.dumps(request)

        # Because we are passing the "data" argument, this is a POST request,
        # not a GET.  The context manager closes the connection promptly
        # instead of leaking it.
        data = data.encode('ascii')
        with urllib.request.urlopen(url, data) as response:
            # read query results
            queryResults = response.read()

        # Convert the returned JSON string into a Python dictionary.
        qData = json.loads(queryResults)

    except HTTPError as e:
        # SDA puts error details in the response body; surface them.
        content = e.read()
        print(content)

    except:
        errorMsg()