Merge branch 'release/1.27.0'
colekettler committed Mar 10, 2020
2 parents 1edcff3 + d5ab775 commit c79be7f
Showing 54 changed files with 5,079 additions and 615 deletions.
4 changes: 0 additions & 4 deletions .github/PULL_REQUEST_TEMPLATE.md
@@ -19,7 +19,3 @@ Optional. Ancillary topics, caveats, alternative strategies that didn't work out
* Start after checking out this branch
* Include any setup required, such as bundling scripts, restarting services, etc.
* Include test case, and expected output

## Checklist

- [ ] All JavaScript tests pass `./scripts/testem.sh`
10 changes: 10 additions & 0 deletions README.md
@@ -61,6 +61,16 @@ The same script can be used to load the stream network data:
$ ./scripts/aws/setupdb.sh -s
```

and all the other data:

```bash
$ ./scripts/aws/setupdb.sh -d
$ ./scripts/aws/setupdb.sh -m
$ ./scripts/aws/setupdb.sh -p
$ ./scripts/aws/setupdb.sh -c
$ ./scripts/aws/setupdb.sh -q
```

Note that if you receive out-of-memory errors while loading the data, you may want to increase the RAM on your `services` VM (1512 MB may be sufficient).

See debug messages from the web app server:
2 changes: 1 addition & 1 deletion deployment/ansible/group_vars/all
@@ -44,7 +44,7 @@ docker_compose_version: "1.23.*"
geop_host: "localhost"
geop_port: 8090

geop_version: "4.0.1"
geop_version: "4.0.3"
geop_cache_enabled: 1

nginx_cache_dir: "/var/cache/nginx"
50 changes: 44 additions & 6 deletions src/mmw/apps/geoprocessing_api/tasks.py
@@ -16,6 +16,8 @@

from django.conf import settings

from mmw.settings import layer_classmaps

from apps.modeling.geoprocessing import multi, parse
from apps.modeling.tr55.utils import aoi_resolution

@@ -128,21 +130,23 @@ def analyze_nlcd(result, area_of_interest=None):

result = parse(result)
histogram = {}
total_ara = 0
total_count = 0
categories = []

has_ara = type(result.keys()[0]) == tuple

def area(dictionary, key, default=0):
return dictionary.get(key, default) * pixel_width * pixel_width

# Convert results to histogram, calculate total
for key, count in result.iteritems():
nlcd = key[0] if has_ara else key
nlcd, ara = key
total_count += count
total_ara += count if ara == 1 else 0
histogram[nlcd] = count + histogram.get(nlcd, 0)

for nlcd, (code, name) in settings.NLCD_MAPPING.iteritems():
has_ara = total_ara > 0

for nlcd, (code, name) in layer_classmaps.NLCD.iteritems():
categories.append({
'area': area(histogram, nlcd),
'active_river_area': area(result, (nlcd, 1)) if has_ara else None,
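
The hunk above changes `analyze_nlcd` to expect geoprocessing results keyed by `(nlcd, ara)` pairs rather than bare NLCD codes, and to report Active River Area (ARA) figures only when ARA pixels are actually present. A minimal sketch of that accounting, with made-up counts (`-2147483648` is the NODATA sentinel that appears in the updated tests below):

```python
# Illustrative only: counts are invented; -2147483648 stands in for NODATA.
result = {(21, 1): 40, (21, -2147483648): 60, (41, -2147483648): 100}

histogram, total_count, total_ara = {}, 0, 0
for (nlcd, ara), count in result.items():
    total_count += count
    total_ara += count if ara == 1 else 0   # only ara == 1 counts toward ARA
    histogram[nlcd] = count + histogram.get(nlcd, 0)

has_ara = total_ara > 0
# histogram == {21: 100, 41: 100}; total_ara == 40; has_ara is True,
# so 'active_river_area' values are reported instead of None.
```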
@@ -179,7 +183,7 @@ def analyze_soil(result, area_of_interest=None):
s = s if s != settings.NODATA else 3 # Map NODATA to 3
histogram[s] = count + histogram.get(s, 0)

for soil, (code, name) in settings.SOIL_MAPPING.iteritems():
for soil, (code, name) in layer_classmaps.SOIL.iteritems():
categories.append({
'area': histogram.get(soil, 0) * pixel_width * pixel_width,
'code': code,
@@ -305,6 +309,40 @@ def cm_to_m(x):
}


@shared_task
def analyze_protected_lands(result, area_of_interest=None):
if 'error' in result:
raise Exception('[analyze_protected_lands] {}'.format(result['error']))

pixel_width = aoi_resolution(area_of_interest) if area_of_interest else 1

result = parse(result)
histogram = {}
total_count = 0
categories = []

for key, count in result.iteritems():
total_count += count
histogram[key] = count + histogram.get(key, 0)

for class_id, (code, name) in layer_classmaps.PROTECTED_LANDS.iteritems():
categories.append({
'area': histogram.get(class_id, 0) * pixel_width * pixel_width,
'class_id': class_id,
'code': code,
'coverage': float(histogram.get(class_id, 0)) / total_count,
'type': name,
})

return {
'survey': {
'name': 'protected_lands',
'displayName': 'Protected Lands',
'categories': categories,
}
}
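
As a rough illustration of the new task's category math (invented pixel counts and a hypothetical 30 m resolution; the real resolution comes from `aoi_resolution`), each class's area is its pixel count times the squared pixel width, and coverage is its share of all counted pixels:

```python
# Illustrative only: a parsed histogram of {class_id: pixel_count}.
pixel_width = 30                 # meters per pixel side (hypothetical)
histogram = {1: 400, 3: 100}     # e.g. federal vs. local park pixels
total_count = sum(histogram.values())

categories = [{
    'class_id': class_id,
    'area': count * pixel_width * pixel_width,   # square meters
    'coverage': float(count) / total_count,      # fraction of the AOI's pixels
} for class_id, count in histogram.items()]
# The real task also emits zero-area entries for every class in
# layer_classmaps.PROTECTED_LANDS that is absent from the histogram.
```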


def collect_nlcd(histogram, geojson=None):
"""
Convert raw NLCD geoprocessing result to area dictionary
@@ -316,7 +354,7 @@
'code': code,
'nlcd': nlcd,
'type': name,
} for nlcd, (code, name) in settings.NLCD_MAPPING.iteritems()]
} for nlcd, (code, name) in layer_classmaps.NLCD.iteritems()]

return {'categories': categories}

30 changes: 15 additions & 15 deletions src/mmw/apps/geoprocessing_api/tests.py
@@ -138,21 +138,21 @@ def test_survey_land_only(self):
self.maxDiff = None
# NLCD Histogram of Cave Creek-Arizona Canal Diversion Channel HUC-10
histogram = {
'List(0)': 95,
'List(43)': 35,
'List(71)': 3228,
'List(42)': 5758,
'List(11)': 279,
'List(81)': 57,
'List(82)': 682,
'List(52)': 499636,
'List(21)': 73992,
'List(22)': 110043,
'List(23)': 105894,
'List(24)': 20719,
'List(90)': 461,
'List(31)': 25,
'List(95)': 159
'List(0, -2147483648)': 95,
'List(43, -2147483648)': 35,
'List(71, -2147483648)': 3228,
'List(42, -2147483648)': 5758,
'List(11, -2147483648)': 279,
'List(81, -2147483648)': 57,
'List(82, -2147483648)': 682,
'List(52, -2147483648)': 499636,
'List(21, -2147483648)': 73992,
'List(22, -2147483648)': 110043,
'List(23, -2147483648)': 105894,
'List(24, -2147483648)': 20719,
'List(90, -2147483648)': 461,
'List(31, -2147483648)': 25,
'List(95, -2147483648)': 159
}

expected = {
2 changes: 2 additions & 0 deletions src/mmw/apps/geoprocessing_api/urls.py
@@ -31,6 +31,8 @@
name='start_analyze_streams'),
url(r'analyze/terrain/$', views.start_analyze_terrain,
name='start_analyze_terrain'),
url(r'analyze/protected-lands/$', views.start_analyze_protected_lands,
name='start_analyze_protected_lands'),
url(r'jobs/' + uuid_regex, get_job, name='get_job'),
url(r'modeling/worksheet/$', views.start_modeling_worksheet,
name='start_modeling_worksheet'),
148 changes: 139 additions & 9 deletions src/mmw/apps/geoprocessing_api/views.py
@@ -14,8 +14,6 @@

from django.utils.timezone import now
from django.urls import reverse
from django.conf import settings
from django.contrib.gis.geos import GEOSGeometry

from apps.core.models import Job
from apps.core.tasks import (save_job_error,
@@ -414,14 +412,8 @@ def start_analyze_land(request, format=None):

geop_input = {'polygon': [area_of_interest]}

aoi_geom = GEOSGeometry(area_of_interest, srid=4326)
geop_task = 'nlcd'

if settings.ARA_PERIMETER.contains(aoi_geom):
geop_task = 'nlcd_ara'

return start_celery_job([
geoprocessing.run.s(geop_task, geop_input, wkaoi),
geoprocessing.run.s('nlcd_ara', geop_input, wkaoi),
tasks.analyze_nlcd.s(area_of_interest)
], area_of_interest, user)

@@ -1019,6 +1011,144 @@ def start_analyze_terrain(request, format=None):
], area_of_interest, user)


@swagger_auto_schema(method='post',
manual_parameters=[schemas.WKAOI],
request_body=schemas.MULTIPOLYGON,
responses={200: schemas.JOB_STARTED_RESPONSE})
@decorators.api_view(['POST'])
@decorators.authentication_classes((SessionAuthentication,
TokenAuthentication, ))
@decorators.permission_classes((IsAuthenticated, ))
@decorators.throttle_classes([BurstRateThrottle, SustainedRateThrottle])
@log_request
def start_analyze_protected_lands(request, format=None):
"""
Starts a job to produce a protected lands histogram for a given area.
Uses the Protected Areas Database of the United States (PADUS),
published by the U.S. Geological Survey Gap Analysis Program in 2016.
For more information, see the
[technical documentation](https://wikiwatershed.org/
documentation/mmw-tech/#overlays-tab-coverage).
## Response
You can use the URL provided in the response's `Location`
header to poll for the job's results.
<details>
<summary>
**Example of a completed job's `result`**
</summary>
{
"survey": {
"displayName": "Protected Lands",
"name": "protected_lands",
"categories": [
{
"area": 3589.015925407952,
"class_id": 1,
"code": "pra_f",
"coverage": 0.00004202077927535166,
"type": "Park or Recreational Area - Federal"
},
{
"area": 0.0,
"class_id": 2,
"code": "pra_s",
"coverage": 0.0,
"type": "Park or Recreational Area - State"
},
{
"area": 11292838.60929612,
"class_id": 3,
"code": "pra_l",
"coverage": 0.132218381989894,
"type": "Park or Recreational Area - Local"
},
{
"area": 0.0,
"class_id": 4,
"code": "pra_p",
"coverage": 0.0,
"type": "Park or Recreational Area - Private"
},
{
"area": 0.0,
"class_id": 5,
"code": "pra_u",
"coverage": 0.0,
"type": "Park or Recreational Area - Unknown"
},
{
"area": 19739.587589743736,
"class_id": 6,
"code": "nra_f",
"coverage": 0.00023111428601443412,
"type": "Natural Resource Area - Federal"
},
{
"area": 0.0,
"class_id": 7,
"code": "nra_s",
"coverage": 0.0,
"type": "Natural Resource Area - State"
},
{
"area": 206368.41571095726,
"class_id": 8,
"code": "nra_l",
"coverage": 0.0024161948083327206,
"type": "Natural Resource Area - Local"
},
{
"area": 4486.26990675994,
"class_id": 9,
"code": "nra_p",
"coverage": 0.000052525974094189576,
"type": "Natural Resource Area - Private"
},
{
"area": 0.0,
"class_id": 10,
"code": "nra_u",
"coverage": 0.0,
"type": "Natural Resource Area - Unknown"
},
{
"area": 0.0,
"class_id": 11,
"code": "con_ease",
"coverage": 0.0,
"type": "Conservation Easement"
},
{
"area": 0.0,
"class_id": 12,
"code": "ag_ease",
"coverage": 0.0,
"type": "Agricultural Easement"
}
]
}
}
</details>
"""
user = request.user if request.user.is_authenticated else None
area_of_interest, wkaoi = _parse_input(request)

geop_input = {'polygon': [area_of_interest]}

return start_celery_job([
geoprocessing.run.s('protected_lands', geop_input, wkaoi),
tasks.analyze_protected_lands.s(area_of_interest)
], area_of_interest, user)
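
A hedged usage sketch for the new endpoint: the host, URL prefix, token, coordinates, and job status names below are placeholders, and the request body follows the MULTIPOLYGON schema referenced in the decorator; only the `analyze/protected-lands/` route and the poll-the-`Location`-header flow come from this change.

```python
# Illustrative only: start a protected-lands analysis, then poll the job URL
# returned in the Location header until it finishes.
import time

import requests

API = 'https://example.org/api'                 # hypothetical prefix
HEADERS = {'Authorization': 'Token <token>'}    # DRF TokenAuthentication

aoi = {
    'type': 'MultiPolygon',
    'coordinates': [[[[-75.2, 39.9], [-75.1, 39.9], [-75.1, 40.0],
                      [-75.2, 40.0], [-75.2, 39.9]]]],
}

resp = requests.post(API + '/analyze/protected-lands/',
                     json=aoi, headers=HEADERS)
job_url = resp.headers['Location']

while True:
    job = requests.get(job_url, headers=HEADERS).json()
    if job.get('status') in ('complete', 'failed'):   # status names assumed
        break
    time.sleep(1)

print(job.get('result'))
```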


@swagger_auto_schema(method='post',
request_body=schemas.MULTIPOLYGON,
responses={200: schemas.JOB_STARTED_RESPONSE})
8 changes: 6 additions & 2 deletions src/mmw/apps/modeling/mapshed/calcs.py
@@ -60,7 +60,11 @@ def nearest_weather_stations(shapes, n=NUM_WEATHER_STATIONS):
subquery = '''
(SELECT %s watershed_id, station, location, meanrh, meanwind,
meanprecip, begyear, endyear, eroscoeff, rain_cool,
rain_warm, etadj, grw_start, grw_end
rain_warm, etadj, grw_start, grw_end,
ST_Distance(ST_Transform(geom, 5070),
ST_Transform(
ST_SetSRID(ST_GeomFromText(%s), 4326),
5070)) dist
FROM ms_weather_station
ORDER BY geom <-> ST_SetSRID(ST_GeomFromText(%s), 4326)
LIMIT %s)
@@ -69,7 +73,7 @@
for (_, watershed_id, aoi) in shapes:
subqueries.append(subquery)
geom = GEOSGeometry(aoi, srid=4326)
params.extend([watershed_id, geom.wkt, n])
params.extend([watershed_id, geom.wkt, geom.wkt, n])

sql = ' UNION '.join(subqueries) + ';'
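
To clarify the change above: the subquery now carries four `%s` placeholders per shape (the watershed id, the AOI geometry used by `ST_Distance`, the same geometry again in the `ORDER BY`, and the limit), so the parameter list must repeat the geometry. A sketch with a made-up watershed id and WKT standing in for the converted AOI:

```python
# Illustrative only: placeholder order must match the subquery exactly.
params = []
shapes = [(None, 42, 'POINT(-75.16 39.95)')]    # (layer, watershed_id, WKT)
for (_, watershed_id, wkt) in shapes:
    params.extend([watershed_id, wkt, wkt, 5])  # id, WKT twice, then LIMIT n
# params == [42, 'POINT(-75.16 39.95)', 'POINT(-75.16 39.95)', 5]
```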

3 changes: 3 additions & 0 deletions src/mmw/apps/modeling/mapshed/tasks.py
@@ -65,6 +65,9 @@ def collect_data(geop_results, geojson, watershed_id=None, weather=None):
else:
ws = nearest_weather_stations([(None, watershed_id, geojson)])

z['WeatherStations'] = [{'station': s.station,
'distance': s.dist} for s in ws]

z['Grow'] = growing_season(ws)
z['Acoef'] = erosion_coeff(ws, z['Grow'])
z['PcntET'] = et_adjustment(ws)