Merge pull request #20 from mattrossman/flatten-menu
Flatten menu response and refactor menu functions
simon-andrews authored Aug 18, 2018
2 parents e6fc392 + b45fa21 commit 3a78af4
Showing 2 changed files with 36 additions and 39 deletions.
42 changes: 3 additions & 39 deletions umass_toolkit/dining.py
@@ -1,15 +1,11 @@
 # Abandon all hope, ye who enter here.
 
-from bs4 import BeautifulSoup
 import datetime
 import json
-import pint
 import requests
 import urllib.parse
 
 import dining_utils
 
-_ureg = pint.UnitRegistry()
-
 def get_locations():
     locations = requests.get('https://www.umassdining.com/uapp/get_infov2').json()
@@ -37,37 +33,6 @@ def location_id_to_name(location_id):
             return location['name']
     raise KeyError('no locations found with ID %d' % location_id)

-def _menu_html_to_dict(html_string):
-    soup = BeautifulSoup(html_string, 'html.parser')
-    items = soup.find_all('a', href='#inline')
-    ret = {}
-    for item in items:
-        item_name = item.string
-        ret[item_name] = {}
-        for attribute in item.attrs.keys():
-            if attribute.startswith('data-'):
-                if attribute.endswith('dv') or attribute in ['data-dish-name', 'data-recipe-webcode']:
-                    continue
-                attribute_name = attribute[5:]
-                data = item.attrs[attribute]
-                if attribute_name == 'calories' or attribute_name == 'calories-from-fat':
-                    if data == '':
-                        continue
-                    data = int(data)
-                elif attribute_name == 'clean-diet-str':
-                    diets = data.split(', ')
-                    ret[item_name]['diets'] = diets
-                    continue
-                elif attribute_name in ['allergens', 'ingredient-list']:
-                    data = dining_utils.parse_list(data)
-                elif attribute_name in ['cholesterol', 'sodium', 'dietary-fiber', 'protein', 'sat-fat', 'sugars',
-                                        'total-carb', 'total-fat', 'trans-fat']:
-                    if data == '':
-                        continue
-                    data = _ureg.Quantity(data)
-                ret[item_name][attribute_name] = data
-    return ret
-
 def get_menu(location, date = datetime.date.today()):
     # If there is no menu available (for example, if the location is closed), then UMass Dining will simply return a blank page.
     # Status code is 200 no matter what...
@@ -78,11 +43,10 @@ def get_menu(location, date = datetime.date.today()):
         r = requests.get(request_url).json()
     except json.decoder.JSONDecodeError:
         return []
-    ret = {}
+    ret = []
     for meal in r.keys():
-        ret[meal] = {}
-        for menu in r[meal].keys():
-            ret[meal][menu] = _menu_html_to_dict(r[meal][menu])
+        for category in r[meal].keys():
+            ret.extend(dining_utils.category_html_to_dict(r[meal][category], meal, category))
     return ret
 
 def get_food_trucks():
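Note: get_menu() now returns a flat list of dish dicts instead of the old nested meal -> category -> dish mapping. A minimal usage sketch, assuming the package is importable as umass_toolkit (hypothetical, not part of this commit; 'worcester' is a placeholder, since the accepted location values are defined elsewhere in dining.py and not shown in this diff):

# Hypothetical usage sketch, not part of this commit.
from umass_toolkit import dining

for dish in dining.get_menu('worcester'):  # placeholder location value
    # 'meal-name' and 'category-name' are attached by category_html_to_dict;
    # the remaining keys mirror the data-* attributes with the prefix stripped,
    # e.g. 'dish-name', 'calories', 'diets'.
    print(dish['meal-name'], dish['category-name'], dish.get('dish-name'))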
33 changes: 33 additions & 0 deletions umass_toolkit/dining_utils.py
@@ -1,3 +1,10 @@
+from bs4 import BeautifulSoup
+import pint
+
+
+ureg = pint.UnitRegistry()
+
+
 #TODO: handle multiple levels of parentheses
 #e.g. Chili (spicy (but not too spicy))
 def parse_list(ingredients):
@@ -24,3 +31,29 @@ def parse_list(ingredients):
         ingredient_list.append(ingredient)
         i += 1
     return ingredient_list
+
+def category_html_to_dict(html_string, meal, category):
+    soup = BeautifulSoup(html_string, 'html.parser')
+    items = soup.find_all('a', href='#inline')
+    ret = []
+    for item in items:
+        dish = {}
+        dish['category-name'] = category
+        dish['meal-name'] = meal
+        for attribute in item.attrs.keys():
+            if attribute.startswith('data-') and not attribute.endswith('dv'):
+                attribute_name = attribute[5:]
+                data = item.attrs[attribute]
+                if attribute_name == 'calories' or attribute_name == 'calories-from-fat':
+                    data = int(data) if data else None
+                elif attribute_name == 'clean-diet-str':
+                    data = data.split(', ')
+                    attribute_name = 'diets'
+                elif attribute_name in ['allergens', 'ingredient-list']:
+                    data = parse_list(data)
+                elif attribute_name in ['cholesterol', 'sodium', 'dietary-fiber', 'protein', 'sat-fat', 'sugars',
+                                        'total-carb', 'total-fat', 'trans-fat']:
+                    data = ureg.Quantity(data) if data else None
+                dish[attribute_name] = data
+        ret.append(dish)
+    return ret
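
To sanity-check the flattened shape, a hypothetical smoke test (not part of this commit; the HTML snippet is hand-written to imitate the data-* attributes UMass Dining emits, and the expected output is illustrative):

# Hypothetical smoke test, not part of this commit.
from umass_toolkit import dining_utils

html = ('<a href="#inline" data-dish-name="Chili" data-calories="250" '
        'data-clean-diet-str="Vegetarian, Halal" data-protein="12 g">Chili</a>')
dishes = dining_utils.category_html_to_dict(html, meal='Lunch', category='Soups')
print(dishes)
# Expected, roughly: one dict per matching <a> tag, e.g.
# [{'category-name': 'Soups', 'meal-name': 'Lunch', 'dish-name': 'Chili',
#   'calories': 250, 'diets': ['Vegetarian', 'Halal'],
#   'protein': <Quantity(12, 'gram')>}]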
