Skip to content

Commit

Permalink
feat: making kp async (#29)
Browse files Browse the repository at this point in the history
* feat: making kp async

* Update main.py

* Update main.py

* refactor: added asyncify to wikidata_kp

* Update main.py

* fix: git conflict

* fix: tests

* feat: added task_group(multi-processing)

* Update test_main.py

* Update requirements.txt

* refactor: fix requirements, made it text

* test: mock aiohttp get method

* Update test_knowledge_panels.py

* refactor: make flake8 happy

* refactor: fix suggested changes

* tests: removed useless async

* doc: some more comments

Co-authored-by: Alex Garel <alex@garel.org>
  • Loading branch information
sumit-158 and alexgarel committed Sep 21, 2022
1 parent 84b0d0e commit af9a70b
Show file tree
Hide file tree
Showing 8 changed files with 170 additions and 113 deletions.
23 changes: 12 additions & 11 deletions app/knowledge_panels.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ def __init__(
self.sec_value = sec_value
self.country = country

def hunger_game_kp(self):
async def hunger_game_kp(self):
query = {}
questions_url = "https://hunger.openfoodfacts.org/questions"
facets = {self.facet: self.value, self.sec_facet: self.sec_value}
Expand Down Expand Up @@ -60,7 +60,7 @@ def hunger_game_kp(self):
if query:
urls.add((f"{questions_url}?{urlencode(query)}", description))

t_description = hungergame()
t_description = await hungergame()
for id, val in enumerate(sorted(urls)):
url, des = val
html.append(
Expand All @@ -75,7 +75,7 @@ def hunger_game_kp(self):

return kp

def data_quality_kp(self):
async def data_quality_kp(self):
"""
Get data corresponding to differnet facet
"""
Expand Down Expand Up @@ -113,7 +113,7 @@ def data_quality_kp(self):
if self.sec_value is not None:
path += f"/{self.sec_value}"
description += f" {self.sec_value}"
(t_html, source_url, t_description, t_title) = data_quality(url=url, path=path)
(t_html, source_url, t_description, t_title) = await data_quality(url=url, path=path)

return {
"Quality": {
Expand All @@ -129,7 +129,7 @@ def data_quality_kp(self):
},
}

def last_edits_kp(self):
async def last_edits_kp(self):
"""
Return knowledge panel for last-edits corresponding to different facet
"""
Expand All @@ -156,6 +156,7 @@ def last_edits_kp(self):
url = "https://world.openfoodfacts.org"
if self.facet is not None:
description += f"{self.facet}"
source_url = f"{url}/{self.facet}?sort_by=last_modified_t"
if self.value is not None:
query[f"{facet_plural(facet=self.facet)}_tags_en"] = self.value
description += f" {self.value}"
Expand All @@ -164,7 +165,7 @@ def last_edits_kp(self):
query[f"{facet_plural(facet=self.sec_facet)}_tags_en"] = self.sec_value
description += f" {self.sec_facet} {self.sec_value}"
source_url = f"{url}/{self.facet}/{self.value}/{self.sec_facet}/{self.sec_value}?sort_by=last_modified_t" # noqa: E501
t_html, t_description, t_title = last_edit(url=url, query=query)
t_html, t_description, t_title = await last_edit(url=url, query=query)

return {
"LastEdits": {
Expand All @@ -180,28 +181,28 @@ def last_edits_kp(self):
},
}

def _wikidata_kp(self, facet, value):
async def _wikidata_kp(self, facet, value):
query = {}
if value:
query["tagtype"] = facet_plural(facet=facet)
query["fields"] = "wikidata"
query["tags"] = value

entities = wikidata_helper(query=query, value=value)
entities = await wikidata_helper(query=query, value=value)

return entities

def wikidata_kp(self):
async def wikidata_kp(self):
"""
Return knowledge panel for wikidata
"""
entities = set()
try:
entities.add(self._wikidata_kp(facet=self.facet, value=self.value))
entities.add(await self._wikidata_kp(facet=self.facet, value=self.value))
except Exception:
logging.exception("While adding wikidata for primary facet")
try:
entities.add(self._wikidata_kp(facet=self.sec_facet, value=self.sec_value))
entities.add(await self._wikidata_kp(facet=self.sec_facet, value=self.sec_value))
except Exception:
logging.exception("While adding wikidata for secandary facet")

Expand Down
44 changes: 25 additions & 19 deletions app/main.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,23 @@
import logging
from typing import Union

import asyncer
from fastapi import FastAPI

from .i18n import active_translation
from .knowledge_panels import KnowledgePanels
from .models import FacetName, HungerGameFilter
from .models import FacetName, HungerGameFilter, Taxonomies

app = FastAPI()


@app.get("/")
def hello():
async def hello():
return {"message": "Hello from facets-knowledge-panels! Tip: open /docs for documentation"}


@app.get("/knowledge_panel")
def knowledge_panel(
async def knowledge_panel(
facet_tag: FacetName,
value_tag: Union[str, None] = None,
sec_facet_tag: Union[str, None] = None,
Expand All @@ -31,29 +32,34 @@ def knowledge_panel(
"""
with active_translation(lang_code):
panels = []
# creating object that will compute knowledge panels
obj_kp = KnowledgePanels(
facet=facet_tag.value,
value=value_tag,
sec_facet=sec_facet_tag,
sec_value=sec_value_tag,
country=country,
)
try:
# this will contains panels computations
soon_panels = []
# the task_group will run these knowledge_panels async functions concurrently
async with asyncer.create_task_group() as task_group:
# launch each panels computation
if facet_tag in HungerGameFilter.list():
panels.append(obj_kp.hunger_game_kp())
except Exception:
logging.exception("error occued while appending hungergames-kp")
try:
panels.append(obj_kp.data_quality_kp())
except Exception:
logging.exception("error occued while appending data-quality-kp")
try:
panels.append(obj_kp.last_edits_kp())
except Exception:
logging.exception("error occued while appending last-edites-kp")
try:
panels.append(obj_kp.wikidata_kp())
except Exception:
logging.exception("error occued while appending wikidata-kp")
soon_panels.append(task_group.soonify(obj_kp.hunger_game_kp)())
soon_panels.append(task_group.soonify(obj_kp.data_quality_kp)())
soon_panels.append(task_group.soonify(obj_kp.last_edits_kp)())
if facet_tag in Taxonomies.list():
soon_panels.append(task_group.soonify(obj_kp.wikidata_kp)())
# collect panels results
for soon_value in soon_panels:
# if an exception was raised during computation
# we will get it on value retrieval
# but we don't want to sacrifice whole result for a single failure
# as most panels depends on external resources that may not be available
try:
panels.append(soon_value.value)
except Exception:
logging.exception()

return {"knowledge_panels": panels}
40 changes: 23 additions & 17 deletions app/off.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,23 @@
from collections import namedtuple
from urllib.parse import urljoin

import requests
import aiohttp
from asyncer import asyncify

from .i18n import DEFAULT_LANGUAGE, get_current_lang
from .i18n import translate as _
from .wikidata_utils import get_wikidata_entity, wikidata_props


def data_quality(url, path):
async def data_quality(url, path):
"""
Helper function to return issues for data-quality
"""
source_url = urljoin(url, path)
quality_url = f"{source_url}/data-quality.json"
response_API = requests.get(quality_url)
data = response_API.json()
async with aiohttp.ClientSession() as session:
source_url = urljoin(url, path)
quality_url = f"{source_url}/data-quality.json"
async with session.get(quality_url) as resp:
data = await resp.json()
total_issues = data["count"]
tags = data["tags"]
html = []
Expand All @@ -24,7 +26,7 @@ def data_quality(url, path):
"products": tag["products"],
"name": tag["name"],
}
html.append(f'<li><a href={tag["url"]}>')
html.append(f"""<li><a href='{tag["url"]}'>""")
html.append(_("{products} products with {name}").format(**info))
html.append("</a></li>")

Expand All @@ -44,13 +46,14 @@ def data_quality(url, path):
return text, source_url, description, title


def last_edit(url, query):
async def last_edit(url, query):
"""
Helper function to return data for last-edits
"""
search_url = f"{url}/api/v2/search"
response_API = requests.get(search_url, params=query)
data = response_API.json()
async with aiohttp.ClientSession() as session:
search_url = f"{url}/api/v2/search"
async with session.get(search_url, params=query) as resp:
data = await resp.json()
counts = data["count"]
tags = data["products"]

Expand Down Expand Up @@ -108,17 +111,18 @@ def in_lang(data, lang, suffix=""):
return data[DEFAULT_LANGUAGE + suffix]


def wikidata_helper(query, value):
async def wikidata_helper(query, value):
"""
Helper function to return wikidata eg:label,description,image_url
"""
lang = get_current_lang()
url = "https://world.openfoodfacts.org/api/v2/taxonomy"
response_API = requests.get(url, params=query)
data = response_API.json()
async with aiohttp.ClientSession() as session:
url = "https://world.openfoodfacts.org/api/v2/taxonomy"
async with session.get(url, params=query) as resp:
data = await resp.json()
tag = data[value]
entity_id = in_lang(tag["wikidata"], lang)
entity = get_wikidata_entity(entity_id=entity_id)
entity = await asyncify(get_wikidata_entity)(entity_id=entity_id)
if wikidata_props.image_prop in entity:
image = entity[wikidata_props.image_prop]
image_url = image.image_url
Expand All @@ -136,6 +140,7 @@ def wikidata_helper(query, value):
OSM_relation = "https://www.openstreetmap.org/relation/{}".format(osm)
else:
OSM_relation = ""

entities = Entities(
in_lang(entity.label, lang),
in_lang(entity.description, lang),
Expand All @@ -148,7 +153,8 @@ def wikidata_helper(query, value):
return entities


def hungergame():
async def hungergame():

"""Helper function for making Translation easy"""
description = _("Answer robotoff questions about")
return description
3 changes: 1 addition & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,5 +8,4 @@ services:
- .:/code
ports:
- "127.0.0.1:80:80"


command: ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "80", "--reload"]
3 changes: 3 additions & 0 deletions pytest.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[pytest]
testpaths = tests
asyncio_mode = auto
Binary file modified requirements.txt
Binary file not shown.
Loading

0 comments on commit af9a70b

Please sign in to comment.