search_results = resp.json()

for page in search_results.get('data', []):
    # page.get('senses') short-circuits, so a missing or empty 'senses' list
    # leaves parts_of_speech falsy.
    parts_of_speech = page.get('senses') and page['senses'][0].get('parts_of_speech')
    if parts_of_speech and parts_of_speech[0] == 'Wikipedia definition':
        # Skip entries whose first sense is just a Wikipedia definition.
        continue
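
    # Illustrative only (assumption, not from the original file): a Jisho API sense
    # looks roughly like {"english_definitions": ["house"], "parts_of_speech": ["Noun"]},
    # so an entry whose first sense is tagged 'Wikipedia definition' has no ordinary
    # dictionary definition to show.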

    alt_forms = []
    for title_raw in page['japanese']:
        if 'word' not in title_raw:
            # Kana-only entry: only a reading is available.
            alt_forms.append(title_raw['reading'])
        else:
            title = title_raw['word']
            if 'reading' in title_raw:
                title += ' (' + title_raw['reading'] + ')'
            alt_forms.append(title)
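
    # Illustrative only (assumption): entries in page['japanese'] typically look like
    #   {"word": "家", "reading": "いえ"}  -> "家 (いえ)"
    #   {"reading": "ありがとう"}          -> "ありがとう"  (kana-only, no 'word' key)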

    result_url = urljoin(BASE_URL, page['slug'])

    # 'definitions' is built in lines omitted from this excerpt; each item
    # unpacks to a 3-tuple whose second element is an English definition.
    content = " ".join(f"{engdef}." for _, engdef, _ in definitions)
    results.append({
        'url': result_url,
        'title': ", ".join(alt_forms),
        'content': content[:300] + (content[300:] and '...')
    })

    results.append(get_infobox(alt_forms, result_url, definitions))
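
# Note (illustrative, not part of the original code): the slicing idiom above,
#   content[:300] + (content[300:] and '...'),
# appends an ellipsis only when the text exceeds 300 characters, because an
# empty slice is falsy:
#   "abc"[:300] + ("abc"[300:] and '...')              -> "abc"
#   ("x" * 400)[:300] + (("x" * 400)[300:] and '...')  -> "x" * 300 + "..."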