ghomasHudson committed on
Commit
9a19160
1 Parent(s): 4af4b99

Add loading

Browse files
Files changed (1) hide show
  1. app.py +25 -25
app.py CHANGED
# Streamlit handler: take an article, run spaCy NER + entity linking over it,
# look up each linked entity's image on Wikidata (property P18), and render
# the results as a row of circular thumbnails with captions.
article = st.text_area('Article to analyze:', value=open("example.txt").read())

seen_entities = []  # entity surface forms already rendered (deduplication)
if st.button('Submit'):
    good_ents = []  # (text, label, qid, wikidata_url, nerd_score, image_url)

    with st.spinner(text="Analysing..."):
        doc = nlp(article)
        for ent in doc.ents:
            # Keep only entities that were linked to a Wikidata QID, belong to
            # a type we display, and have not been shown already.
            if ent._.kb_qid is None or ent.label_ not in ["ORG", "PERSON", "GPE"] or ent.text in seen_entities:
                continue
            seen_entities.append(ent.text)
            print((ent.text, ent.label_, ent._.kb_qid, ent._.url_wikidata, ent._.nerd_score))
            # Ask Wikidata for the entity's image claim (P18).
            r = requests.get(
                "https://www.wikidata.org/w/api.php?action=wbgetclaims&format=json&property=P18&entity=" + ent._.kb_qid,
                timeout=10,  # don't hang the UI if the Wikidata API is slow/unreachable
            )
            data = r.json()["claims"]
            if "P18" in data:
                data = data["P18"][0]["mainsnak"]
                img_name = data["datavalue"]["value"].replace(" ", "_")
                # Wikimedia Commons shards files by the MD5 of the (underscored)
                # file name: /<md5[0]>/<md5[0:2]>/<name>.
                img_name_hash = hashlib.md5(img_name.encode("utf-8")).hexdigest()
                a = img_name_hash[0]
                b = img_name_hash[1]
                url = f"https://upload.wikimedia.org/wikipedia/commons/{a}/{a}{b}/{img_name}"
                good_ents.append((ent.text, ent.label_, ent._.kb_qid, ent._.url_wikidata, ent._.nerd_score, url))

    # st.columns(0) raises a StreamlitAPIException, so only render the strip
    # when at least one entity with an image was found.
    if good_ents:
        cols = st.columns(len(good_ents))
        for i, ent in enumerate(good_ents):
            # NOTE(review): original indentation was lost in the diff scrape —
            # assuming the caption renders inside its column; confirm upstream.
            with cols[i]:
                components.html(f"<image style='border-radius: 50%;object-fit:cover;width:100px;height:100px' src='{ent[-1]}'/>", height=110, width=110)
                st.caption(ent[0])