-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: infer_articles.py
58 lines (46 loc) · 1.72 KB
/
infer_articles.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 10 12:16:45 2021
@author: cbadenes
"""
import model.workers as workers
import pysolr
import json
import multiprocessing as mp
import time
import statistics
if __name__ == '__main__':
    # Page through every "Development"-scope article in the Solr index,
    # run workers.inference on each (id, abstract) pair in parallel, and
    # dump the per-level results to three baseline JSON files.
    solr_query = "scope_s:Development"
    solr = pysolr.Solr('http://librairy.linkeddata.es/data/mesinesp',
                       always_commit=True, timeout=50)
    print("Number of processors: ", mp.cpu_count())
    print("reading from solr..")

    counter = 0
    window_size = 200       # Solr page size per request
    cursor = "*"            # cursorMark pagination token; "*" starts a fresh scan
    documents = []

    # Context manager guarantees the pool is cleaned up even if an
    # unexpected error escapes the loop (original leaked it in that case).
    with mp.Pool(mp.cpu_count()) as pool:
        while True:
            old_counter = counter
            try:
                articles = solr.search(q=solr_query, rows=window_size,
                                       cursorMark=cursor, sort="id asc")
                cursor = articles.nextCursorMark
                results = pool.starmap(
                    workers.inference,
                    [(article['id'], article['abstract_t'])
                     for article in articles.docs])
                documents.extend(results)
                counter += len(results)
                print(counter, "docs evaluated")
                # Solr returns the same cursorMark once the result set is
                # exhausted, so a page with no new docs means we are done.
                if old_counter == counter:
                    print("done!")
                    break
            # Narrowed from a bare `except:` so Ctrl-C / SystemExit can
            # still stop the retry loop; the error is logged, not hidden.
            except Exception as e:
                print("Solr query error (%s). Wait for 5secs.." % e)
                time.sleep(5.0)

    # One output file per inference level produced by workers.inference
    # (keys 'result0'..'result2' -> baseline-llda-l0.json .. -l2.json).
    for level in range(3):
        with open('baseline-llda-l%d.json' % level, 'w') as outfile:
            json.dump({'documents': [doc['result%d' % level]
                                     for doc in documents]}, outfile)