
Format the element extraction result

luojiehua 2 years ago
parent
commit
a75d6ad0b6

+ 1 - 0
BiddingKG/dl/entityLink/entityLink.py

@@ -241,6 +241,7 @@ def init_redis_pool():
 
 def is_enterprise_exist(enterprise_name):
     global ENTERPRISE_HUGE,SET_ENTERPRISE,POOL_REDIS
+    # print("test",enterprise_name)
     if ENTERPRISE_HUGE:
         if POOL_REDIS is None:
             init_redis_pool()
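
The hunk above only shows the Redis pool being initialized lazily inside is_enterprise_exist. A minimal sketch of how this kind of dual-path membership check typically works, assuming redis-py; the connection defaults, the "enterprise" set key and the sismember lookup are illustrative assumptions, not taken from the repository:

    import redis

    POOL_REDIS = None
    SET_ENTERPRISE = set()   # in-memory set used when the dictionary fits in memory
    ENTERPRISE_HUGE = True   # switch to Redis when it does not

    def init_redis_pool(host="127.0.0.1", port=6379):
        # hypothetical defaults; the real connection settings live elsewhere in the project
        global POOL_REDIS
        POOL_REDIS = redis.ConnectionPool(host=host, port=port, db=0)

    def is_enterprise_exist(enterprise_name):
        global ENTERPRISE_HUGE, SET_ENTERPRISE, POOL_REDIS
        if ENTERPRISE_HUGE:
            if POOL_REDIS is None:
                init_redis_pool()
            client = redis.Redis(connection_pool=POOL_REDIS)
            # "enterprise" is an assumed Redis set key, used only for illustration
            return bool(client.sismember("enterprise", enterprise_name))
        return enterprise_name in SET_ENTERPRISE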

+ 1 - 1
BiddingKG/dl/interface/extract.py

@@ -275,7 +275,7 @@ def predict(doc_id,text,title="",page_time="",web_source_no='',web_source_name="
     #               (str(_entity.entity_type),str(_entity.entity_text),str(_entity.label),str(_entity.values),str(_entity.sentence_index),
     #                str(_entity.begin_index),str(_entity.end_index)))
     _extract_json = json.dumps(data_res,cls=MyEncoder,sort_keys=True,indent=4,ensure_ascii=False)
-    _extract_json = _extract_json.replace("\x06", "").replace("\x05", "").replace("\x07", "").replace('\\', '')
+    _extract_json = _extract_json.replace("\x06", "").replace("\x05", "").replace("\x07", "")
     return _extract_json#, list_articles[0].content, get_ent_context(list_sentences, list_entitys)
 
 

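The one-line change above stops stripping backslashes from the serialized result: json.dumps escapes quotes and control characters with backslashes, so removing them can turn valid JSON into an unparseable string. A short illustration with made-up data (data_res is the only name taken from the hunk):

    import json

    data_res = {"doc_title": 'A "quoted" name\nsecond line'}   # made-up sample record
    _extract_json = json.dumps(data_res, ensure_ascii=False)

    broken = _extract_json.replace('\\', '')   # the removed step: destroys \" and \n escapes
    json.loads(_extract_json)                  # parses fine
    try:
        json.loads(broken)
    except json.JSONDecodeError as e:
        print("no longer valid JSON:", e)
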
+ 1 - 0
BiddingKG/run_extract_server.py

@@ -81,6 +81,7 @@ def run_thread(data,list_result):
     web_source_no = data.get("web_source_no","")
     web_source_name = data.get("web_source_name","")
     original_docchannel = data.get("original_docchannel","")
+    print("web_source_name:",web_source_name)
     is_fail = False
     try:
         if _content!="":
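
For reference, a minimal sketch of the kind of payload run_thread() unpacks with data.get(); only web_source_no, web_source_name and original_docchannel are visible in this hunk, so the remaining key and all values are hypothetical:

    # Hypothetical payload; only the three keys read above are confirmed by the diff.
    data = {
        "content": "<div>notice body ...</div>",    # assumed key behind _content
        "web_source_no": "00018-1",
        "web_source_name": "Example Provincial Procurement Site",
        "original_docchannel": "52",
    }
    # The added debug line prints the source name for each incoming request:
    print("web_source_name:", data.get("web_source_name", ""))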