modelFactory.py

'''
Created on 2019-05-16
@author: User
'''
import os
import sys
sys.path.append(os.path.abspath("../.."))
from keras import models
from keras import layers
# from keras_contrib.layers import CRF
from keras.preprocessing.sequence import pad_sequences
from keras import optimizers,losses,metrics
from BiddingKG.dl.common.Utils import *
import tensorflow as tf
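# Each Model_* class below follows the same contract: getModel() lazily loads
# either a Keras .h5 model or a TensorFlow SavedModel (kept as a pair of
# [input tensors, output tensor] together with a dedicated tf.Session/Graph),
# encode() turns tokens/entities into model inputs, and predict() runs
# inference (via limitRun for the SavedModel-based classes).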
class Model_role_classify():
    def __init__(self,lazyLoad=getLazyLoad()):
        # self.model_role_file = os.path.abspath("../role/models/model_role.model.hdf5")
        self.model_role_file = os.path.dirname(__file__)+"/../role/log/new_biLSTM-ep012-loss0.028-val_loss0.040-f10.954.h5"
        self.model_role = None
        self.graph = tf.get_default_graph()
        if not lazyLoad:
            self.getModel()

    def getModel(self):
        if self.model_role is None:
            self.model_role = models.load_model(self.model_role_file,custom_objects={'precision':precision,'recall':recall,'f1_score':f1_score})
        return self.model_role

    def encode(self,tokens,begin_index,end_index,**kwargs):
        return embedding(spanWindow(tokens=tokens,begin_index=begin_index,end_index=end_index,size=10),shape=(2,10,128))

    def predict(self,x):
        x = np.transpose(np.array(x),(1,0,2,3))
        with self.graph.as_default():
            return self.getModel().predict([x[0],x[1]])
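# A minimal usage sketch (hypothetical: `tokens` and `spans` would come from the
# project's tokenized sentences and candidate entity spans):
#
#   model = Model_role_classify()
#   x = [model.encode(tokens, begin_index=b, end_index=e) for (b, e) in spans]
#   probs = model.predict(x)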
class Model_role_classify_word():
    def __init__(self,lazyLoad=getLazyLoad()):
        if USE_PAI_EAS:
            lazyLoad = True
        # self.model_role_file = os.path.abspath("../role/log/ep071-loss0.107-val_loss0.122-f10.956.h5")
        self.model_role_file = os.path.dirname(__file__)+"/../role/models/ep038-loss0.140-val_loss0.149-f10.947.h5"
        # self.model_role_file = os.path.abspath("../role/log/textcnn_ep017-loss0.088-val_loss0.125-f10.955.h5")
        self.model_role = None
        self.sess_role = tf.Session(graph=tf.Graph())
        if not lazyLoad:
            self.getModel()

    def getModel(self):
        if self.model_role is None:
            with self.sess_role.as_default() as sess:
                with self.sess_role.graph.as_default():
                    meta_graph_def = tf.saved_model.loader.load(sess=self.sess_role, tags=["serve"], export_dir=os.path.dirname(__file__)+"/role_savedmodel")
                    signature_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
                    signature_def = meta_graph_def.signature_def
                    input0 = self.sess_role.graph.get_tensor_by_name(signature_def[signature_key].inputs["input0"].name)
                    input1 = self.sess_role.graph.get_tensor_by_name(signature_def[signature_key].inputs["input1"].name)
                    input2 = self.sess_role.graph.get_tensor_by_name(signature_def[signature_key].inputs["input2"].name)
                    output = self.sess_role.graph.get_tensor_by_name(signature_def[signature_key].outputs["outputs"].name)
                    self.model_role = [[input0,input1,input2],output]
        return self.model_role

    '''
    def load_weights(self):
        model = self.getModel()
        model.load_weights(self.model_role_file)
    '''

    def encode(self,tokens,begin_index,end_index,entity_text,**kwargs):
        _span = spanWindow(tokens=tokens,begin_index=begin_index,end_index=end_index,size=12,center_include=True,word_flag=True,text=entity_text)
        # print(_span)
        _encode_span = encodeInput(_span, word_len=20, word_flag=True,userFool=False)
        # print(_encode_span)
        return _encode_span

    def predict(self,x):
        x = np.transpose(np.array(x),(1,0,2))
        model_role = self.getModel()
        assert len(x)==len(model_role[0])
        feed_dict = {}
        for _x,_t in zip(x,model_role[0]):
            feed_dict[_t] = _x
        list_result = limitRun(self.sess_role,[model_role[1]],feed_dict)[0]
        return list_result
        #return self.sess_role.run(model_role[1],feed_dict=feed_dict)
class Model_money_classify():
    def __init__(self,lazyLoad=getLazyLoad()):
        if USE_PAI_EAS:
            lazyLoad = True
        self.model_money_file = os.path.dirname(__file__)+"/../money/models/model_money_word.h5"
        self.model_money = None
        self.sess_money = tf.Session(graph=tf.Graph())
        if not lazyLoad:
            self.getModel()

    def getModel(self):
        if self.model_money is None:
            with self.sess_money.as_default() as sess:
                with sess.graph.as_default():
                    meta_graph_def = tf.saved_model.loader.load(sess,tags=["serve"],export_dir=os.path.dirname(__file__)+"/money_savedmodel")
                    # meta_graph_def = tf.saved_model.loader.load(sess,tags=["serve"],export_dir=os.path.dirname(__file__)+"/money_savedmodel_bilstmonly")
                    signature_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
                    signature_def = meta_graph_def.signature_def
                    input0 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input0"].name)
                    input1 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input1"].name)
                    input2 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input2"].name)
                    output = sess.graph.get_tensor_by_name(signature_def[signature_key].outputs["outputs"].name)
                    self.model_money = [[input0,input1,input2],output]
        return self.model_money

    '''
    if self.model_money is None:
        self.model_money = models.load_model(self.model_money_file,custom_objects={'precision':precision,'recall':recall,'f1_score':f1_score})
    return self.model_money
    '''

    '''
    def load_weights(self):
        model = self.getModel()
        model.load_weights(self.model_money_file)
    '''

    def encode(self,tokens,begin_index,end_index,**kwargs):
        _span = spanWindow(tokens=tokens, begin_index=begin_index, end_index=end_index, size=10, center_include=True, word_flag=True)
        # print(_span)
        return encodeInput(_span, word_len=30, word_flag=True,userFool=False)
        # unreachable: kept from an earlier version
        # return embedding_word(_span,shape=(3,100,60))

    def predict(self,x):
        # print("shape",np.shape(x))
        x = np.transpose(np.array(x),(1,0,2))
        model_money = self.getModel()
        assert len(x)==len(model_money[0])
        feed_dict = {}
        for _x,_t in zip(x,model_money[0]):
            feed_dict[_t] = _x
        list_result = limitRun(self.sess_money,[model_money[1]],feed_dict)[0]
        #return self.sess_money.run(model_money[1],feed_dict=feed_dict)
        return list_result

    '''
    with self.graph.as_default():
        return self.getModel().predict([x[0],x[1],x[2]])
    '''
from itertools import groupby
from BiddingKG.dl.relation_extraction.model import get_words_matrix

class Model_relation_extraction():
    def __init__(self,lazyLoad=getLazyLoad()):
        if USE_PAI_EAS:
            lazyLoad = True
        self.subject_model_file = os.path.dirname(__file__)+"/../relation_extraction/models2/subject_model"
        self.object_model_file = os.path.dirname(__file__)+"/../relation_extraction/models2/object_model"
        self.model_subject = None
        self.model_object = None
        self.sess_subject = tf.Session(graph=tf.Graph())
        self.sess_object = tf.Session(graph=tf.Graph())
        if not lazyLoad:
            self.getModel1()
            self.getModel2()
        self.entity_type_dict = {
            'org': '<company/org>',
            'company': '<company/org>',
            'location': '<location>',
            'phone': '<phone>',
            'person': '<contact_person>'
        }
        self.id2predicate = {
            0: "rel_person",   # company - contact person
            1: "rel_phone",    # contact person - phone
            2: "rel_address"   # company - address
        }
        self.words_size = 128

    # subject_model
    def getModel1(self):
        if self.model_subject is None:
            with self.sess_subject.as_default() as sess:
                with sess.graph.as_default():
                    meta_graph_def = tf.saved_model.loader.load(sess,tags=["serve"],export_dir=self.subject_model_file)
                    signature_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
                    signature_def = meta_graph_def.signature_def
                    input0 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input0"].name)
                    input1 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input1"].name)
                    output = sess.graph.get_tensor_by_name(signature_def[signature_key].outputs["outputs"].name)
                    self.model_subject = [[input0,input1],output]
        return self.model_subject

    # object_model
    def getModel2(self):
        if self.model_object is None:
            with self.sess_object.as_default() as sess:
                with sess.graph.as_default():
                    meta_graph_def = tf.saved_model.loader.load(sess,tags=["serve"],export_dir=self.object_model_file)
                    signature_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
                    signature_def = meta_graph_def.signature_def
                    input0 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input0"].name)
                    input1 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input1"].name)
                    input2 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input2"].name)
                    output = sess.graph.get_tensor_by_name(signature_def[signature_key].outputs["outputs"].name)
                    self.model_object = [[input0,input1,input2],output]
        return self.model_object

    def encode(self,entity_list,list_sentence):
        list_sentence = sorted(list_sentence, key=lambda x: x.sentence_index)
        entity_list = sorted(entity_list, key=lambda x: (x.sentence_index, x.begin_index))
        pre_data = []
        text_data = []
        last_sentence_index = -1
        for key, group in groupby(entity_list, key=lambda x: x.sentence_index):
            if key - last_sentence_index > 1:
                for i in range(last_sentence_index + 1, key):
                    pre_data.extend(list_sentence[i].tokens)
                    text_data.extend([0] * len(list_sentence[i].tokens))
            group = list(group)
            for i in range(len(group)):
                ent = group[i]
                _tokens = list_sentence[key].tokens
                if i == len(group) - 1:
                    if i == 0:
                        pre_data.extend(_tokens[:ent.begin_index])
                        text_data.extend([0] * len(_tokens[:ent.begin_index]))
                        pre_data.append(self.entity_type_dict[ent.entity_type])
                        text_data.append(ent)
                        pre_data.extend(_tokens[ent.end_index + 1:])
                        text_data.extend([0] * len(_tokens[ent.end_index + 1:]))
                        break
                    else:
                        pre_data.append(self.entity_type_dict[ent.entity_type])
                        text_data.append(ent)
                        pre_data.extend(_tokens[ent.end_index + 1:])
                        text_data.extend([0] * len(_tokens[ent.end_index + 1:]))
                        break
                if i == 0:
                    pre_data.extend(_tokens[:ent.begin_index])
                    text_data.extend([0] * len(_tokens[:ent.begin_index]))
                    pre_data.append(self.entity_type_dict[ent.entity_type])
                    text_data.append(ent)
                    pre_data.extend(_tokens[ent.end_index + 1:group[i + 1].begin_index])
                    text_data.extend([0] * len(_tokens[ent.end_index + 1:group[i + 1].begin_index]))
                else:
                    pre_data.append(self.entity_type_dict[ent.entity_type])
                    text_data.append(ent)
                    pre_data.extend(_tokens[ent.end_index + 1:group[i + 1].begin_index])
                    text_data.extend([0] * len(_tokens[ent.end_index + 1:group[i + 1].begin_index]))
            last_sentence_index = key
        return text_data, pre_data
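    # encode() walks the entities sentence by sentence and builds two parallel
    # sequences: pre_data, the token sequence with each entity span replaced by
    # its type placeholder from entity_type_dict (e.g. '<company/org>'), and
    # text_data, which holds 0 for plain tokens and the entity object itself at
    # entity positions, so predictions can be mapped back to entities by index.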
    def predict(self,text_in, words, rate=0.5):
        # text_words = text_in
        triple_list = []
        # print("tokens:",words)
        # make sure both SavedModels are loaded even when lazyLoad is enabled
        model_subject = self.getModel1()
        model_object = self.getModel2()
        # _t2 = [self.words2id.get(c, 1) for c in words]
        _t2 = np.zeros((len(words), self.words_size))
        for i in range(len(words)):
            _t2[i] = np.array(get_words_matrix(words[i]))
        _t2 = np.array([_t2])
        _t3 = [1 for _ in words]
        _t3 = np.array([_t3])
        # _k1 = self.model_subject.predict([_t2, _t3])
        _k1 = limitRun(self.sess_subject,[model_subject[1]],feed_dict={model_subject[0][0]:_t2,
                                                                       model_subject[0][1]:_t3})[0]
        _k1 = _k1[0, :, 0]
        _k1 = np.where(_k1 > rate)[0]
        # print('k1',_k1)
        _subjects = []
        for i in _k1:
            _subject = text_in[i]
            _subjects.append((_subject, i, i))
        if _subjects:
            _t2 = np.repeat(_t2, len(_subjects), 0)
            _t3 = np.repeat(_t3, len(_subjects), 0)
            _k1, _ = np.array([_s[1:] for _s in _subjects]).T.reshape((2, -1, 1))
            # _o1 = self.model_object.predict([_t2, _t3, _k1])
            _o1 = limitRun(self.sess_object, [model_object[1]], feed_dict={model_object[0][0]: _t2,
                                                                           model_object[0][1]: _t3,
                                                                           model_object[0][2]: _k1})[0]
            for i, _subject in enumerate(_subjects):
                _oo1 = np.where(_o1[i] > 0.5)
                # print('_oo1', _oo1)
                for _ooo1, _c1 in zip(*_oo1):
                    _object = text_in[_ooo1]
                    _predicate = self.id2predicate[_c1]
                    triple_list.append((_subject[0], _predicate, _object))
            # print([(t[0].entity_text,t[1],t[2].entity_text) for t in triple_list])
            return triple_list
        else:
            return []
class Model_person_classify():
    def __init__(self,lazyLoad=getLazyLoad()):
        if USE_PAI_EAS:
            lazyLoad = True
        self.model_person_file = os.path.dirname(__file__)+"/../person/models/model_person.model.hdf5"
        self.model_person = None
        self.sess_person = tf.Session(graph=tf.Graph())
        if not lazyLoad:
            self.getModel()

    def getModel(self):
        if self.model_person is None:
            with self.sess_person.as_default() as sess:
                with sess.graph.as_default():
                    # meta_graph_def = tf.saved_model.loader.load(sess,tags=["serve"],export_dir=os.path.dirname(__file__)+"/person_savedmodel_new")
                    meta_graph_def = tf.saved_model.loader.load(sess,tags=["serve"],export_dir=os.path.dirname(__file__)+"/person_savedmodel_new_znj")
                    signature_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
                    signature_def = meta_graph_def.signature_def
                    input0 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input0"].name)
                    input1 = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["input1"].name)
                    output = sess.graph.get_tensor_by_name(signature_def[signature_key].outputs["outputs"].name)
                    self.model_person = [[input0,input1],output]
        return self.model_person

    '''
    if self.model_person is None:
        self.model_person = models.load_model(self.model_person_file,custom_objects={'precision':precision,'recall':recall,'f1_score':f1_score})
    return self.model_person
    '''

    '''
    def load_weights(self):
        model = self.getModel()
        model.load_weights(self.model_person_file)
    '''

    def encode(self,tokens,begin_index,end_index,**kwargs):
        # return embedding(spanWindow(tokens=tokens,begin_index=begin_index,end_index=end_index,size=10),shape=(2,10,128))
        return embedding(spanWindow(tokens=tokens,begin_index=begin_index,end_index=end_index,size=20),shape=(2,20,128))

    def predict(self,x):
        x = np.transpose(np.array(x),(1,0,2,3))
        model_person = self.getModel()
        assert len(x)==len(model_person[0])
        feed_dict = {}
        for _x,_t in zip(x,model_person[0]):
            feed_dict[_t] = _x
        list_result = limitRun(self.sess_person,[model_person[1]],feed_dict)[0]
        return list_result
        #return self.sess_person.run(model_person[1],feed_dict=feed_dict)

    '''
    with self.graph.as_default():
        return self.getModel().predict([x[0],x[1]])
    '''
class Model_form_line():
    def __init__(self,lazyLoad=getLazyLoad()):
        self.model_file = os.path.dirname(__file__)+"/../form/model/model_form.model - 副本.hdf5"
        self.model_form = None
        self.graph = tf.get_default_graph()
        if not lazyLoad:
            self.getModel()

    def getModel(self):
        if self.model_form is None:
            self.model_form = models.load_model(self.model_file,custom_objects={"precision":precision,"recall":recall,"f1_score":f1_score})
        return self.model_form

    def encode(self,data,shape=(100,60),expand=False,**kwargs):
        embedding = np.zeros(shape)
        word_model = getModel_word()
        for i in range(len(data)):
            if i>=shape[0]:
                break
            if data[i] in word_model.vocab:
                embedding[i] = word_model[data[i]]
        if expand:
            embedding = np.expand_dims(embedding,0)
        return embedding

    def predict(self,x):
        with self.graph.as_default():
            return self.getModel().predict(x)
class Model_form_item():
    def __init__(self,lazyLoad=getLazyLoad()):
        self.model_file = os.path.dirname(__file__)+"/../form/log/ep039-loss0.038-val_loss0.064-f10.9783.h5"
        self.model_form = None
        self.sess_form = tf.Session(graph=tf.Graph())
        if not lazyLoad:
            self.getModel()

    def getModel(self):
        if self.model_form is None:
            with self.sess_form.as_default() as sess:
                with sess.graph.as_default():
                    meta_graph_def = tf.saved_model.loader.load(sess,tags=["serve"],export_dir="%s/form_savedmodel"%(os.path.dirname(__file__)))
                    signature_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
                    signature_def = meta_graph_def.signature_def
                    inputs = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["inputs"].name)
                    output = sess.graph.get_tensor_by_name(signature_def[signature_key].outputs["outputs"].name)
                    self.model_form = [[inputs],output]
        return self.model_form

    '''
    if self.model_form is None:
        with self.graph.as_default():
            self.model_form = models.load_model(self.model_file,custom_objects={"precision":precision,"recall":recall,"f1_score":f1_score})
    return self.model_form
    '''

    def encode(self,data,**kwargs):
        return encodeInput([data], word_len=50, word_flag=True,userFool=False)[0]
        # unreachable: kept from an earlier version
        # return encodeInput_form(data)

    def predict(self,x):
        model_form = self.getModel()
        list_result = limitRun(self.sess_form,[model_form[1]],feed_dict={model_form[0][0]:x})[0]
        return list_result
        #return self.sess_form.run(model_form[1],feed_dict={model_form[0][0]:x})

    '''
    with self.graph.as_default():
        return self.getModel().predict(x)
    '''
class Model_form_context():
    def __init__(self,lazyLoad=getLazyLoad()):
        self.model_form = None
        self.sess_form = tf.Session(graph=tf.Graph())
        if not lazyLoad:
            self.getModel()

    def getModel(self):
        if self.model_form is None:
            with self.sess_form.as_default() as sess:
                with sess.graph.as_default():
                    meta_graph_def = tf.saved_model.loader.load(sess,tags=["serve"],export_dir="%s/form_context_savedmodel"%(os.path.dirname(__file__)))
                    signature_key = tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
                    signature_def = meta_graph_def.signature_def
                    inputs = sess.graph.get_tensor_by_name(signature_def[signature_key].inputs["inputs"].name)
                    output = sess.graph.get_tensor_by_name(signature_def[signature_key].outputs["outputs"].name)
                    self.model_form = [[inputs],output]
        return self.model_form

    '''
    if self.model_form is None:
        with self.graph.as_default():
            self.model_form = models.load_model(self.model_file,custom_objects={"precision":precision,"recall":recall,"f1_score":f1_score})
    return self.model_form
    '''

    def encode_table(self,inner_table,size=30):

        def encode_item(_table,i,j):
            _x = [_table[j-1][i-1],_table[j-1][i],_table[j-1][i+1],
                  _table[j][i-1],_table[j][i],_table[j][i+1],
                  _table[j+1][i-1],_table[j+1][i],_table[j+1][i+1]]
            e_x = [encodeInput_form(_temp[0],MAX_LEN=30) for _temp in _x]
            _label = _table[j][i][1]
            # print(_x)
            # print(_x[4],_label)
            return e_x,_label,_x

        def copytable(inner_table):
            table = []
            for line in inner_table:
                list_line = []
                for item in line:
                    list_line.append([item[0][:size],item[1]])
                table.append(list_line)
            return table

        table = copytable(inner_table)
        padding = ["#"*30,0]
        width = len(table[0])
        height = len(table)
        table.insert(0,[padding for i in range(width)])
        table.append([padding for i in range(width)])
        for item in table:
            item.insert(0,padding.copy())
            item.append(padding.copy())
        data_x = []
        data_y = []
        data_text = []
        data_position = []
        for _i in range(1,width+1):
            for _j in range(1,height+1):
                _x,_y,_text = encode_item(table,_i,_j)
                data_x.append(_x)
                _label = [0,0]
                _label[_y] = 1
                data_y.append(_label)
                data_text.append(_text)
                data_position.append([_i-1,_j-1])
                # input = table[_j][_i][0]
                # item_y = [0,0]
                # item_y[table[_j][_i][1]] = 1
                # data_x.append(encodeInput([input], word_len=50, word_flag=True,userFool=False)[0])
                # data_y.append(item_y)
        return data_x,data_y,data_text,data_position
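    # encode_table() pads the table with one ring of "#"-filled cells, then for
    # every original cell encodes the 3x3 neighbourhood of cell texts with
    # encodeInput_form (MAX_LEN=30), so the context model sees each cell together
    # with its surrounding cells; data_position keeps the original (column, row)
    # index of every encoded cell.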
    def encode(self,inner_table,**kwargs):
        data_x,_,_,data_position = self.encode_table(inner_table)
        return data_x,data_position

    def predict(self,x):
        model_form = self.getModel()
        list_result = limitRun(self.sess_form,[model_form[1]],feed_dict={model_form[0][0]:x})[0]
        return list_result
# class Model_form_item():
#     def __init__(self,lazyLoad=False):
#         self.model_file = os.path.dirname(__file__)+"/ep039-loss0.038-val_loss0.064-f10.9783.h5"
#         self.model_form = None
#
#         if not lazyLoad:
#             self.getModel()
#         self.graph = tf.get_default_graph()
#
#     def getModel(self):
#         if self.model_form is None:
#             self.model_form = models.load_model(self.model_file,custom_objects={"precision":precision,"recall":recall,"f1_score":f1_score})
#         return self.model_form
#
#     def encode(self,data,**kwargs):
#
#         return encodeInput_form(data)
#
#     def predict(self,x):
#         with self.graph.as_default():
#             return self.getModel().predict(x)