#coding=utf-8
# "evaluate" is the entry method of a UDF and must use exactly this name
from odps.udf import annotate
from odps.distcache import get_cache_archive
from odps.distcache import get_cache_file
from odps.udf import BaseUDTF
  7. # 配置pandas依赖包
  8. def include_package_path(res_name):
  9. import os, sys
  10. archive_files = get_cache_archive(res_name)
  11. dir_names = sorted([os.path.dirname(os.path.normpath(f.name)) for f in archive_files
  12. if '.dist_info' not in f.name], key=lambda v: len(v))
  13. sys.path.append(dir_names[0])
  14. return os.path.dirname(dir_names[0])
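
# Usage sketch (hypothetical archive name; the archive must first be uploaded as a
# MaxCompute archive resource, e.g. "add archive pandas.zip;"):
#   pkg_root = include_package_path("pandas.zip")  # puts the shortest extracted dir on sys.path
#   import pandas as pd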

# A RuntimeError like "xxx has been blocked by sandbox" may occur here: libraries
# that contain C code are blocked by the sandbox. This can be lifted with
# set odps.isolation.session.enable = true
def include_file(file_name):
    import os, sys
    so_file = get_cache_file(file_name)
    sys.path.append(os.path.dirname(os.path.abspath(so_file.name)))

def include_so(file_name):
    # Copy a cached .so resource into the working directory so the loader can find it
    import os, sys
    so_file = get_cache_file(file_name)
    with open(so_file.name, 'rb') as fp:
        content = fp.read()
    with open(file_name, "wb") as so:
        so.write(content)
        so.flush()
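
# Usage sketch (hypothetical file resource, uploaded e.g. via "add file _mylib.so;"):
#   include_so("_mylib.so")  # materializes the cached .so next to the UDF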

# Initialize the business data packages. Because of resource upload limits, Python
# version mismatches, inconsistent archive extraction and similar issues, the
# packages have to be unpacked and imported manually.
def init_env(list_files, package_name):
    import os, sys
    if len(list_files) == 1:
        so_file = get_cache_file(list_files[0])
        cmd_line = os.path.abspath(so_file.name)
        os.system("unzip -o %s -d %s" % (cmd_line, package_name))
    elif len(list_files) > 1:
        # Concatenate the split parts back into a single zip, then unpack it
        cmd_line = "cat"
        for _file in list_files:
            so_file = get_cache_file(_file)
            cmd_line += " " + os.path.abspath(so_file.name)
        cmd_line += " > temp.zip"
        os.system(cmd_line)
        os.system("unzip -o temp.zip -d %s" % (package_name))
    # os.system("rm -rf %s/*.dist-info"%(package_name))
    # return os.listdir(os.path.abspath("local_package"))
    # os.system("echo export LD_LIBRARY_PATH=%s >> ~/.bashrc"%(os.path.abspath("local_package")))
    # os.system("source ~/.bashrc")
    sys.path.insert(0, os.path.abspath(package_name))
    # sys.path.append(os.path.join(os.path.abspath("local_package"),"interface_real"))
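
# Usage sketch (resource names as in the commented-out calls further down this file):
#   init_env(["BiddingKG.z01", "BiddingKG.z02"], "local_package")  # reassemble a split zip
#   import BiddingKG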

# UDF main program
# Because Series may be called in several places, it is first made global in __init__.
@annotate("string->string")
class JiebaCut(object):
    def __init__(self):
        # zip_01 = include_package_path('testB01.zip')
        # zip_02 = include_package_path('testB02.zip')
        # self.cat_cmd = "cat %s %s > %s"%(zip_01+"/files/*",zip_02+"/files/*","testH.zip")
        # import os
        import sys
        # os.system(self.cat_cmd)
        # self.out = str(os.path.getsize("testH.zip"))
        # # self.out = str(os.listdir(zip_01+"/files/testB01/"))
        # os.system("mkdir jieba_t")
        # os.system("unzip testH.zip -d jieba_t")
        # self.out = str(os.listdir("jieba_t"))
        # sys.path.append(".")
        # # sys.path.append(os.path.dirname(os.path.normpath("jieba_test")))
        # # import jieba_test
        # from jieba_t import cut
        include_package_path("jiebaA.zip")
        import jieba
        reload(sys)                      # Python 2 only
        sys.setdefaultencoding('utf-8')  # Python 2 only
        global jieba

    def evaluate(self, x):
        import os
        # return self.out
        # return str(os.listdir("jieba_test"))
        # return self.cat_cmd
        return '--'.join(jieba.cut(x))
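
# Hypothetical registration and use from MaxCompute SQL (function and table names assumed):
#   add py evaluates.py;
#   add archive jiebaA.zip;
#   create function jieba_cut as 'evaluates.JiebaCut' using 'evaluates.py,jiebaA.zip';
#   select jieba_cut(title) from t_docs;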

@annotate("string->string")
class Preprocess(BaseUDTF):
    def __init__(self):
        # init_env(["gensim_package.zip.env"],"local_package1")
        import sys
        import uuid
        self.out = init_env(["BiddingKG.zip.env"], "local_package")
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        self.out = include_package_path("envs_py37.env.zip")
        # self.out = init_env(["envs_py37.zip.env"],"local_package")
        self.out = init_env(["so.env"], ".")
        import BiddingKG.dl.interface.predictor as predictor
        import BiddingKG.dl.interface.Preprocessing as Preprocessing
        import BiddingKG.dl.entityLink.entityLink as entityLink
        import BiddingKG.dl.interface.getAttributes as getAttributes
        global Preprocessing, entityLink, predictor, uuid, getAttributes
        # import gensim
        # include_package_path("numpy.zip")
        # init_env(["tensorflow-1.14.0-cp37-cp37m-manylinux1_x86_64.whl"])
        # so_file = get_cache_file("tensorflow-1.14.0-cp37-cp37m-manylinux1_x86_64.whl")
        # import os
        # self.out = os.path.abspath(so_file.name)
        # import tensorflow

    def process(self, x):
        k = str(uuid.uuid4())
        list_articles = Preprocessing.get_preprocessed_articles([[k, x, "", "_doc_id", ""]], useselffool=True)
        self.forward(list_articles[0].toJson())
        # list_articles,list_sentences,list_entitys,_cost_time = Preprocessing.get_preprocessed([[k,x,"","_doc_id",""]],useselffool=True)
        #
        # codeName = predictor.getPredictor("codeName").predict(list_sentences,MAX_AREA=2000,list_entitys=list_entitys)
        #
        # predictor.getPredictor("prem").predict(list_sentences,list_entitys)
        #
        # predictor.getPredictor("roleRule").predict(list_articles,list_sentences, list_entitys,codeName)
        #
        # predictor.getPredictor("epc").predict(list_sentences,list_entitys)
        #
        # entityLink.link_entitys(list_entitys)
        #
        # prem = getAttributes.getPREMs(list_sentences,list_entitys,list_articles)
        # # return str(self.out)
        # return "1"
        # list_articles,list_sentences,list_entitys,_ = Preprocessing.get_articles_processed([["doc_id",x,"","",""]],useselffool=True)
        # if len(list_articles)==1:
        #     json_article = list_articles[0]
        # self.forward(list_sentences[0][0].sentence_text)
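
# The classes below split the same extraction pipeline into per-stage UDTFs so the
# stages can be chained through intermediate tables: Preprocess_article ->
# Preprocess_sentences -> Preprocess_entitys -> Predict_codename / Predict_role /
# Predict_money / Predict_person -> ContentUnion, which merges the per-stage
# outputs into the final result.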

@annotate("string -> string,string")
class Preprocess_article(BaseUDTF):
    def __init__(self):
        # self.out = init_env(["BiddingKG.z01","BiddingKG.z02"],"local_package")
        self.out = init_env(["BiddingKG.zip.env"], "local_package")
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        self.out = init_env(["envs_py37.zip.env"], "local_package")
        self.out = init_env(["so.env"], ".")
        import uuid
        import BiddingKG.dl.interface.predictor as predictor
        import BiddingKG.dl.interface.Preprocessing as Preprocessing
        import BiddingKG.dl.entityLink.entityLink as entityLink
        import BiddingKG.dl.interface.getAttributes as getAttributes
        global Preprocessing, entityLink, predictor, uuid, getAttributes

    def process(self, x):
        if x is not None:
            k = str(uuid.uuid4())
            list_articles = Preprocessing.get_preprocessed_article([[k, x, "", "_doc_id", ""]])
            self.forward(list_articles[0].id, list_articles[0].toJson())

@annotate("string->string,string")
class Preprocess_sentences(BaseUDTF):
    def __init__(self):
        # self.out = init_env(["BiddingKG.z01","BiddingKG.z02"],"local_package")
        self.out = init_env(["BiddingKG.zip.env"], "local_package")
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        self.out = init_env(["envs_py37.zip.env"], "local_package")
        self.out = init_env(["so.env"], ".")
        import BiddingKG.dl.interface.Preprocessing as Preprocessing
        import BiddingKG.dl.interface.Entitys as Entitys
        import json
        global Preprocessing, Entitys, json

    def process(self, x):
        _article = Entitys.Article.fromJson(x)
        list_sentences = Preprocessing.get_preprocessed_sentences([_article], True)
        list_out = []
        for _sentence in list_sentences[0]:
            list_out.append(_sentence.toJson())
        self.forward(_article.id, json.dumps(list_out))

@annotate("string->string,string")
class Preprocess_entitys(BaseUDTF):
    def __init__(self):
        # self.out = init_env(["BiddingKG.z01","BiddingKG.z02"],"local_package")
        self.out = init_env(["BiddingKG.zip.env"], "local_package")
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        self.out = init_env(["envs_py37.zip.env"], "local_package")
        self.out = init_env(["so.env"], ".")
        import BiddingKG.dl.interface.Preprocessing as Preprocessing
        import BiddingKG.dl.interface.Entitys as Entitys
        import json
        global Preprocessing, Entitys, json

    def process(self, x):
        list_sentence = []
        for _x in json.loads(x):
            list_sentence.append(Entitys.Sentences.fromJson(_x))
        list_out = []
        list_entitys = Preprocessing.get_preprocessed_entitys([list_sentence], True)
        for _entity in list_entitys[0]:
            list_out.append(_entity.toJson())
        self.forward(list_sentence[0].doc_id, json.dumps(list_out))

@annotate("string->string,string")
class Predict_codename(BaseUDTF):
    def __init__(self):
        # self.out = init_env(["BiddingKG.z01","BiddingKG.z02"],"local_package")
        self.out = init_env(["BiddingKG.zip.env"], "local_package")
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        self.out = init_env(["envs_py37.zip.env"], "local_package")
        self.out = init_env(["so.env"], ".")
        import BiddingKG.dl.interface.predictor as predictor
        import BiddingKG.dl.interface.Entitys as Entitys
        import json
        global predictor, Entitys, json

    def process(self, x):
        list_sentence = []
        for _x in json.loads(x):
            list_sentence.append(Entitys.Sentences.fromJson(_x))
        codename = predictor.getPredictor("codeName").predict([list_sentence], MAX_AREA=2000)
        self.forward(codename[0][0], json.dumps(codename[0]))

@annotate("string,string->string,string")
class Predict_role(BaseUDTF):
    def __init__(self):
        # self.out = init_env(["BiddingKG.z01","BiddingKG.z02"],"local_package")
        self.out = init_env(["BiddingKG.zip.env"], "local_package")
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        self.out = init_env(["envs_py37.zip.env"], "local_package")
        self.out = init_env(["so.env"], ".")
        import BiddingKG.dl.interface.predictor as predictor
        import BiddingKG.dl.interface.Entitys as Entitys
        import json
        global predictor, Entitys, json

    def process(self, x, y):
        list_sentence = []
        list_entity = []
        for _x in json.loads(x):
            list_sentence.append(Entitys.Sentences.fromJson(_x))
        for _y in json.loads(y):
            list_entity.append(Entitys.Entity.fromJson(_y))
        predictor.getPredictor("prem").predict_role([list_sentence], [list_entity])
        list_out = []
        for _entity in list_entity:
            if _entity.label is not None:
                list_out.append(_entity.toJson())
        self.forward(list_sentence[0].doc_id, json.dumps(list_out))

@annotate("string,string->string,string")
class Predict_money(BaseUDTF):
    def __init__(self):
        # self.out = init_env(["BiddingKG.z01","BiddingKG.z02"],"local_package")
        self.out = init_env(["BiddingKG.zip.env"], "local_package")
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        self.out = init_env(["envs_py37.zip.env"], "local_package")
        self.out = init_env(["so.env"], ".")
        import BiddingKG.dl.interface.predictor as predictor
        import BiddingKG.dl.interface.Entitys as Entitys
        import json
        global predictor, Entitys, json

    def process(self, x, y):
        list_sentence = []
        list_entity = []
        for _x in json.loads(x):
            list_sentence.append(Entitys.Sentences.fromJson(_x))
        for _y in json.loads(y):
            list_entity.append(Entitys.Entity.fromJson(_y))
        predictor.getPredictor("prem").predict_money([list_sentence], [list_entity])
        list_out = []
        for _entity in list_entity:
            if _entity.label is not None:
                list_out.append(_entity.toJson())
        self.forward(list_sentence[0].doc_id, json.dumps(list_out))

@annotate("string,string->string,string")
class Predict_person(BaseUDTF):
    def __init__(self):
        # self.out = init_env(["BiddingKG.z01","BiddingKG.z02"],"local_package")
        self.out = init_env(["BiddingKG.zip.env"], "local_package")
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        self.out = init_env(["envs_py37.zip.env"], "local_package")
        self.out = init_env(["so.env"], ".")
        import BiddingKG.dl.interface.predictor as predictor
        import BiddingKG.dl.interface.Entitys as Entitys
        import json
        global predictor, Entitys, json

    def process(self, x, y):
        list_sentence = []
        list_entity = []
        for _x in json.loads(x):
            list_sentence.append(Entitys.Sentences.fromJson(_x))
        for _y in json.loads(y):
            list_entity.append(Entitys.Entity.fromJson(_y))
        predictor.getPredictor("epc").predict_person([list_sentence], [list_entity])
        list_out = []
        for _entity in list_entity:
            if _entity.label is not None:
                list_out.append(_entity.toJson())
        self.forward(list_sentence[0].doc_id, json.dumps(list_out))

@annotate("string,string,string,string,string,string,string->string,string,string")
class ContentUnion(BaseUDTF):
    def __init__(self):
        # self.out = init_env(["BiddingKG.z01","BiddingKG.z02"],"local_package")
        self.out = init_env(["BiddingKG.zip.env"], "local_package")
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        self.out = init_env(["envs_py37.zip.env"], "local_package")
        self.out = init_env(["so.env"], ".")
        import BiddingKG.dl.interface.predictor as predictor
        import BiddingKG.dl.interface.Entitys as Entitys
        import BiddingKG.dl.interface.getAttributes as getAttributes
        import BiddingKG.dl.entityLink.entityLink as entityLink
        import BiddingKG.dl.interface.Preprocessing as Preprocessing
        import json
        import numpy as np
        global predictor, Entitys, getAttributes, entityLink, json, MyEncoder, Preprocessing, np

        # Custom JSON encoder for numpy and bytes values
        class MyEncoder(json.JSONEncoder):
            def default(self, obj):
                if isinstance(obj, np.ndarray):
                    return obj.tolist()
                elif isinstance(obj, bytes):
                    return str(obj, encoding='utf-8')
                elif isinstance(obj, (np.float_, np.float16, np.float32,
                                      np.float64)):
                    return float(obj)
                return json.JSONEncoder.default(self, obj)

    def process(self, json_article, list_json_sentence, list_json_entity, list_json_entity_role,
                list_json_entity_money, list_json_entity_person, json_codename):
        dict_entity = {}
        list_sentence = []
        list_entity = []
        _article = Entitys.Article.fromJson(json_article)
        for list_json in [list_json_entity_role, list_json_entity_money, list_json_entity_person]:
            for _json_entity in json.loads(list_json):
                _entity = Entitys.Entity.fromJson(_json_entity)
                _key = "%s-%s-%s" % (str(_entity.doc_id), str(_entity.entity_id), str(_entity.entity_type))
                dict_entity[_key] = _entity
        for _json_sentence in json.loads(list_json_sentence):
            list_sentence.append(Entitys.Sentences.fromJson(_json_sentence))
        for _json_entity in json.loads(list_json_entity):
            _entity = Entitys.Entity.fromJson(_json_entity)
            _key = "%s-%s-%s" % (str(_entity.doc_id), str(_entity.entity_id), str(_entity.entity_type))
            if _key in dict_entity:
                list_entity.append(dict_entity[_key])
            else:
                list_entity.append(_entity)
        codeName = json.loads(json_codename)
        predictor.getPredictor("roleRule").predict([_article], [list_sentence], [list_entity], [codeName])
        entityLink.link_entitys([list_entity])
        prem = getAttributes.getPREMs([list_sentence], [list_entity], [_article])
        # result = json.dumps(Preprocessing.union_result([codeName], prem)[0][1],cls=MyEncoder,sort_keys=True,indent=4,ensure_ascii=False)
        result = json.dumps(Preprocessing.union_result([codeName], prem)[0][1], ensure_ascii=False)
        self.forward(_article.id, _article.doc_id, result)

@annotate("string,bigint,string,string->string,bigint,string")
class Extract(BaseUDTF):
    def __init__(self):
        # self.out = init_env(["BiddingKG.z01","BiddingKG.z02"],"local_package")
        import uuid
        global uuid
        import logging
        import datetime
        logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        logging.info("time1" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        self.out = init_env(["BiddingKG.zip.env"], str(uuid.uuid4()))
        logging.info("time2" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        self.out = init_env(["wiki_128_word_embedding_new.vector.env"], ".")
        logging.info("time3" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        # self.out = init_env(["envs_py37.zip.env"],str(uuid.uuid4()))
        self.out = include_package_path("envs_py37.env.zip")
        logging.info("time4" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        self.out = init_env(["so.env"], ".")
        logging.info("time5" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        import BiddingKG.dl.interface.predictor as predictor
        logging.info("time6" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        import BiddingKG.dl.interface.Entitys as Entitys
        logging.info("time6.1" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        import BiddingKG.dl.interface.getAttributes as getAttributes
        logging.info("time6.2" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        import BiddingKG.dl.entityLink.entityLink as entityLink
        logging.info("time6.2" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        import BiddingKG.dl.interface.Preprocessing as Preprocessing
        logging.info("time6.3" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        import json
        import time
        from BiddingKG.dl.common.Utils import log
        logging.info("time7" + str(datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S')))
        import numpy as np
        global predictor, Entitys, getAttributes, entityLink, json, MyEncoder, Preprocessing, time, log, np

        # Custom JSON encoder for numpy and bytes values
        class MyEncoder(json.JSONEncoder):
            def default(self, obj):
                if isinstance(obj, np.ndarray):
                    return obj.tolist()
                elif isinstance(obj, bytes):
                    return str(obj, encoding='utf-8')
                elif isinstance(obj, (np.float_, np.float16, np.float32,
                                      np.float64)):
                    return float(obj)
                elif isinstance(obj, np.int64):
                    return int(obj)
                return json.JSONEncoder.default(self, obj)
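
        # Minimal usage sketch of MyEncoder (illustrative values, not from the pipeline):
        #   json.dumps({"v": np.float32(0.5), "n": np.int64(3)}, cls=MyEncoder) -> '{"v": 0.5, "n": 3}'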

    def process(self, content, _doc_id, _title, page_time):
        # A few known-bad documents are skipped by doc id
        if content is not None and _doc_id not in [105677700, 126694044, 126795572, 126951461]:
            k = str(uuid.uuid4())
            cost_time = dict()
            start_time = time.time()
            log("start process doc %s" % (str(_doc_id)))
            try:
                list_articles, list_sentences, list_entitys, _cost_time = Preprocessing.get_preprocessed([[k, content, "", str(_doc_id), str(_title)]], useselffool=True)
                log("get preprocessed done of doc_id%s" % (_doc_id))
                cost_time["preprocess"] = time.time() - start_time
                cost_time.update(_cost_time)
                '''
                for articles in list_articles:
                    print(articles.content)
                '''
                start_time = time.time()
                codeName = predictor.getPredictor("codeName").predict(list_sentences, MAX_AREA=2000, list_entitys=list_entitys)
                log("get codename done of doc_id%s" % (_doc_id))
                cost_time["codename"] = time.time() - start_time
                start_time = time.time()
                predictor.getPredictor("prem").predict(list_sentences, list_entitys)
                log("get prem done of doc_id%s" % (_doc_id))
                cost_time["prem"] = time.time() - start_time
                start_time = time.time()
                predictor.getPredictor("roleRule").predict(list_articles, list_sentences, list_entitys, codeName)
                cost_time["rule"] = time.time() - start_time
                start_time = time.time()
                predictor.getPredictor("epc").predict(list_sentences, list_entitys)
                log("get epc done of doc_id%s" % (_doc_id))
                cost_time["person"] = time.time() - start_time
                start_time = time.time()
                entityLink.link_entitys(list_entitys)
                '''
                for list_entity in list_entitys:
                    for _entity in list_entity:
                        for _ent in _entity.linked_entitys:
                            print(_entity.entity_text,_ent.entity_text)
                '''
                prem = getAttributes.getPREMs(list_sentences, list_entitys, list_articles)
                log("get attributes done of doc_id%s" % (_doc_id))
                cost_time["attrs"] = time.time() - start_time
                # print(prem)
                data_res = Preprocessing.union_result(codeName, prem)[0][1]
                data_res["cost_time"] = cost_time
                data_res["success"] = True
                _article = list_articles[0]
                self.forward(page_time, int(_article.doc_id), json.dumps(data_res, cls=MyEncoder, ensure_ascii=False))
            except Exception as e:
                log("%s===error docid:%s" % (str(e), str(_doc_id)))
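
# Hypothetical invocation of the Extract UDTF from MaxCompute SQL (table and column names assumed):
#   select Extract(content, docid, doctitle, page_time) as (page_time, docid, extract_json)
#   from t_announcements;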