import tensorflow as tf
import sys
import os
import json
import re
import time
import uuid
from BiddingKG.dl.common.Utils import log
import BiddingKG.dl.interface.predictor as predictor
import BiddingKG.dl.interface.Preprocessing as Preprocessing
import BiddingKG.dl.interface.getAttributes as getAttributes
import BiddingKG.dl.entityLink.entityLink as entityLink
import numpy as np
import ctypes
import inspect
from threading import Thread
import traceback

# Run on CPU only.
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = ""

sys.path.append(os.path.abspath(".."))

# The input ODPS table is passed in through the --tables command-line flag.
tf.app.flags.DEFINE_string("tables", "", "tables info")
FLAGS = tf.app.flags.FLAGS
print("tables:" + FLAGS.tables)
tables = [FLAGS.tables]

# Read records from the input table for a single epoch.
filename_queue = tf.train.string_input_producer(tables, num_epochs=1)
reader = tf.TableRecordReader()
key, value = reader.read(filename_queue)

# Each record has four float feature columns and one string label column.
record_defaults = [[1.0], [1.0], [1.0], [1.0], ["Iris-virginica"]]
col1, col2, col3, col4, col5 = tf.decode_csv(value, record_defaults=record_defaults)
# The two lines above can be written more concisely, which helps when a table has many columns:
# record_defaults = [[1.0]] * 4 + [["Iris-virginica"]]
# value_list = tf.decode_csv(value, record_defaults=record_defaults)

# Write the five columns back out to the output ODPS table.
writer = tf.TableRecordWriter("odps://demo_show/tables/iris_output")
write_to_table = writer.write([0, 1, 2, 3, 4], [col1, col2, col3, col4, col5])
# The write above can likewise be shortened when many columns exist:
# write_to_table = writer.write(range(5), value_list)
close_table = writer.close()

init = tf.global_variables_initializer()

with tf.Session() as sess:
    sess.run(init)
    sess.run(tf.local_variables_initializer())
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(coord=coord)
    try:
        # Copy records from the input table to the output table until the
        # input queue is exhausted (num_epochs=1 raises OutOfRangeError).
        step = 0
        while not coord.should_stop():
            sess.run(write_to_table)
            step += 1
    except tf.errors.OutOfRangeError:
        print('%d records copied' % step)
    finally:
        sess.run(close_table)
        coord.request_stop()
        coord.join(threads)