Module 'tensorflow.app' has no attribute 'flags'


Problem description


# For single hand and no body part in the picture
# ======================================================

import tensorflow as tf
from models.nets import cpm_hand_slim
import numpy as np
from utils import cpm_utils
import cv2
import time
import math
import sys

"""Parameters
"""
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('DEMO_TYPE',
                           #default_value='test_imgs/roger.png',
                           default_value='test_imgs/longhand.jpg',
                           #default_value='SINGLE',
                           docstring='MULTI: show multiple stage,'
                                    'SINGLE: only last stage,'
                                     'HM: show last stage heatmap,'
                                     'paths to .jpg or .png image')
tf.app.flags.DEFINE_string('model_path',
                           default_value='models/weights/cpm_hand.pkl',
                           docstring='Your model')
tf.app.flags.DEFINE_integer('input_size',
                            default_value=368,
                            docstring='Input image size')
tf.app.flags.DEFINE_integer('hmap_size',
                            default_value=46,
                            docstring='Output heatmap size')
tf.app.flags.DEFINE_integer('cmap_radius',
                            default_value=21,
                            docstring='Center map gaussian variance')
tf.app.flags.DEFINE_integer('joints',
                            default_value=21,
                            docstring='Number of joints')
tf.app.flags.DEFINE_integer('stages',
                            default_value=6,
                            docstring='How many CPM stages')
tf.app.flags.DEFINE_integer('cam_num',
                            default_value=0,
                            docstring='Webcam device number')
tf.app.flags.DEFINE_bool('KALMAN_ON',
                         default_value=True,
                         docstring='enable kalman filter')
tf.app.flags.DEFINE_float('kalman_noise',
                            default_value=3e-2,
                            docstring='Kalman filter noise value')
tf.app.flags.DEFINE_string('color_channel',
                           default_value='RGB',
                           docstring='')

# Set color for each finger
joint_color_code = [[139, 53, 255],
                    [0, 56, 255],
                    [43, 140, 237],
                    [37, 168, 36],
                    [147, 147, 0],
                    [70, 17, 145]]


limbs = [[0, 1],
         [1, 2],
         [2, 3],
         [3, 4],
         [0, 5],
         [5, 6],
         [6, 7],
         [7, 8],
         [0, 9],
         [9, 10],
         [10, 11],
         [11, 12],
         [0, 13],
         [13, 14],
         [14, 15],
         [15, 16],
         [0, 17],
         [17, 18],
         [18, 19],
         [19, 20]
         ]

if sys.version_info.major == 3:
    PYTHON_VERSION = 3
else:
    PYTHON_VERSION = 2


def main(argv):
    tf_device = '/gpu:0'
    with tf.device(tf_device):
        """Build graph
        """
        if FLAGS.color_channel == 'RGB':
            input_data = tf.placeholder(dtype=tf.float32, shape=[None, FLAGS.input_size, FLAGS.input_size, 3],
                                        name='input_image')
        else:
            input_data = tf.placeholder(dtype=tf.float32, shape=[None, FLAGS.input_size, FLAGS.input_size, 1],
                                        name='input_image')

        center_map = tf.placeholder(dtype=tf.float32, shape=[None, FLAGS.input_size, FLAGS.input_size, 1],
                                    name='center_map')

        model = cpm_hand_slim.CPM_Model(FLAGS.stages, FLAGS.joints + 1)
        model.build_model(input_data, center_map, 1)

    saver = tf.train.Saver()

    """Create session and restore weights
    """
    sess = tf.Session()

    sess.run(tf.global_variables_initializer())
    if FLAGS.model_path.endswith('pkl'):
        model.load_weights_from_file(FLAGS.model_path, sess, False)
    else:
        saver.restore(sess, FLAGS.model_path)

    test_center_map = cpm_utils.gaussian_img(FLAGS.input_size, FLAGS.input_size, FLAGS.input_size / 2,
                                             FLAGS.input_size / 2,
                                             FLAGS.cmap_radius)
    test_center_map = np.reshape(test_center_map, [1, FLAGS.input_size, FLAGS.input_size, 1])

    # Check weights
    for variable in tf.trainable_variables():
        with tf.variable_scope('', reuse=True):
            var = tf.get_variable(variable.name.split(':0')[0])
            print(variable.name, np.mean(sess.run(var)))

    if not FLAGS.DEMO_TYPE.endswith(('png', 'jpg')):
        cam = cv2.VideoCapture(FLAGS.cam_num)

    # Create kalman filters
    if FLAGS.KALMAN_ON:
        kalman_filter_array = [cv2.KalmanFilter(4, 2) for _ in range(FLAGS.joints)]
        for _, joint_kalman_filter in enumerate(kalman_filter_array):
            joint_kalman_filter.transitionMatrix = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 1, 0], [0, 0, 0, 1]],
                                                            np.float32)
            joint_kalman_filter.measurementMatrix = np.array([[1, 0, 0, 0], [0, 1, 0, 0]], np.float32)
            joint_kalman_filter.processNoiseCov = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]],
                                                           np.float32) * FLAGS.kalman_noise
    else:
        kalman_filter_array = None

    with tf.device(tf_device):

        while True:
            t1 = time.time()
            if FLAGS.DEMO_TYPE.endswith(('png', 'jpg')):
                test_img = cpm_utils.read_image(FLAGS.DEMO_TYPE, [], FLAGS.input_size, 'IMAGE')
            else:
                test_img = cpm_utils.read_image([], cam, FLAGS.input_size, 'WEBCAM')

            test_img_resize = cv2.resize(test_img, (FLAGS.input_size, FLAGS.input_size))
            print('img read time %f' % (time.time() - t1))

            if FLAGS.color_channel == 'GRAY':
                test_img_resize = np.dot(test_img_resize[..., :3], [0.299, 0.587, 0.114]).reshape(
                    (FLAGS.input_size, FLAGS.input_size, 1))
                cv2.imshow('color', test_img.astype(np.uint8))
                cv2.imshow('gray', test_img_resize.astype(np.uint8))
                cv2.waitKey(1)

            test_img_input = test_img_resize / 256.0 - 0.5
            test_img_input = np.expand_dims(test_img_input, axis=0)


            if FLAGS.DEMO_TYPE.endswith(('png', 'jpg')):
                # Inference
                t1 = time.time()
                predict_heatmap, stage_heatmap_np = sess.run([model.current_heatmap,
                                                              model.stage_heatmap,
                                                              ],
                                                             feed_dict={'input_image:0': test_img_input,
                                                                        'center_map:0': test_center_map})

                # Show visualized image
                demo_img = visualize_result(test_img, FLAGS, stage_heatmap_np, kalman_filter_array)
                cv2.imshow('demo_img', demo_img.astype(np.uint8))
                if cv2.waitKey(0) == ord('q'): break
                print('fps: %.2f' % (1 / (time.time() - t1)))
            elif FLAGS.DEMO_TYPE == 'MULTI':

                # Inference
                t1 = time.time()
                predict_heatmap, stage_heatmap_np = sess.run([model.current_heatmap,
                                                              model.stage_heatmap,
                                                              ],
                                                             feed_dict={'input_image:0': test_img_input,
                                                                        'center_map:0': test_center_map})

                # Show visualized image
                demo_img = visualize_result(test_img, FLAGS, stage_heatmap_np, kalman_filter_array)
                cv2.imshow('demo_img', demo_img.astype(np.uint8))
                if cv2.waitKey(1) == ord('q'): break
                print('fps: %.2f' % (1 / (time.time() - t1)))


            elif FLAGS.DEMO_TYPE == 'SINGLE':

                # Inference
                t1 = time.time()
                stage_heatmap_np = sess.run([model.stage_heatmap[5]],
                                            feed_dict={'input_image:0': test_img_input,
                                                       'center_map:0': test_center_map})

                # Show visualized image
                demo_img = visualize_result(test_img, FLAGS, stage_heatmap_np, kalman_filter_array)
                cv2.imshow('current heatmap', (demo_img).astype(np.uint8))
                if cv2.waitKey(1) == ord('q'): break
                print('fps: %.2f' % (1 / (time.time() - t1)))


            elif FLAGS.DEMO_TYPE == 'HM':

                # Inference
                t1 = time.time()
                stage_heatmap_np = sess.run([model.stage_heatmap[FLAGS.stages - 1]],
                                            feed_dict={'input_image:0': test_img_input,
                                                       'center_map:0': test_center_map})
                print('fps: %.2f' % (1 / (time.time() - t1)))

                demo_stage_heatmap = stage_heatmap_np[len(stage_heatmap_np) - 1][0, :, :, 0:FLAGS.joints].reshape(
                    (FLAGS.hmap_size, FLAGS.hmap_size, FLAGS.joints))
                demo_stage_heatmap = cv2.resize(demo_stage_heatmap, (FLAGS.input_size, FLAGS.input_size))

                vertical_imgs = []
                tmp_img = None
                joint_coord_set = np.zeros((FLAGS.joints, 2))

                for joint_num in range(FLAGS.joints):
                    # Concat until 4 img
                    if (joint_num % 4) == 0 and joint_num != 0:
                        vertical_imgs.append(tmp_img)
                        tmp_img = None

                    demo_stage_heatmap[:, :, joint_num] *= (255 / np.max(demo_stage_heatmap[:, :, joint_num]))

                    # Plot color joints
                    if np.min(demo_stage_heatmap[:, :, joint_num]) > -50:
                        joint_coord = np.unravel_index(np.argmax(demo_stage_heatmap[:, :, joint_num]),
                                                       (FLAGS.input_size, FLAGS.input_size))
                        joint_coord_set[joint_num, :] = joint_coord
                        color_code_num = (joint_num // 4)

                        if joint_num in [0, 4, 8, 12, 16]:
                            if PYTHON_VERSION == 3:
                                joint_color = list(
                                    map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num]))
                            else:
                                joint_color = map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num])

                            cv2.circle(test_img, center=(joint_coord[1], joint_coord[0]), radius=3, color=joint_color,
                                       thickness=-1)
                        else:
                            if PYTHON_VERSION == 3:
                                joint_color = list(
                                    map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num]))
                            else:
                                joint_color = map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num])

                            cv2.circle(test_img, center=(joint_coord[1], joint_coord[0]), radius=3, color=joint_color,
                                       thickness=-1)

                    # Put text
                    tmp = demo_stage_heatmap[:, :, joint_num].astype(np.uint8)
                    tmp = cv2.putText(tmp, 'Min:' + str(np.min(demo_stage_heatmap[:, :, joint_num])),
                                      org=(5, 20), fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.3, color=150)
                    tmp = cv2.putText(tmp, 'Mean:' + str(np.mean(demo_stage_heatmap[:, :, joint_num])),
                                      org=(5, 30), fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.3, color=150)
                    tmp_img = np.concatenate((tmp_img, tmp), axis=0) \
                        if tmp_img is not None else tmp

                # Plot limbs
                for limb_num in range(len(limbs)):
                    if np.min(demo_stage_heatmap[:, :, limbs[limb_num][0]]) > -2000 and np.min(
                            demo_stage_heatmap[:, :, limbs[limb_num][1]]) > -2000:
                        x1 = joint_coord_set[limbs[limb_num][0], 0]
                        y1 = joint_coord_set[limbs[limb_num][0], 1]
                        x2 = joint_coord_set[limbs[limb_num][1], 0]
                        y2 = joint_coord_set[limbs[limb_num][1], 1]
                        length = ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
                        if length < 10000 and length > 5:
                            deg = math.degrees(math.atan2(x1 - x2, y1 - y2))
                            polygon = cv2.ellipse2Poly((int((y1 + y2) / 2), int((x1 + x2) / 2)),
                                                       (int(length / 2), 3),
                                                       int(deg),
                                                       0, 360, 1)
                            color_code_num = limb_num // 4
                            if PYTHON_VERSION == 3:
                                limb_color = list(
                                    map(lambda x: x + 35 * (limb_num % 4), joint_color_code[color_code_num]))
                            else:
                                limb_color = map(lambda x: x + 35 * (limb_num % 4), joint_color_code[color_code_num])

                            cv2.fillConvexPoly(test_img, polygon, color=limb_color)

                if tmp_img is not None:
                    tmp_img = np.lib.pad(tmp_img, ((0, vertical_imgs[0].shape[0] - tmp_img.shape[0]), (0, 0)),
                                         'constant', constant_values=(0, 0))
                    vertical_imgs.append(tmp_img)

                # Concat horizontally
                output_img = None
                for col in range(len(vertical_imgs)):
                    output_img = np.concatenate((output_img, vertical_imgs[col]), axis=1) if output_img is not None else \
                        vertical_imgs[col]

                output_img = output_img.astype(np.uint8)
                output_img = cv2.applyColorMap(output_img, cv2.COLORMAP_JET)
                test_img = cv2.resize(test_img, (300, 300), cv2.INTER_LANCZOS4)
                cv2.imshow('hm', output_img)
                cv2.moveWindow('hm', 2000, 200)
                cv2.imshow('rgb', test_img)
                cv2.moveWindow('rgb', 2000, 750)
                if cv2.waitKey(1) == ord('q'): break


def visualize_result(test_img, FLAGS, stage_heatmap_np, kalman_filter_array):
    t1 = time.time()
    demo_stage_heatmaps = []
    if FLAGS.DEMO_TYPE == 'MULTI':
        for stage in range(len(stage_heatmap_np)):
            demo_stage_heatmap = stage_heatmap_np[stage][0, :, :, 0:FLAGS.joints].reshape(
                (FLAGS.hmap_size, FLAGS.hmap_size, FLAGS.joints))
            demo_stage_heatmap = cv2.resize(demo_stage_heatmap, (test_img.shape[1], test_img.shape[0]))
            demo_stage_heatmap = np.amax(demo_stage_heatmap, axis=2)
            demo_stage_heatmap = np.reshape(demo_stage_heatmap, (test_img.shape[1], test_img.shape[0], 1))
            demo_stage_heatmap = np.repeat(demo_stage_heatmap, 3, axis=2)
            demo_stage_heatmap *= 255
            demo_stage_heatmaps.append(demo_stage_heatmap)

        last_heatmap = stage_heatmap_np[len(stage_heatmap_np) - 1][0, :, :, 0:FLAGS.joints].reshape(
            (FLAGS.hmap_size, FLAGS.hmap_size, FLAGS.joints))
        last_heatmap = cv2.resize(last_heatmap, (test_img.shape[1], test_img.shape[0]))
    else:
        last_heatmap = stage_heatmap_np[len(stage_heatmap_np) - 1][0, :, :, 0:FLAGS.joints].reshape(
            (FLAGS.hmap_size, FLAGS.hmap_size, FLAGS.joints))
        last_heatmap = cv2.resize(last_heatmap, (test_img.shape[1], test_img.shape[0]))
    print('hm resize time %f' % (time.time() - t1))

    t1 = time.time()
    joint_coord_set = np.zeros((FLAGS.joints, 2))

    # Plot joint colors
    if kalman_filter_array is not None:
        for joint_num in range(FLAGS.joints):
            joint_coord = np.unravel_index(np.argmax(last_heatmap[:, :, joint_num]),
                                           (test_img.shape[0], test_img.shape[1]))
            joint_coord = np.array(joint_coord).reshape((2, 1)).astype(np.float32)
            kalman_filter_array[joint_num].correct(joint_coord)
            kalman_pred = kalman_filter_array[joint_num].predict()
            joint_coord_set[joint_num, :] = np.array([kalman_pred[0], kalman_pred[1]]).reshape((2))

            color_code_num = (joint_num // 4)
            if joint_num in [0, 4, 8, 12, 16]:
                if PYTHON_VERSION == 3:
                    joint_color = list(map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num]))
                else:
                    joint_color = map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num])

                cv2.circle(test_img, center=(joint_coord[1], joint_coord[0]), radius=3, color=joint_color, thickness=-1)
            else:
                if PYTHON_VERSION == 3:
                    joint_color = list(map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num]))
                else:
                    joint_color = map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num])

                cv2.circle(test_img, center=(joint_coord[1], joint_coord[0]), radius=3, color=joint_color, thickness=-1)
    else:
        for joint_num in range(FLAGS.joints):
            joint_coord = np.unravel_index(np.argmax(last_heatmap[:, :, joint_num]),
                                           (test_img.shape[0], test_img.shape[1]))
            joint_coord_set[joint_num, :] = [joint_coord[0], joint_coord[1]]

            color_code_num = (joint_num // 4)
            if joint_num in [0, 4, 8, 12, 16]:
                if PYTHON_VERSION == 3:
                    joint_color = list(map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num]))
                else:
                    joint_color = map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num])

                cv2.circle(test_img, center=(joint_coord[1], joint_coord[0]), radius=3, color=joint_color, thickness=-1)
            else:
                if PYTHON_VERSION == 3:
                    joint_color = list(map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num]))
                else:
                    joint_color = map(lambda x: x + 35 * (joint_num % 4), joint_color_code[color_code_num])

                cv2.circle(test_img, center=(joint_coord[1], joint_coord[0]), radius=3, color=joint_color, thickness=-1)
    print('plot joint time %f' % (time.time() - t1))

    t1 = time.time()
    # Plot limb colors
    for limb_num in range(len(limbs)):

        x1 = joint_coord_set[limbs[limb_num][0], 0]
        y1 = joint_coord_set[limbs[limb_num][0], 1]
        x2 = joint_coord_set[limbs[limb_num][1], 0]
        y2 = joint_coord_set[limbs[limb_num][1], 1]
        length = ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
        if length < 150 and length > 5:
            deg = math.degrees(math.atan2(x1 - x2, y1 - y2))
            polygon = cv2.ellipse2Poly((int((y1 + y2) / 2), int((x1 + x2) / 2)),
                                       (int(length / 2), 3),
                                       int(deg),
                                       0, 360, 1)
            color_code_num = limb_num // 4
            if PYTHON_VERSION == 3:
                limb_color = list(map(lambda x: x + 35 * (limb_num % 4), joint_color_code[color_code_num]))
            else:
                limb_color = map(lambda x: x + 35 * (limb_num % 4), joint_color_code[color_code_num])

            cv2.fillConvexPoly(test_img, polygon, color=limb_color)
    print('plot limb time %f' % (time.time() - t1))

    if FLAGS.DEMO_TYPE == 'MULTI':
        upper_img = np.concatenate((demo_stage_heatmaps[0], demo_stage_heatmaps[1], demo_stage_heatmaps[2]), axis=1)
        lower_img = np.concatenate((demo_stage_heatmaps[3], demo_stage_heatmaps[len(stage_heatmap_np) - 1], test_img),
                                   axis=1)
        demo_img = np.concatenate((upper_img, lower_img), axis=0)
        return demo_img
    else:
        return test_img


if __name__ == '__main__':
    tf.app.run()



What I have tried:

I tried reinstalling TensorFlow, but I still get the same error.

Solution

See Module: tf.app | TensorFlow in the TensorFlow API reference.
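
This error normally points at the installed TensorFlow version rather than at the script itself: the flags API the script relies on is no longer exposed through tf.app in the TensorFlow 2.x line, while the 1.x API is still available under tf.compat.v1. Checking the installed version (python -c "import tensorflow as tf; print(tf.__version__)") confirms whether that is the case. Below is a minimal sketch of the usual workaround, assuming the mismatch comes from a TensorFlow 2.x install; the flag names are reused from the script above.

# A minimal sketch, assuming the AttributeError comes from a TensorFlow 2.x install.
# The 1.x API, including the flags wrapper, is still available under tf.compat.v1.
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()  # keep the 1.x graph/session semantics this script relies on

FLAGS = tf.app.flags.FLAGS
# Note: recent releases route tf.app.flags through absl.flags, whose DEFINE_* helpers
# expect (name, default, help) rather than the default_value=/docstring= keywords
# used in the original script.
tf.app.flags.DEFINE_string('model_path', 'models/weights/cpm_hand.pkl', 'Your model')
tf.app.flags.DEFINE_integer('input_size', 368, 'Input image size')
tf.app.flags.DEFINE_integer('stages', 6, 'How many CPM stages')

Alternatively, pinning a TensorFlow 1.x release (where tf.app.flags still exists), or importing absl.flags directly and dropping the tf.app indirection, also avoids the error; in either case the default_value= and docstring= keyword arguments in the original script need to be passed positionally (or as default=/help=) on releases whose flags are backed by absl.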

