Receiving messages with Kafka
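
This script is a Kafka-driven analysis worker: a master process consumes task messages from Kafka topics, launches a GPU child process per task (YOLOv5 detection plus semantic segmentation over a video file or live stream), and reports progress/heartbeat messages back to a results topic.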
import numpy as np
import time,ast,copy
from flask import request, Flask,jsonify
import base64,cv2,os,sys,json
sys.path.extend(['../yolov5'])
#from Send_tranfer import b64encode_function,JsonSend,name_dic,nameID_dic,getLogFileFp
from segutils.segmodel import SegModel,get_largest_contours
from models.experimental import attempt_load
from utils.datasets import LoadStreams, LoadImages
from utils.torch_utils import select_device, load_classifier, time_synchronized
from queRiver import get_labelnames,get_label_arrays,post_process_,save_problem_images,time_str
import subprocess as sp
import matplotlib.pyplot as plt
import torch,random,string
import multiprocessing
from multiprocessing import Process,Queue
import traceback
from kafka import KafkaProducer, KafkaConsumer,TopicPartition
from kafka.errors import KafkaError  # kafka-python exposes KafkaError; there is no kafka_errors
#torch.multiprocessing.set_start_method('spawn')
import utilsK
from utilsK.GPUtils import *
from utilsK.masterUtils import *
from utilsK.sendUtils import create_status_msg,update_json
#from utilsK.modelEval import onlineModelProcess
from Send_tranfer_oss import msg_dict_on,msg_dict_off
process_id=0

def onlineModelProcess(parIn):
    DEBUG=False
    streamName = parIn['streamName']
    childCallback=parIn['callback']
    #try:
    for wan in ['test']:  # single-pass loop, kept as a stand-in for the disabled try block
        jsonfile=parIn['modelJson']
        with open(jsonfile,'r') as fp:
            parAll = json.load(fp)
        Detweights=parAll['gpu_process']['det_weights']
        seg_nclass = parAll['gpu_process']['seg_nclass']
        Segweights = parAll['gpu_process']['seg_weights']
        videoSave = parAll['AI_video_save']
        imageTxtFile = parAll['imageTxtFile']
        inSource,outSource=parIn['inSource'],parIn['outSource']
        kafka_par=parIn['kafka_par']
        producer = KafkaProducer(bootstrap_servers=kafka_par['server'],value_serializer=lambda v: v.encode('utf-8'),metadata_max_age_ms=120000)
        device = select_device(parIn['device'])
        half = device.type != 'cpu'  # half precision only supported on CUDA
        model = attempt_load(Detweights, map_location=device)  # load FP32 model
        if half:
            model.half()
        if inSource.endswith('.MP4') or inSource.endswith('.mp4'):
            fps,outW,outH,totalcnt=get_fps_rtmp(inSource,video=True)[0:4]
        else:
            fps,outW,outH,totalcnt=get_fps_rtmp(inSource,video=False)[0:4]
        fps = int(fps+0.5)
        segmodel = SegModel(nclass=seg_nclass,weights=Segweights,device=device)
        if outSource != 'NO':
            command=['ffmpeg','-y','-f','rawvideo','-vcodec','rawvideo','-pix_fmt','bgr24',
                     '-s', "{}x{}".format(outW,outH),  # frame resolution
                     '-r', str(fps),                   # frame rate
                     '-i','-','-c:v','libx264','-pix_fmt','yuv420p',
                     '-f','flv',outSource
                     ]
            video_flag = videoSave['onLine']
            logdir = parAll['logChildProcessOnline']
        else:
            video_flag = videoSave['offLine']
            logdir = parAll['logChildProcessOffline']
        fp_log=create_logFile(logdir=logdir)
        # pipe setup: raw frames are written to ffmpeg's stdin when pushing a stream
        if outSource!='NO':
            ppipe = sp.Popen(command, stdin=sp.PIPE)
        ## post-processing parameters
        par=parAll['post_process']
        conf_thres,iou_thres,classes=par['conf_thres'],par['iou_thres'],par['classes']
        outImaDir = par['outImaDir']
        outVideoDir = par['outVideoDir']
        labelnames=par['labelnames']
        rainbows=par['rainbows']
        fpsample = par['fpsample']
        names=get_labelnames(labelnames)
        label_arraylist = get_label_arrays(names,rainbows,outfontsize=40)
        dataset = LoadStreams(inSource, img_size=640, stride=32)
        childCallback.send('####model load success####')
        if (outVideoDir!='NO') and video_flag:
            msg_id = streamName.split('-')[2]
            save_path = os.path.join(outVideoDir,msg_id+'.MP4')
            vid_writer = cv2.VideoWriter(save_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (outW,outH))
        iframe = 0; post_results=[]; time_beg=time.time()
        t00=time.time()
        time_kafka0=time.time()
        for path, img, im0s, vid_cap in dataset:
            t0 = time_synchronized()
            if not path:  # stream dropped or file finished: write end-marker images, then stop
                EndUrl='%s/%s_frame-9999-9999_type-结束_9999999999999999_s-%s_AI.jpg'%(outImaDir,time_str(),streamName)
                EndUrl = EndUrl.replace(' ','-').replace(':','-')
                img_end=np.zeros((100,100),dtype=np.uint8); cv2.imwrite(EndUrl,img_end)
                if imageTxtFile:
                    EndUrl_txt = EndUrl.replace('.jpg','.txt')
                    fp_t=open(EndUrl_txt,'w'); fp_t.write(EndUrl+'\n'); fp_t.close()
                EndUrl='%s/%s_frame-9999-9999_type-结束_9999999999999999_s-%s_OR.jpg'%(outImaDir,time_str(),streamName)
                EndUrl = EndUrl.replace(' ','-').replace(':','-')
                ret = cv2.imwrite(EndUrl,img_end)
                if imageTxtFile:
                    EndUrl_txt = EndUrl.replace('.jpg','.txt')
                    fp_t=open(EndUrl_txt,'w'); fp_t.write(EndUrl+'\n'); fp_t.close()
                childCallback.send('####stream ends####')
                if (outVideoDir!='NO') and video_flag:
                    vid_writer.release()
                break
            if outSource == 'NO':  ### not pushing a stream: show a progress bar instead
                view_bar(iframe,totalcnt,time_beg,parIn['process_uid'])
            ### both live and offline tasks send a status message once per minute
            time_kafka1 = time.time()
            if time_kafka1 - time_kafka0 > 60:
                time_kafka0 = time_kafka1
                ### send a "waiting" status message
                msg = copy.deepcopy(msg_dict_off); taskId,msgId = streamName.split('-')[1:3]
                msg['msg_id'] = msgId
                if outSource == 'NO':
                    msg['progressbar'] = '%.4f'%(iframe*1.0/totalcnt)
                    msg['type'] = 1
                else:
                    msg['progressbarOn'] = str(iframe)
                    msg['type'] = 2
                msg = json.dumps(msg, ensure_ascii=False)
                try:
                    record_metadata = producer.send(kafka_par['topic'], msg).get()
                    outstr='%s processing send progressbar or heartBeat to kafka: taskId:%s msgId:%s send:%s'%('-'*20,taskId,msgId,msg)
                    wrtiteLog(fp_log,outstr); print(outstr)
                except Exception as e:
                    outstr='#######kafka ERROR when sending progressbar or heartBeat, error: %s'%(str(e))
                    wrtiteLog(fp_log,outstr); print(outstr)
                    try:
                        # rebuild the producer and retry once on the same results topic
                        producer = KafkaProducer(bootstrap_servers=kafka_par['server'], value_serializer=lambda v: v.encode('utf-8'))
                        future = producer.send(kafka_par['topic'], msg).get()
                    except Exception as e:
                        outstr='%s re-send of progressbar or heartBeat to kafka failed: taskId:%s msgId:%s send:%s'%('-'*20,taskId,msgId,msg)
                        wrtiteLog(fp_log,outstr); print(outstr)
            time0=time.time()
            iframe += 1
            time1=time.time()
            img = torch.from_numpy(img).to(device)
            img = img.half() if half else img.float()  # uint8 to fp16/32
            img /= 255.0  # 0 - 255 to 0.0 - 1.0
            timeseg0 = time.time()
            seg_pred,segstr = segmodel.eval(im0s[0])
            timeseg1 = time.time()
            t1 = time_synchronized()
            pred = model(img,augment=False)[0]
            time4 = time.time()
            datas = [path, img, im0s, vid_cap, pred, seg_pred, iframe]
            p_result,timeOut = post_process_(datas,conf_thres,iou_thres,names,label_arraylist,rainbows,iframe)
            t2 = time_synchronized()
            ## every fpsample frames, save images for any accumulated problem detections
            if (iframe % fpsample == 0) and (len(post_results) > 0):
                parImage=save_problem_images(post_results,iframe,names,streamName=streamName,outImaDir='problems/images_tmp',imageTxtFile=imageTxtFile)
                post_results=[]
            if len(p_result[2]) > 0:
                post_results.append(p_result)
            t3 = time_synchronized()
            image_array = p_result[1]
            if outSource != 'NO':
                ppipe.stdin.write(image_array.tobytes())
            if (outVideoDir != 'NO') and video_flag:
                ret = vid_writer.write(image_array)
            t4 = time_synchronized()
            timestr2 = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
            if iframe % 100 == 0:
                outstr='%s,,read:%.1f ms,copy:%.1f, infer:%.1f ms, detinfer:%.1f ms,draw:%.1f ms, save:%.1f ms total:%.1f ms \n'%(timestr2,(t0-t00)*1000,(timeseg0-t0)*1000,(t1-timeseg0)*1000,(t2-t1)*1000,(t3-t2)*1000,(t4-t3)*1000,(t4-t00)*1000)
                wrtiteLog(fp_log,outstr)
            t00 = t4
    ## errors such as model-loading failures would be caught here
    #except Exception as e:
    #    print(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime()),'*'*20,'ERROR:',e)
    #    childCallback.send(e)  # pass the exception out through the pipe
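
# A minimal, self-contained sketch of the send-with-retry pattern used above for
# heartbeat/progress messages. Not part of the original pipeline; the server
# address and topic name are placeholders (assumptions), not real config values.
def send_with_retry_example(server='127.0.0.1:9092', topic='alg-task-results'):
    producer = KafkaProducer(bootstrap_servers=server,
                             value_serializer=lambda v: v.encode('utf-8'))
    msg = json.dumps({'msg_id': 'demo', 'type': 1, 'progressbar': '0.5000'}, ensure_ascii=False)
    try:
        # .get() blocks until the broker acknowledges the record (or raises)
        producer.send(topic, msg).get(timeout=10)
    except Exception:
        # on failure, rebuild the producer once and retry, as the worker above does
        producer = KafkaProducer(bootstrap_servers=server,
                                 value_serializer=lambda v: v.encode('utf-8'))
        producer.send(topic, msg).get(timeout=10)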

def lauch_process(gpuid,inSource,outSource,taskId,msgId,modelJson,kafka_par):
    if outSource=='NO':
        streamName='off-%s-%s'%(taskId,msgId)
    else:
        streamName='live-%s-%s'%(taskId,msgId)
    dataPar ={
        'imgData':'',
        'imgName':'testW',
        'streamName':streamName,
        'taskId':taskId,
        'msgId':msgId,
        'device':str(gpuid),
        'modelJson':modelJson,
        'kafka_par':kafka_par,
    }
    #dataPar['inSource'] = 'http://images.5gai.taauav.com/video/8bc32984dd893930dabb2856eb92b4d1.mp4';dataPar['outSource'] = None
    dataPar['inSource'] = inSource; dataPar['outSource'] = outSource
    process_uid=''.join(random.sample(string.ascii_letters + string.digits, 16)); dataPar['process_uid']=process_uid
    parent_conn, child_conn = multiprocessing.Pipe(); dataPar['callback']=child_conn
    gpuProcess=Process(target=onlineModelProcess,name='process:%s'%(process_uid),args=(dataPar,))
    gpuProcess.start()
    child_return = parent_conn.recv()  # blocks until the child reports model-load success (or an error)
    timestr2=time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime())
    print(timestr2,'-'*20,'process:%s ,msgId:%s , taskId:%s return:'%(process_uid,msgId,taskId),child_return)
    return gpuProcess

msg_dict_offline = {
    "biz_id":"hehuzhang",
    "mod_id":"ai",
    "msg_id":'bb'+''.join(random.sample(string.ascii_letters,30)),
    "offering_id":"http://vod.play.t-aaron.com/customerTrans/c49a2c620795d124f2ae4b10197b8d0e/303b7a58-17f3ef4494e-0004-f90c-f2c-7ec68.mp4",
    "offering_type":"mp4",
    "results_base_dir": "XJRW202203171535"+str(random.randint(10,99)),
    'outSource':'NO'
}
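
# Hedged sketch (not in the original code): how a task message shaped like
# msg_dict_offline could be published to one of the task topics that detector()
# subscribes to. The topic name comes from the commented-out config in __main__;
# the server address is a placeholder.
def publish_task_example(server='127.0.0.1:9092', topic='alg-offline-tasks'):
    producer = KafkaProducer(bootstrap_servers=server,
                             value_serializer=lambda v: v.encode('utf-8'))
    # serialize the task dict as JSON and wait for the broker's acknowledgement
    producer.send(topic, json.dumps(msg_dict_offline, ensure_ascii=False)).get(timeout=10)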

def detector_0(par):
    #### initialization
    consumer = KafkaConsumer(
        bootstrap_servers=par['server'],
        group_id=par['group_id'],
        auto_offset_reset='earliest',
        #max_poll_interval_ms=1000*60*6,
        #session_timeout_ms=1000*60*5,
        request_timeout_ms=15000,
        #enable_auto_commit=True
    )
    consumer.subscribe(par['topic'][0:2])
    kafka_par={ 'server':par['server'],'topic':par['topic'][2] }
    producer = KafkaProducer(
        bootstrap_servers=par['server'],  # tencent yun
        value_serializer=lambda v: v.encode('utf-8'),
        metadata_max_age_ms=120000)
    taskStatus={}
    taskStatus['onLine'] = Queue(100)
    taskStatus['offLine']= Queue(100)
    taskStatus['pidInfos']= {}
    fp_log=create_logFile(logdir=par['logDir'])
    wrtiteLog(fp_log,'########### master starts ###########\n')
    timeSleep=1
    #taskStatus['pidInfos'][31897]={'gpuProcess':'onlineProcess','type':'onLine'}
    time0=time.time()
    time0_kafQuery=time.time()
    time0_taskQuery=time.time()
    time0_sleep=time.time()
    time_interval=10; outStrList={}
    isleep=0
    while True:  ### poll once every timeSleep seconds
        ## 1 - read kafka and update the task queues
        try:
            #msgs = getAllRecords(consumer,par['topic'])
            msgs=[]
            for ii,msg in enumerate(consumer):
                consumer.commit()
                msgs.append(msg)
        except Exception as e:
            outstr='%s kafka connecting error:%s '%('#'*20,e)
            outstr=wrtiteLog(fp_log,outstr); print(outstr)
            time.sleep(timeSleep)
            continue
        #if get_whether_gpuProcess():
        for it in range(30):
            timestr=time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime())
            print('%s i=%d sleep:%s '%(timestr,isleep,it*10))
            time.sleep(10)
        isleep+=1
    print('########Program End#####')

def detector(par):
    #### initialization
    consumer = KafkaConsumer(
        bootstrap_servers=par['server'],
        group_id=par['group_id'],
        auto_offset_reset='earliest',
        #max_poll_interval_ms=1000*60*6,
        #session_timeout_ms=1000*60*5,
        #request_timeout_ms=11000,
        #enable_auto_commit=True
    )
    consumer.subscribe(par['topic'][0:2])
    kafka_par={ 'server':par['server'],'topic':par['topic'][2] }
    producer = KafkaProducer(
        bootstrap_servers=par['server'],  # tencent yun
        value_serializer=lambda v: v.encode('utf-8'),
        metadata_max_age_ms=120000)
    taskStatus={}
    taskStatus['onLine'] = Queue(100)
    taskStatus['offLine']= Queue(100)
    taskStatus['pidInfos']= {}
    timeSleep=1
    #taskStatus['pidInfos'][31897]={'gpuProcess':'onlineProcess','type':'onLine'}
    time0=time.time()
    time0_kafQuery=time.time()
    time0_taskQuery=time.time()
    time0_sleep=time.time()
    time_interval=10; outStrList={}
    isleep=0
    for ii,msg in enumerate(consumer):  # blocks, yielding task messages as they arrive
        try:
            taskInfos = eval(msg.value.decode('utf-8'))  # payload is a dict literal; see parse_task_message below for a safer variant
        except:
            outstr='%s msg format error,value:%s,offset:%d partition:%s topic:%s'%('#'*20,msg.value,msg.offset,msg.partition,msg.topic)
            continue
        outstr='%s value:%s,offset:%d partition:%s topic:%s'%('#'*20,msg.value,msg.offset,msg.partition,msg.topic)
        print(outstr)
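
# Hedged alternative to the bare eval() above (a sketch, not what detector()
# currently does): try strict JSON first, then fall back to ast.literal_eval,
# which accepts Python dict literals but refuses arbitrary expressions.
def parse_task_message(raw_bytes):
    text = raw_bytes.decode('utf-8')
    try:
        return json.loads(text)
    except ValueError:
        return ast.literal_eval(text)  # literals only; raises on anything else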

def get_file():
    print("file name:",__file__,sys._getframe().f_lineno)
    print("function name:", sys._getframe().f_code.co_name)
    print("module name:", sys._getframe().f_back.f_code.co_name)

if __name__ == '__main__':
    par={}
    ### topic[0] -- online tasks, topic[1] -- offline tasks, topic[2] -- results
    #par['server']='212.129.223.66:9092';par['topic']=('thsw','thsw2','testReturn');par['group_id']='test';
    #101.132.127.1:19092
    '''
    par['server']='101.132.127.1:19092';par['topic']=('alg-online-tasks','alg-offline-tasks','alg-task-results');par['group_id']='test';
    par['kafka']='mintors/kafka'
    par['modelJson']='conf/model.json'
    '''
    masterFile="conf/master_ten.json"
    assert os.path.exists(masterFile)
    with open(masterFile,'r') as fp:
        data=json.load(fp)
    get_file()
    par=data['par']
    print(par)
    detector(par)
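
# For reference, a plausible shape for data['par'] in conf/master_ten.json,
# inferred from how the fields are used above. Values are placeholders taken
# from the commented-out config, not the real deployment settings:
#
# {
#   "par": {
#     "server": "101.132.127.1:19092",
#     "topic": ["alg-online-tasks", "alg-offline-tasks", "alg-task-results"],
#     "group_id": "test",
#     "logDir": "logs/master",
#     "kafka": "mintors/kafka",
#     "modelJson": "conf/model.json"
#   }
# }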