@@ -1,6 +1,6 @@ | |||
<?xml version="1.0" encoding="UTF-8"?> | |||
<project version="4"> | |||
<component name="PublishConfigData" serverName="192.168.11.8" remoteFilesAllowedToDisappearOnAutoupload="false"> | |||
<component name="PublishConfigData" serverName="外网" remoteFilesAllowedToDisappearOnAutoupload="false"> | |||
<serverData> | |||
<paths name="10.21"> | |||
<serverdata> | |||
@@ -46,6 +46,13 @@ | |||
</mappings> | |||
</serverdata> | |||
</paths> | |||
<paths name="外网"> | |||
<serverdata> | |||
<mappings> | |||
<mapping deploy="/home/thsw/chenyukun/tuoheng_alg" local="$PROJECT_DIR$" web="/" /> | |||
</mappings> | |||
</serverdata> | |||
</paths> | |||
</serverData> | |||
</component> | |||
</project> |
@@ -6,7 +6,8 @@ | |||
<sshConfig authType="PASSWORD" host="192.168.10.21" id="adf5e1da-4910-4668-bfbb-432f4e2ae77c" port="22" nameFormat="DESCRIPTIVE" username="th" /> | |||
<sshConfig authType="PASSWORD" host="192.168.10.22" id="ac18a75e-ff42-4875-a5da-ad98d2d695ea" port="22" nameFormat="DESCRIPTIVE" username="th" /> | |||
<sshConfig authType="PASSWORD" connectionConfig="{"serverAliveInterval":300}" host="192.168.10.66" id="dcf03076-1bc5-4ce3-a4e4-38f7f00ea74a" port="32782" nameFormat="DESCRIPTIVE" username="root" /> | |||
<sshConfig authType="PASSWORD" host="192.168.11.7" id="5bb44c10-4e9c-4059-a0c0-9f2596b74bc0" port="22" nameFormat="DESCRIPTIVE" username="th" /> | |||
<sshConfig authType="PASSWORD" connectionConfig="{"proxyParams":{"proxyHost":"","proxyPort":-1,"proxyType":"IDE_WIDE_PROXY"}}" host="192.168.11.7" id="5bb44c10-4e9c-4059-a0c0-9f2596b74bc0" port="22" nameFormat="DESCRIPTIVE" username="th" useOpenSSHConfig="true" /> | |||
<sshConfig authType="PASSWORD" host="221.226.114.142" id="2af8cb49-06d5-499e-85f2-e22072c6c979" port="1011" nameFormat="DESCRIPTIVE" username="thsw" useOpenSSHConfig="true" /> | |||
</configs> | |||
</component> | |||
</project> |
@@ -44,6 +44,13 @@ | |||
</advancedOptions> | |||
</fileTransfer> | |||
</webServer> | |||
<webServer id="e0e06591-e01f-4d76-88e9-9c8ee17b919f" name="外网"> | |||
<fileTransfer accessType="SFTP" host="192.168.11.7" port="22" sshConfigId="5bb44c10-4e9c-4059-a0c0-9f2596b74bc0" sshConfig="th@192.168.11.7:22 password"> | |||
<advancedOptions> | |||
<advancedOptions dataProtectionLevel="Private" keepAliveTimeout="0" passiveMode="true" shareSSLContext="true" /> | |||
</advancedOptions> | |||
</fileTransfer> | |||
</webServer> | |||
</option> | |||
</component> | |||
</project> |
@@ -6,7 +6,8 @@ | |||
<component name="ChangeListManager"> | |||
<list default="true" id="4f7dccd9-8f92-4a6e-90cc-33890d102263" name="Changes" comment="Changes"> | |||
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" /> | |||
<change beforePath="$PROJECT_DIR$/util/Cv2Utils.py" beforeDir="false" afterPath="$PROJECT_DIR$/util/Cv2Utils.py" afterDir="false" /> | |||
<change beforePath="$PROJECT_DIR$/concurrency/IntelligentRecognitionProcess.py" beforeDir="false" afterPath="$PROJECT_DIR$/concurrency/IntelligentRecognitionProcess.py" afterDir="false" /> | |||
<change beforePath="$PROJECT_DIR$/test/路径/Test.py" beforeDir="false" afterPath="$PROJECT_DIR$/test/路径/Test.py" afterDir="false" /> | |||
</list> | |||
<option name="SHOW_DIALOG" value="false" /> | |||
<option name="HIGHLIGHT_CONFLICTS" value="true" /> | |||
@@ -135,7 +136,7 @@ | |||
"WebServerToolWindowPanel.toolwindow.show.date": "false", | |||
"WebServerToolWindowPanel.toolwindow.show.permissions": "false", | |||
"WebServerToolWindowPanel.toolwindow.show.size": "false", | |||
"last_opened_file_path": "D:/tuoheng/codenew/tuoheng_dsp", | |||
"last_opened_file_path": "D:/tuoheng/codenew/tuoheng_tool", | |||
"node.js.detected.package.eslint": "true", | |||
"node.js.detected.package.tslint": "true", | |||
"node.js.selected.package.eslint": "(autodetect)", | |||
@@ -150,22 +151,22 @@ | |||
}</component> | |||
<component name="RecentsManager"> | |||
<key name="CopyFile.RECENT_KEYS"> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\enums" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\entity" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\test\读写" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\config" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\test\算法" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\test\语法" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\test\正则" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\test\类型标注" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\test\游戏" /> | |||
</key> | |||
<key name="MoveFile.RECENT_KEYS"> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\config" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\test" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\test\设计模式\单例" /> | |||
<recent name="D:\tuoheng\codenew\tuoheng_alg\font" /> | |||
<recent name="D:\work\alg_new\tuoheng_alg\test\image" /> | |||
<recent name="D:\work\alg\tuoheng_alg\test\水印" /> | |||
</key> | |||
</component> | |||
<component name="RunManager" selected="Python.冒泡"> | |||
<configuration name="冒泡" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<component name="RunManager" selected="Python.test1"> | |||
<configuration name="CpuUtils" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<module name="tuoheng_alg" /> | |||
<option name="INTERPRETER_OPTIONS" value="" /> | |||
<option name="PARENT_ENVS" value="true" /> | |||
@@ -173,12 +174,12 @@ | |||
<env name="PYTHONUNBUFFERED" value="1" /> | |||
</envs> | |||
<option name="SDK_HOME" value="" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/算法" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/util" /> | |||
<option name="IS_MODULE_SDK" value="true" /> | |||
<option name="ADD_CONTENT_ROOTS" value="true" /> | |||
<option name="ADD_SOURCE_ROOTS" value="true" /> | |||
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/算法/冒泡.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/util/CpuUtils.py" /> | |||
<option name="PARAMETERS" value="" /> | |||
<option name="SHOW_COMMAND_LINE" value="false" /> | |||
<option name="EMULATE_TERMINAL" value="false" /> | |||
@@ -187,7 +188,7 @@ | |||
<option name="INPUT_FILE" value="" /> | |||
<method v="2" /> | |||
</configuration> | |||
<configuration name="插入" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<configuration name="csv_test" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<module name="tuoheng_alg" /> | |||
<option name="INTERPRETER_OPTIONS" value="" /> | |||
<option name="PARENT_ENVS" value="true" /> | |||
@@ -195,12 +196,12 @@ | |||
<env name="PYTHONUNBUFFERED" value="1" /> | |||
</envs> | |||
<option name="SDK_HOME" value="" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/算法" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/读写" /> | |||
<option name="IS_MODULE_SDK" value="true" /> | |||
<option name="ADD_CONTENT_ROOTS" value="true" /> | |||
<option name="ADD_SOURCE_ROOTS" value="true" /> | |||
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/算法/插入.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/读写/csv_test.py" /> | |||
<option name="PARAMETERS" value="" /> | |||
<option name="SHOW_COMMAND_LINE" value="false" /> | |||
<option name="EMULATE_TERMINAL" value="false" /> | |||
@@ -209,20 +210,20 @@ | |||
<option name="INPUT_FILE" value="" /> | |||
<method v="2" /> | |||
</configuration> | |||
<configuration name="选择" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<configuration name="editImage" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true"> | |||
<module name="tuoheng_alg" /> | |||
<option name="INTERPRETER_OPTIONS" value="" /> | |||
<option name="PARENT_ENVS" value="true" /> | |||
<envs> | |||
<env name="PYTHONUNBUFFERED" value="1" /> | |||
</envs> | |||
<option name="SDK_HOME" value="" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/算法" /> | |||
<option name="IS_MODULE_SDK" value="true" /> | |||
<option name="SDK_HOME" value="$PROJECT_DIR$/../../../software/anaconda/envs/test/python.exe" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/editimage" /> | |||
<option name="IS_MODULE_SDK" value="false" /> | |||
<option name="ADD_CONTENT_ROOTS" value="true" /> | |||
<option name="ADD_SOURCE_ROOTS" value="true" /> | |||
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/算法/选择.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/editimage/editImage.py" /> | |||
<option name="PARAMETERS" value="" /> | |||
<option name="SHOW_COMMAND_LINE" value="false" /> | |||
<option name="EMULATE_TERMINAL" value="false" /> | |||
@@ -231,20 +232,20 @@ | |||
<option name="INPUT_FILE" value="" /> | |||
<method v="2" /> | |||
</configuration> | |||
<configuration name="editImage" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true"> | |||
<configuration name="mysqltest" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true"> | |||
<module name="tuoheng_alg" /> | |||
<option name="INTERPRETER_OPTIONS" value="" /> | |||
<option name="PARENT_ENVS" value="true" /> | |||
<envs> | |||
<env name="PYTHONUNBUFFERED" value="1" /> | |||
</envs> | |||
<option name="SDK_HOME" value="$PROJECT_DIR$/../../../software/anaconda/envs/test/python.exe" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/editimage" /> | |||
<option name="IS_MODULE_SDK" value="false" /> | |||
<option name="SDK_HOME" value="" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test" /> | |||
<option name="IS_MODULE_SDK" value="true" /> | |||
<option name="ADD_CONTENT_ROOTS" value="true" /> | |||
<option name="ADD_SOURCE_ROOTS" value="true" /> | |||
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/editimage/editImage.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/mysqltest.py" /> | |||
<option name="PARAMETERS" value="" /> | |||
<option name="SHOW_COMMAND_LINE" value="false" /> | |||
<option name="EMULATE_TERMINAL" value="false" /> | |||
@@ -253,7 +254,7 @@ | |||
<option name="INPUT_FILE" value="" /> | |||
<method v="2" /> | |||
</configuration> | |||
<configuration name="mysqltest" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true"> | |||
<configuration name="test (1)" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<module name="tuoheng_alg" /> | |||
<option name="INTERPRETER_OPTIONS" value="" /> | |||
<option name="PARENT_ENVS" value="true" /> | |||
@@ -261,12 +262,12 @@ | |||
<env name="PYTHONUNBUFFERED" value="1" /> | |||
</envs> | |||
<option name="SDK_HOME" value="" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/内存优化/slots" /> | |||
<option name="IS_MODULE_SDK" value="true" /> | |||
<option name="ADD_CONTENT_ROOTS" value="true" /> | |||
<option name="ADD_SOURCE_ROOTS" value="true" /> | |||
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/mysqltest.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/内存优化/slots/test.py" /> | |||
<option name="PARAMETERS" value="" /> | |||
<option name="SHOW_COMMAND_LINE" value="false" /> | |||
<option name="EMULATE_TERMINAL" value="false" /> | |||
@@ -275,7 +276,7 @@ | |||
<option name="INPUT_FILE" value="" /> | |||
<method v="2" /> | |||
</configuration> | |||
<configuration name="test (1)" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<configuration name="test" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<module name="tuoheng_alg" /> | |||
<option name="INTERPRETER_OPTIONS" value="" /> | |||
<option name="PARENT_ENVS" value="true" /> | |||
@@ -283,12 +284,12 @@ | |||
<env name="PYTHONUNBUFFERED" value="1" /> | |||
</envs> | |||
<option name="SDK_HOME" value="" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/语法/time" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/集合" /> | |||
<option name="IS_MODULE_SDK" value="true" /> | |||
<option name="ADD_CONTENT_ROOTS" value="true" /> | |||
<option name="ADD_SOURCE_ROOTS" value="true" /> | |||
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/语法/time/test.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/集合/test.py" /> | |||
<option name="PARAMETERS" value="" /> | |||
<option name="SHOW_COMMAND_LINE" value="false" /> | |||
<option name="EMULATE_TERMINAL" value="false" /> | |||
@@ -297,7 +298,7 @@ | |||
<option name="INPUT_FILE" value="" /> | |||
<method v="2" /> | |||
</configuration> | |||
<configuration name="test (2)" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<configuration name="test1" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true"> | |||
<module name="tuoheng_alg" /> | |||
<option name="INTERPRETER_OPTIONS" value="" /> | |||
<option name="PARENT_ENVS" value="true" /> | |||
@@ -305,12 +306,12 @@ | |||
<env name="PYTHONUNBUFFERED" value="1" /> | |||
</envs> | |||
<option name="SDK_HOME" value="" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/语法/datetime" /> | |||
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/test/内存优化/slots" /> | |||
<option name="IS_MODULE_SDK" value="true" /> | |||
<option name="ADD_CONTENT_ROOTS" value="true" /> | |||
<option name="ADD_SOURCE_ROOTS" value="true" /> | |||
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/语法/datetime/test.py" /> | |||
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test/内存优化/slots/test1.py" /> | |||
<option name="PARAMETERS" value="" /> | |||
<option name="SHOW_COMMAND_LINE" value="false" /> | |||
<option name="EMULATE_TERMINAL" value="false" /> | |||
@@ -322,15 +323,19 @@ | |||
<list> | |||
<item itemvalue="Python.editImage" /> | |||
<item itemvalue="Python.mysqltest" /> | |||
<item itemvalue="Python.test1" /> | |||
<item itemvalue="Python.CpuUtils" /> | |||
<item itemvalue="Python.csv_test" /> | |||
<item itemvalue="Python.test (1)" /> | |||
<item itemvalue="Python.test (2)" /> | |||
<item itemvalue="Python.冒泡" /> | |||
<item itemvalue="Python.test" /> | |||
</list> | |||
<recent_temporary> | |||
<list> | |||
<item itemvalue="Python.冒泡" /> | |||
<item itemvalue="Python.test (2)" /> | |||
<item itemvalue="Python.test1" /> | |||
<item itemvalue="Python.test (1)" /> | |||
<item itemvalue="Python.csv_test" /> | |||
<item itemvalue="Python.CpuUtils" /> | |||
<item itemvalue="Python.test" /> | |||
</list> | |||
</recent_temporary> | |||
</component> | |||
@@ -507,7 +512,28 @@ | |||
<workItem from="1685929597469" duration="1586000" /> | |||
<workItem from="1686009758832" duration="4033000" /> | |||
<workItem from="1686099127317" duration="8648000" /> | |||
<workItem from="1686181421528" duration="587000" /> | |||
<workItem from="1686181421528" duration="9733000" /> | |||
<workItem from="1686530580527" duration="10215000" /> | |||
<workItem from="1686708793889" duration="28856000" /> | |||
<workItem from="1686787483987" duration="42321000" /> | |||
<workItem from="1686882826411" duration="32824000" /> | |||
<workItem from="1686963632234" duration="27367000" /> | |||
<workItem from="1687046210304" duration="54489000" /> | |||
<workItem from="1687141700932" duration="30282000" /> | |||
<workItem from="1687219517554" duration="39842000" /> | |||
<workItem from="1687306657563" duration="921000" /> | |||
<workItem from="1687307950930" duration="44000" /> | |||
<workItem from="1687308509659" duration="25425000" /> | |||
<workItem from="1687652018398" duration="8524000" /> | |||
<workItem from="1687736740408" duration="603000" /> | |||
<workItem from="1687737713032" duration="3837000" /> | |||
<workItem from="1687779451916" duration="5176000" /> | |||
<workItem from="1687933838564" duration="4146000" /> | |||
<workItem from="1687954592393" duration="1199000" /> | |||
<workItem from="1687997778160" duration="3792000" /> | |||
<workItem from="1688021144565" duration="1972000" /> | |||
<workItem from="1688083600084" duration="65000" /> | |||
<workItem from="1688083679443" duration="2398000" /> | |||
</task> | |||
<servers /> | |||
</component> | |||
@@ -533,21 +559,11 @@ | |||
<line>24</line> | |||
<option name="timeStamp" value="1" /> | |||
</line-breakpoint> | |||
<line-breakpoint enabled="true" suspend="THREAD" type="python-line"> | |||
<url>file://$PROJECT_DIR$/util/Cv2Utils.py</url> | |||
<line>2</line> | |||
<option name="timeStamp" value="2" /> | |||
</line-breakpoint> | |||
<line-breakpoint enabled="true" suspend="THREAD" type="python-line"> | |||
<url>file://$PROJECT_DIR$/test/aliyun/ossdemo.py</url> | |||
<line>4</line> | |||
<option name="timeStamp" value="4" /> | |||
</line-breakpoint> | |||
<line-breakpoint enabled="true" suspend="THREAD" type="python-line"> | |||
<url>file://$PROJECT_DIR$/util/Cv2Utils.py</url> | |||
<line>1</line> | |||
<option name="timeStamp" value="5" /> | |||
</line-breakpoint> | |||
<line-breakpoint enabled="true" suspend="THREAD" type="python-line"> | |||
<url>file://$PROJECT_DIR$/test/collections/deque.py</url> | |||
<line>134</line> | |||
@@ -570,12 +586,13 @@ | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$3.coverage" NAME="视频添加文字水印3 Coverage Results" MODIFIED="1661906152928" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$SnakeGame.coverage" NAME="SnakeGame 覆盖结果" MODIFIED="1684825356565" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/游戏" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$wraps.coverage" NAME="wraps 覆盖结果" MODIFIED="1684913804419" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/偏函数" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$CpuUtils.coverage" NAME="CpuUtils 覆盖结果" MODIFIED="1686972304076" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/util" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$ffmpeg12.coverage" NAME="ffmpeg12 覆盖结果" MODIFIED="1675391366890" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/ffmpeg11" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$Test__2_.coverage" NAME="Test (2) 覆盖结果" MODIFIED="1681796501563" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/路径" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test1.coverage" NAME="test1 覆盖结果" MODIFIED="1685341746877" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/序列化" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test1.coverage" NAME="test1 覆盖结果" MODIFIED="1687661266628" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/内存优化/slots" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$ossdemo.coverage" NAME="ossdemo 覆盖结果" MODIFIED="1681715255761" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/aliyun" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$Counter.coverage" NAME="Counter 覆盖结果" MODIFIED="1684894898737" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/collections" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test__1_.coverage" NAME="test (1) 覆盖结果" MODIFIED="1685348439455" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/语法/time" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test__1_.coverage" NAME="test (1) 覆盖结果" MODIFIED="1687056062763" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/内存优化/slots" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$aa1.coverage" NAME="aa1 覆盖结果" MODIFIED="1667351136888" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/ffmpeg11" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$singledispatch.coverage" NAME="singledispatch 覆盖结果" MODIFIED="1684912905741" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/偏函数" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg___$test.coverage" NAME="test 覆盖结果" MODIFIED="1668577200259" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/while" /> | |||
@@ -585,11 +602,12 @@ | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$ImageUtils.coverage" NAME="ImageUtils Coverage Results" MODIFIED="1663499421253" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/util" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$demo2.coverage" NAME="demo2 覆盖结果" MODIFIED="1684808407865" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/元类" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$ChainMap.coverage" NAME="ChainMap 覆盖结果" MODIFIED="1684905474944" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/collections" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$dsp_master.coverage" NAME="dsp_master 覆盖结果" MODIFIED="1680503755624" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$dsp_master.coverage" NAME="dsp_master 覆盖结果" MODIFIED="1686926216806" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$IntelligentRecognitionProcess.coverage" NAME="IntelligentRecognitionProcess 覆盖结果" MODIFIED="1682651444560" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/concurrency" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$demo3.coverage" NAME="demo3 覆盖结果" MODIFIED="1684809071819" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/元类" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test.coverage" NAME="test 覆盖结果" MODIFIED="1685342843175" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/语法/statistics_test" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$Test.coverage" NAME="Test 覆盖结果" MODIFIED="1683802532361" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/序列化" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test.coverage" NAME="test 覆盖结果" MODIFIED="1686930120727" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/集合" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$Test.coverage" NAME="Test 覆盖结果" MODIFIED="1686531035217" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/ffmpeg11" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$csv_test.coverage" NAME="csv_test 覆盖结果" MODIFIED="1687000802518" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/读写" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$mysqltest.coverage" NAME="mysqltest Coverage Results" MODIFIED="1660868712851" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$asnyc__1_.coverage" NAME="asnyc (1) Coverage Results" MODIFIED="1663458917599" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$cv2test1.coverage" NAME="cv2test1 覆盖结果" MODIFIED="1665738045603" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="/home/DATA/chenyukun/algSch/test/" /> | |||
@@ -607,10 +625,10 @@ | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$deque.coverage" NAME="deque 覆盖结果" MODIFIED="1684896079231" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/collections" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$producer_start__1_.coverage" NAME="producer_start (1) 覆盖结果" MODIFIED="1665832569996" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg___$ffmpeg11.coverage" NAME="ffmpeg11 覆盖结果" MODIFIED="1668410004435" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/ffmpeg11" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$__init__.coverage" NAME="__init__ 覆盖结果" MODIFIED="1684908678532" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/collections" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$demo.coverage" NAME="demo 覆盖结果" MODIFIED="1685071079173" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/语法/list" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$__init__.coverage" NAME="__init__ 覆盖结果" MODIFIED="1686535860174" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$demo.coverage" NAME="demo 覆盖结果" MODIFIED="1686927940237" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/读写" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$producer_start.coverage" NAME="producer_start1 覆盖结果" MODIFIED="1670999187123" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/kafka" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test__3_.coverage" NAME="test (3) 覆盖结果" MODIFIED="1684802056733" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/设计模式/单例" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test__3_.coverage" NAME="test (3) 覆盖结果" MODIFIED="1686902851380" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/内存优化/slots" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$numpy_test.coverage" NAME="numpy_test 覆盖结果" MODIFIED="1684205019028" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/numpy" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$namedtuple.coverage" NAME="namedtuple 覆盖结果" MODIFIED="1684898422076" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/collections" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$.coverage" NAME="冒泡 覆盖结果" MODIFIED="1685368101589" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/算法" /> | |||
@@ -634,6 +652,6 @@ | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$defaultdict.coverage" NAME="defaultdict 覆盖结果" MODIFIED="1684900122612" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/collections" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$ffmpeg13.coverage" NAME="ffmpeg13 覆盖结果" MODIFIED="1675394160900" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/ffmpeg11" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$KafkaUtils.coverage" NAME="KafkaUtils Coverage Results" MODIFIED="1663465345491" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/util" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test__2_.coverage" NAME="test (2) 覆盖结果" MODIFIED="1685349340367" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/语法/datetime" /> | |||
<SUITE FILE_PATH="coverage/tuoheng_alg$test__2_.coverage" NAME="test (2) 覆盖结果" MODIFIED="1686824265048" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/test/str" /> | |||
</component> | |||
</project> |
@@ -1,18 +1,23 @@ | |||
# -*- coding: utf-8 -*- | |||
# Configuration file name | |||
APPLICATION_CONFIG = "dsp_application.yml" | |||
APPLICATION_CONFIG = "dsp_application.json" | |||
# Encoding format | |||
UTF_8 = "utf-8" | |||
# File read mode | |||
R = 'r' | |||
ON_OR = "_on_or_" | |||
ON_AI = "_on_ai_" | |||
MP4 = ".mp4" | |||
# Initial progress | |||
init_progess = "0.0000" | |||
# Progress at 100% | |||
success_progess = "1.0000" | |||
# Width limit for each pulled frame: frames wider than 1400 px are scaled down by half, narrower frames are left unchanged | |||
width = 1400 | |||
COLOR = [ | |||
COLOR = ( | |||
[0, 0, 255], | |||
[255, 0, 0], | |||
[211, 0, 148], | |||
@@ -32,4 +37,9 @@ COLOR = [ | |||
[8, 101, 139], | |||
[171, 130, 255], | |||
[139, 112, 74], | |||
[205, 205, 180]] | |||
[205, 205, 180]) | |||
ONLINE = "online" | |||
OFFLINE = "offline" | |||
PHOTO = "photo" | |||
RECORDING = "recording" |
@@ -14,21 +14,77 @@ KAFKA_GROUP_ID = "group_id" | |||
KAFKA_AUTO_OFFSET_RESET = "auto_offset_reset" | |||
KAFKA_ENABLE_AUTO_COMMIT = "enable_auto_commit" | |||
KAFKA_MAX_POLL_RECORDS = "max_poll_records" | |||
REQUEST_ID = "request_id" | |||
REQUEST_ID = "request_id" | |||
FEEDBACK = "feedback" | |||
RECORDING = "recording" | |||
FBQUEUE = "fbQueue" | |||
CONTEXT = "context" | |||
MODEL = 'model' | |||
MSG = "msg" | |||
GPU_IDS = "gpu_ids" | |||
ANALYSE_TYPE = "analyse_type" | |||
COMMAND= "command" | |||
COMMAND = "command" | |||
START = "start" | |||
STOP = "stop" | |||
SERVICE = "service" | |||
FRAME_SCORE = "frame_score" | |||
PULL_URL = "pull_url" | |||
PUSH_URL = "push_url" | |||
ORIGINAL_URL = "original_url" | |||
ORIGINAL_TYPE = "original_type" | |||
IMAGE_URLS = "image_urls" | |||
RESULTS_BASE_DIR = "results_base_dir" | |||
MODELS = "models" | |||
CODE = 'code' | |||
CATEGORIES = "categories" | |||
ID = 'id' | |||
CONFIG = "config" | |||
VIDEO = "video" | |||
FILE_PATH = "file_path" | |||
KAFKA = "kafka" | |||
TOPIC = "topic" | |||
DSP_ALG_ONLINE_TASKS_TOPIC = "dsp-alg-online-tasks-topic" | |||
DSP_ALG_OFFLINE_TASKS_TOPIC = "dsp-alg-offline-tasks-topic" | |||
DSP_ALG_IMAGE_TASKS_TOPI = "dsp-alg-image-tasks-topic" | |||
DSP_RECORDING_TASKS_TOPI = "dsp-recording-task-topic" | |||
DSP_ALG_RESULTS_TOPIC = "dsp-alg-results-topic" | |||
DSP_RECORDING_RESULTS_TOPIC = "dsp-recording-result-topic" | |||
DSP = "dsp" | |||
ACTIVE = "active" | |||
PRODUCER = "producer" | |||
CONSUMER = "consumer" | |||
BOOTSTRAP_SERVERS = "bootstrap_servers" | |||
ALIYUN = "aliyun" | |||
ACCESS_KEY = "access_key" | |||
ACCESS_SECRET = "access_secret" | |||
OSS = "oss" | |||
ENDPOINT = "endpoint" | |||
BUCKET = "bucket" | |||
CONNECT_TIMEOUT = "connect_timeout" | |||
VOD = "vod" | |||
ECSREGIONID = "ecsRegionId" | |||
CATEID = "CateId" | |||
GPU = "gpu" | |||
ORDER = "order" | |||
LIMIT = "limit" | |||
MAXLOAD = "maxLoad" | |||
MAXMEMORY = "maxMemory" | |||
INCLUDENAN = "includeNan" | |||
EXCLUDEID = "excludeID" | |||
EXCLUDEUUID = "excludeUUID" | |||
BAIDU = "baidu" | |||
VEHICLE = "vehicle" | |||
APP_ID = "APP_ID" | |||
API_KEY = "API_KEY" | |||
SECRET_KEY = "SECRET_KEY" | |||
PERSON = "person" | |||
ORC = "orc" | |||
LOG = 'log' | |||
IS_VIDEO = "is_video" | |||
IS_IMAGE = "is_image" | |||
# Validation schema definition | |||
SCHEMA = { | |||
REQUEST_ID: { | |||
"request_id": { | |||
'type': 'string', | |||
'required': True, | |||
'empty': False, | |||
@@ -39,31 +95,37 @@ SCHEMA = { | |||
'required': True, | |||
'allowed': [START, STOP] | |||
}, | |||
'pull_url': { | |||
PULL_URL: { | |||
'type': 'string', | |||
'required': False, | |||
'nullable': True, | |||
'maxlength': 255 | |||
}, | |||
PUSH_URL: { | |||
'type': 'string', | |||
'required': False, | |||
'nullable': True, | |||
'maxlength': 255 | |||
}, | |||
'push_url': { | |||
ORIGINAL_URL: { | |||
'type': 'string', | |||
'required': False, | |||
'nullable': True, | |||
'maxlength': 255 | |||
}, | |||
'original_url': { | |||
ORIGINAL_TYPE: { | |||
'type': 'string', | |||
'required': False, | |||
'nullable': True, | |||
'maxlength': 255 | |||
}, | |||
'original_type': { | |||
"logo_url": { | |||
'type': 'string', | |||
'required': False, | |||
'nullable': True, | |||
'maxlength': 255 | |||
}, | |||
'image_urls': { | |||
IMAGE_URLS: { | |||
'type': 'list', | |||
'required': False, | |||
'schema': { | |||
@@ -72,13 +134,13 @@ SCHEMA = { | |||
'maxlength': 5000 | |||
} | |||
}, | |||
'results_base_dir': { | |||
RESULTS_BASE_DIR: { | |||
'type': 'string', | |||
'required': False, | |||
'nullable': True, | |||
'regex': r'^[a-zA-Z0-9]{0,36}$' | |||
}, | |||
'models': { | |||
MODELS: { | |||
'type': 'list', | |||
'required': False, | |||
'nullable': True, | |||
@@ -86,30 +148,44 @@ SCHEMA = { | |||
'type': 'dict', | |||
'required': True, | |||
'schema': { | |||
'code': { | |||
CODE: { | |||
'type': 'string', | |||
'required': True, | |||
'empty': False, | |||
'dependencies': 'categories', | |||
'dependencies': CATEGORIES, | |||
'regex': r'^[a-zA-Z0-9]{1,255}$' | |||
}, | |||
'categories': { | |||
IS_VIDEO: { | |||
'type': 'string', | |||
'required': True, | |||
'empty': False, | |||
'dependencies': CODE, | |||
'allowed': ["0", "1"] | |||
}, | |||
IS_IMAGE: { | |||
'type': 'string', | |||
'required': True, | |||
'empty': False, | |||
'dependencies': CODE, | |||
'allowed': ["0", "1"] | |||
}, | |||
CATEGORIES: { | |||
'type': 'list', | |||
'required': True, | |||
'dependencies': 'code', | |||
'dependencies': CODE, | |||
'schema': { | |||
'type': 'dict', | |||
'required': True, | |||
'schema': { | |||
'id': { | |||
ID: { | |||
'type': 'string', | |||
'required': True, | |||
'empty': False, | |||
'regex': r'^[a-zA-Z0-9]{0,255}$'}, | |||
'config': { | |||
CONFIG: { | |||
'type': 'dict', | |||
'required': False, | |||
'dependencies': 'id', | |||
'dependencies': ID, | |||
} | |||
} | |||
} | |||
@@ -121,7 +197,11 @@ SCHEMA = { | |||
def get_file_path(context): | |||
return context["video"]["file_path"] | |||
return context[VIDEO][FILE_PATH] | |||
def get_video_add_water(context): | |||
return context[VIDEO]["video_add_water"] | |||
def get_online_tasks_topic(context): | |||
@@ -129,166 +209,196 @@ def get_online_tasks_topic(context): | |||
def get_offline_tasks_topic(context): | |||
return context["kafka"]["topic"]["dsp-alg-offline-tasks-topic"] | |||
return context[KAFKA][TOPIC][DSP_ALG_OFFLINE_TASKS_TOPIC] | |||
def get_image_tasks_topic(context): | |||
return context["kafka"]["topic"]["dsp-alg-image-tasks-topic"] | |||
return context[KAFKA][TOPIC][DSP_ALG_IMAGE_TASKS_TOPI] | |||
def get_recording_tasks_topic(context): | |||
return context["kafka"]["topic"]["dsp-recording-task-topic"] | |||
return context[KAFKA][TOPIC][DSP_RECORDING_TASKS_TOPI] | |||
def get_kafka_producer_config(context): | |||
return context["kafka"][context["dsp"]["active"]]["producer"] | |||
return context[KAFKA][context[DSP][ACTIVE]][PRODUCER] | |||
def get_kafka_consumer_config(context): | |||
return context["kafka"][context["dsp"]["active"]]["consumer"] | |||
return context[KAFKA][context[DSP][ACTIVE]][CONSUMER] | |||
def get_kafka_bootstrap_servers(context): | |||
return context["kafka"][context["dsp"]["active"]]["bootstrap_servers"] | |||
return context[KAFKA][context[DSP][ACTIVE]][BOOTSTRAP_SERVERS] | |||
def get_kafka_results_topic(context): | |||
return context["kafka"]["topic"]["dsp-alg-results-topic"] | |||
return context[KAFKA][TOPIC][DSP_ALG_RESULTS_TOPIC] | |||
def get_kafka_recording_result_topic(context): | |||
return context["kafka"]["topic"]["dsp-recording-result-topic"] | |||
return context[KAFKA][TOPIC][DSP_RECORDING_RESULTS_TOPIC] | |||
def get_aliyun_access_key(context): | |||
return context["aliyun"]["access_key"] | |||
return context[ALIYUN][ACCESS_KEY] | |||
def get_aliyun_access_secret(context): | |||
return context["aliyun"]["access_secret"] | |||
return context[ALIYUN][ACCESS_SECRET] | |||
def get_aliyun_oss_endpoint(context): | |||
return context["aliyun"]["oss"]["endpoint"] | |||
return context[ALIYUN][OSS][ENDPOINT] | |||
def get_aliyun_oss_bucket(context): | |||
return context["aliyun"]["oss"]["bucket"] | |||
return context[ALIYUN][OSS][BUCKET] | |||
def get_aliyun_oss_connect_timeout(context): | |||
return context["aliyun"]["oss"]["connect_timeout"] | |||
return context[ALIYUN][OSS][CONNECT_TIMEOUT] | |||
def get_aliyun_vod_ecsRegionId(context): | |||
return context["aliyun"]["vod"]["ecsRegionId"] | |||
return context[ALIYUN][VOD][ECSREGIONID] | |||
def get_aliyun_vod_cateId(context): | |||
return context["aliyun"]["vod"][context["dsp"]["active"]]["CateId"] | |||
return context[ALIYUN][VOD][context[DSP][ACTIVE]][CATEID] | |||
def get_gpu_order(context): | |||
return context["gpu"]["order"] | |||
return context[GPU][ORDER] | |||
def get_gpu_limit(context): | |||
return context["gpu"]["limit"] | |||
return context[GPU][LIMIT] | |||
def get_gpu_maxLoad(context): | |||
return context["gpu"]["maxLoad"] | |||
return context[GPU][MAXLOAD] | |||
def get_gpu_maxMemory(context): | |||
return context["gpu"]["maxMemory"] | |||
return context[GPU][MAXMEMORY] | |||
def get_gpu_includeNan(context): | |||
return context["gpu"]["includeNan"] | |||
return context[GPU][INCLUDENAN] | |||
def get_gpu_excludeID(context): | |||
return context["gpu"]["excludeID"] | |||
return context[GPU][EXCLUDEID] | |||
def get_gpu_excludeUUID(context): | |||
return context["gpu"]["excludeUUID"] | |||
return context[GPU][EXCLUDEUUID] | |||
def get_baidu_vehicle_APP_ID(context): | |||
return context["baidu"]["vehicle"]["APP_ID"] | |||
return context[BAIDU][VEHICLE][APP_ID] | |||
def get_baidu_vehicle_API_KEY(context): | |||
return context["baidu"]["vehicle"]["API_KEY"] | |||
return context[BAIDU][VEHICLE][API_KEY] | |||
def get_baidu_vehicle_SECRET_KEY(context): | |||
return context["baidu"]["vehicle"]["SECRET_KEY"] | |||
return context[BAIDU][VEHICLE][SECRET_KEY] | |||
def get_baidu_person_APP_ID(context): | |||
return context["baidu"]["person"]["APP_ID"] | |||
return context[BAIDU][PERSON][APP_ID] | |||
def get_baidu_person_API_KEY(context): | |||
return context["baidu"]["person"]["API_KEY"] | |||
return context[BAIDU][PERSON][API_KEY] | |||
def get_baidu_person_SECRET_KEY(context): | |||
return context["baidu"]["person"]["SECRET_KEY"] | |||
return context[BAIDU][PERSON][SECRET_KEY] | |||
def get_baidu_ocr_APP_ID(context): | |||
return context["baidu"]["orc"]["APP_ID"] | |||
return context[BAIDU][ORC][APP_ID] | |||
def get_baidu_ocr_API_KEY(context): | |||
return context["baidu"]["orc"]["API_KEY"] | |||
return context[BAIDU][ORC][API_KEY] | |||
def get_baidu_ocr_SECRET_KEY(context): | |||
return context["baidu"]["orc"]["SECRET_KEY"] | |||
return context[BAIDU][ORC][SECRET_KEY] | |||
def get_log_base_path(context): | |||
return context["log"]["base_path"] | |||
return context[LOG]["base_path"] | |||
def get_log_enable_file(context): | |||
return context["log"]["enable_file_log"] | |||
return context[LOG]["enable_file_log"] | |||
def get_log_log_name(context): | |||
return context["log"]["log_name"] | |||
return context[LOG]["log_name"] | |||
def get_log_rotation(context): | |||
return context["log"]["rotation"] | |||
return context[LOG]["rotation"] | |||
def get_log_retention(context): | |||
return context["log"]["retention"] | |||
return context[LOG]["retention"] | |||
def get_log_log_fmt(context): | |||
return context["log"]["log_fmt"] | |||
return context[LOG]["log_fmt"] | |||
def get_log_level(context): | |||
return context["log"]["level"] | |||
return context[LOG]["level"] | |||
def get_log_enqueue(context): | |||
return context["log"]["enqueue"] | |||
return context[LOG]["enqueue"] | |||
def get_log_encoding(context): | |||
return context["log"]["encoding"] | |||
return context[LOG]["encoding"] | |||
def get_log_enable_stderr(context): | |||
return context["log"]["enable_stderr"] | |||
return context[LOG]["enable_stderr"] | |||
CV2_PULL_STREAM_TIMEOUT = "cv2_pull_stream_timeout" | |||
CV2_READ_STREAM_TIMEOUT = "cv2_read_stream_timeout" | |||
def get_pull_stream_timeout(context): | |||
return int(context[SERVICE][CV2_PULL_STREAM_TIMEOUT]) | |||
def get_read_stream_timeout(context): | |||
return int(context[SERVICE][CV2_READ_STREAM_TIMEOUT]) | |||
def get_service_timeout(context): | |||
return int(context[SERVICE]["timeout"]) | |||
FILTER = "filter" | |||
def get_similarity(context): | |||
return context[SERVICE][FILTER]["similarity"] | |||
def get_picture_similarity(context): | |||
return context[SERVICE][FILTER]["picture_similarity"] | |||
def get_frame_step(context): | |||
return int(context[SERVICE][FILTER]["frame_step"]) | |||
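The SCHEMA dictionary above uses the 'type' / 'required' / 'allowed' / 'regex' / 'dependencies' rule keys of the Cerberus validation library; the validating call itself is not part of this diff, so the following is only a sketch of how an incoming task message could be checked against it (the field values are made up, and SCHEMA / START are assumed to be imported from this constants module):

from cerberus import Validator

validator = Validator(SCHEMA)
task_msg = {
    "request_id": "abc123",
    "command": START,  # must be one of [START, STOP]
    "pull_url": "rtsp://camera.example/stream1",
    "models": [{
        "code": "001",
        "is_video": "1",
        "is_image": "0",
        "categories": [{"id": "0"}],
    }],
}
if not validator.validate(task_msg):
    # validator.errors maps each offending field to the rules it violated
    print(validator.errors)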
@@ -4,19 +4,27 @@ from loguru import logger | |||
class Common(Thread): | |||
def __init__(self, content, func, param1, param2): | |||
__slots__ = [ | |||
'__context', | |||
'__func', | |||
'__param1', | |||
'__param2', | |||
'__result', | |||
] | |||
def __init__(self, context, func, param1, param2): | |||
super(Common, self).__init__() | |||
self.content = content | |||
self.func = func | |||
self.param1 = param1 | |||
self.param2 = param2 | |||
self.result = None | |||
self.__context = context | |||
self.__func = func | |||
self.__param1 = param1 | |||
self.__param2 = param2 | |||
self.__result = None | |||
def get_result(self): | |||
self.join(60 * 60 * 12) | |||
return self.result | |||
return self.__result | |||
def run(self): | |||
logger.info("开始执行线程!") | |||
self.result = self.func(self.param1, self.param2) | |||
self.__result = self.__func(self.__param1, self.__param2) | |||
logger.info("线程停止完成!") |
@@ -1,10 +1,11 @@ | |||
# -*- coding: utf-8 -*- | |||
import time | |||
from threading import Thread | |||
from traceback import format_exc | |||
from loguru import logger | |||
from common import YmlConstant | |||
from util import KafkaUtils | |||
from util.KafkaUtils import CustomerKafkaProducer | |||
''' | |||
Issue-feedback thread | |||
@@ -12,37 +13,42 @@ from util import KafkaUtils | |||
class FeedbackThread(Thread): | |||
__slots__ = [ | |||
'__fbQueue', | |||
'__context' | |||
] | |||
def __init__(self, fbQueue, context): | |||
super().__init__() | |||
self.__fbQueue = fbQueue | |||
self.__context = context | |||
self.__dsp_alg_results_topic = YmlConstant.get_kafka_results_topic(context) | |||
self.__dsp_recording_result_topic = YmlConstant.get_kafka_recording_result_topic(context) | |||
''' | |||
Blocking fetch of a feedback message | |||
''' | |||
def getFeedback(self): | |||
return self.__fbQueue.get() | |||
def run(self): | |||
logger.info("启动问题反馈线程") | |||
kafkaProducer = KafkaUtils.CustomerKafkaProducer(self.__context) | |||
kafkaProducer = CustomerKafkaProducer(self.__context) | |||
dsp_alg_results_topic = self.__context["kafka"]["topic"]["dsp-alg-results-topic"] | |||
dsp_recording_result_topic = self.__context["kafka"]["topic"]["dsp-recording-result-topic"] | |||
while True: | |||
logger.info("问题反馈发送消息循环") | |||
feedback = {} | |||
try: | |||
fb = self.getFeedback() | |||
if fb is not None and len(fb) > 0: | |||
feedback = fb.get(YmlConstant.FEEDBACK) | |||
recording = fb.get(YmlConstant.RECORDING) | |||
feedback = fb.get("feedback") | |||
recording = fb.get("recording") | |||
if feedback is not None and len(feedback) > 0: | |||
kafkaProducer.sender(self.__dsp_alg_results_topic, feedback[YmlConstant.REQUEST_ID], feedback, 1) | |||
kafkaProducer.sender(dsp_alg_results_topic, feedback["request_id"], feedback, 1) | |||
if recording is not None and len(recording) > 0: | |||
kafkaProducer.sender(self.__dsp_recording_result_topic, recording[YmlConstant.REQUEST_ID], recording, 1) | |||
kafkaProducer.sender(dsp_recording_result_topic, recording["request_id"], recording, 1) | |||
else: | |||
time.sleep(1) | |||
except Exception as e: | |||
logger.exception("问题反馈异常:{}, requestId:{}", e, feedback.get(YmlConstant.REQUEST_ID)) | |||
except Exception: | |||
logger.error("问题反馈异常:{}, requestId:{}", format_exc(), feedback.get("request_id")) | |||
logger.info("问题反馈线程执行完成") |
@@ -1,9 +1,13 @@ | |||
import copy | |||
import time | |||
from concurrent.futures import ThreadPoolExecutor, as_completed | |||
from threading import Thread | |||
from time import sleep | |||
from traceback import format_exc | |||
from loguru import logger | |||
import cv2 | |||
from enums.ExceptionEnum import ExceptionType | |||
from exception.CustomerException import ServiceException | |||
from util.AliyunSdk import AliyunOssSdk | |||
from util import TimeUtils, ImageUtils | |||
from entity import FeedBack | |||
@@ -12,41 +16,16 @@ from util.PlotsUtils import draw_painting_joint | |||
class FileUpload(Thread): | |||
def __init__(self, fbQueue, content, msg, imageQueue, analyse_type): | |||
__slots__ = ('_fbQueue', '_context', '_imageQueue', '_analyse_type', '_msg', '_base_dir') | |||
def __init__(self, fbQueue, context, msg, imageQueue, analyse_type, base_dir): | |||
super().__init__() | |||
self.fbQueue = fbQueue | |||
self.content = content | |||
self.imageQueue = imageQueue | |||
self.analyse_type = analyse_type | |||
self.msg = msg | |||
self.similarity = self.content["service"]["filter"]["similarity"] | |||
self.picture_similarity = self.content["service"]["filter"]["picture_similarity"] | |||
self.frame_step = int(self.content["service"]["filter"]["frame_step"]) | |||
# Push the execution result | |||
def sendResult(self, result): | |||
self.fbQueue.put(result) | |||
''' | |||
For original-image information such as detection targets, the target is denoted by "O" | |||
''' | |||
def build_image_name(self, current_frame, last_frame, mode_type, image_type, target): | |||
image_format = "{base_dir}/{time_now}_frame-{current_frame}-{last_frame}_type_{random_num}-{mode_type}" \ | |||
"-{target}-{requestId}_{image_type}.jpg" | |||
random_num = TimeUtils.now_date_to_str(TimeUtils.YMDHMSF) | |||
time_now = TimeUtils.now_date_to_str("%Y-%m-%d-%H-%M-%S") | |||
image_name = image_format.format( | |||
base_dir=self.msg.get('results_base_dir'), | |||
time_now=time_now, | |||
current_frame=current_frame, | |||
last_frame=last_frame, | |||
random_num=random_num, | |||
mode_type=mode_type, | |||
target=target, | |||
requestId=self.msg.get('request_id'), | |||
image_type=image_type) | |||
return image_name | |||
self._fbQueue = fbQueue | |||
self._context = context | |||
self._imageQueue = imageQueue | |||
self._analyse_type = analyse_type | |||
self._msg = msg | |||
self._base_dir = base_dir | |||
''' | |||
@@ -55,89 +34,44 @@ class FileUpload(Thread): | |||
class ImageFileUpload(FileUpload): | |||
def handle_image(self, high_score_image, frame_all): | |||
flag = True | |||
if self.picture_similarity and len(high_score_image) > 0: | |||
hash1 = ImageUtils.dHash(high_score_image.get("or_frame")) | |||
hash2 = ImageUtils.dHash(frame_all.get("frame")) | |||
dist = ImageUtils.Hamming_distance(hash1, hash2) | |||
similarity = 1 - dist * 1.0 / 64 | |||
if similarity >= self.similarity: | |||
flag = False | |||
if len(high_score_image) > 0: | |||
diff_frame_num = frame_all.get("cct_frame") - high_score_image.get("current_frame") | |||
if diff_frame_num < self.frame_step: | |||
flag = False | |||
det_result = frame_all.get("det_xywh") | |||
model_info = [] | |||
if flag and det_result is not None and len(det_result) > 0: | |||
''' | |||
det_xywh:{ | |||
'code':{ | |||
1: [[detect_targets_code, box, score, label_array, color]] | |||
} | |||
} | |||
Model code: modeCode | |||
Detection target: detectTargetCode | |||
''' | |||
# Parse the data by model code | |||
for modelCode in list(det_result.keys()): | |||
# Detection-target entries under this model code | |||
det_info = det_result.get(modelCode) | |||
if det_info is not None and len(det_info) > 0: | |||
for detectTargetCode in list(det_info.keys()): | |||
target_list = det_info.get(detectTargetCode) | |||
if target_list is not None and len(target_list) > 0: | |||
orFrame = copy.deepcopy(frame_all.get("frame")) | |||
for target in target_list: | |||
draw_painting_joint(target[1], orFrame, target[3], target[2], target[4], "leftTop") | |||
model_info.append({ | |||
"modelCode": str(modelCode), | |||
"detectTargetCode": str(detectTargetCode), | |||
"frame": orFrame | |||
}) | |||
if len(model_info) > 0: | |||
high_score_image["or_frame"] = frame_all.get("frame") | |||
high_score_image["current_frame"] = frame_all.get("cct_frame") | |||
image_result = { | |||
"or_frame": frame_all.get("frame"), | |||
"model_info": model_info, | |||
"current_frame": frame_all.get("cct_frame"), | |||
"last_frame": frame_all.get("cct_frame") + self.frame_step, | |||
"progress": "", | |||
"mode_service": self.analyse_type, | |||
} | |||
return image_result | |||
return None | |||
__slots__ = () | |||
def run(self): | |||
logger.info("启动图片上传线程, requestId:{}", self.msg.get("request_id")) | |||
requestId = self._msg.get("request_id") | |||
logger.info("启动图片上传线程, requestId:{}", requestId) | |||
# Initialize the OSS client | |||
aliyunOssSdk = AliyunOssSdk(self.content, self.msg.get("request_id")) | |||
aliyunOssSdk = AliyunOssSdk(self._base_dir, requestId) | |||
aliyunOssSdk.get_oss_bucket() | |||
high_score_image = {} | |||
similarity = self._context["service"]["filter"]["similarity"] | |||
picture_similarity = bool(self._context["service"]["filter"]["picture_similarity"]) | |||
frame_step = int(self._context["service"]["filter"]["frame_step"]) | |||
image_queue = self._imageQueue | |||
analyse_type = self._analyse_type | |||
results_base_dir = self._msg.get("results_base_dir") | |||
fb_queue = self._fbQueue | |||
with ThreadPoolExecutor(max_workers=5) as t: | |||
try: | |||
while True: | |||
try: | |||
# Fetch a message from the queue | |||
image_msg = self.imageQueue.get() | |||
image_msg = image_queue.get(timeout=43200) | |||
if image_msg is not None and len(image_msg) > 0: | |||
image_dict = image_msg.get("image") | |||
command = image_msg.get("command") | |||
if command == 'stop': | |||
if command == "stop": | |||
break | |||
if image_dict is not None and len(image_dict) > 0: | |||
image_result = self.handle_image(high_score_image, image_dict) | |||
if image_result is not None: | |||
image_result = handle_image(high_score_image, image_dict, picture_similarity, | |||
similarity, frame_step, analyse_type) | |||
if image_result: | |||
# Encode the image frame | |||
task = [] | |||
or_result, or_image = cv2.imencode(".jpg", image_result.get("or_frame")) | |||
or_image_name = self.build_image_name(str(image_result.get("current_frame")), | |||
str(image_result.get("last_frame")), | |||
image_result.get("mode_service"), | |||
"OR", "O") | |||
or_image_name = build_image_name(str(image_result.get("current_frame")), | |||
str(image_result.get("last_frame")), | |||
image_result.get("mode_service"), | |||
"OR", "O", results_base_dir, requestId) | |||
or_future = t.submit(aliyunOssSdk.sync_upload_file, or_image_name, | |||
or_image.tobytes()) | |||
task.append(or_future) | |||
@@ -145,17 +79,18 @@ class ImageFileUpload(FileUpload): | |||
msg_list = [] | |||
for model_info in model_info_list: | |||
ai_result, ai_image = cv2.imencode(".jpg", model_info.get("frame")) | |||
ai_image_name = self.build_image_name(str(image_result.get("current_frame")), | |||
str(image_result.get("last_frame")), | |||
image_result.get("mode_service"), | |||
"AI", model_info.get("detectTargetCode")) | |||
ai_image_name = build_image_name(str(image_result.get("current_frame")), | |||
str(image_result.get("last_frame")), | |||
image_result.get("mode_service"), | |||
"AI", model_info.get("detectTargetCode"), | |||
results_base_dir, requestId) | |||
ai_future = t.submit(aliyunOssSdk.sync_upload_file, ai_image_name, | |||
ai_image.tobytes()) | |||
task.append(ai_future) | |||
msg_list.append( | |||
{"feedback": FeedBack.message_feedback(self.msg.get('request_id'), | |||
{"feedback": FeedBack.message_feedback(requestId, | |||
AnalysisStatus.RUNNING.value, | |||
self.analyse_type, "", "", | |||
analyse_type, "", "", | |||
image_result.get("progress"), | |||
or_image_name, | |||
ai_image_name, | |||
@@ -165,12 +100,95 @@ class ImageFileUpload(FileUpload): | |||
for thread_result in as_completed(task): | |||
thread_result.result() | |||
for msg in msg_list: | |||
sendResult(fb_queue, msg, requestId)
else: | |||
sleep(1)
except Exception:
logger.error("图片上传异常:{}, requestId:{}", format_exc(), requestId)
finally: | |||
high_score_image.clear() | |||
logger.info("停止图片上传线程, requestId:{}", requestId) | |||
logger.info("停止图片上传线程, requestId:{}", self.msg.get("request_id")) | |||
def sendResult(fbQueue, result, requestId): | |||
try: | |||
fbQueue.put(result, timeout=10) | |||
except Exception: | |||
logger.error("添加反馈到队列超时异常:{}, requestId:{}", format_exc(), requestId) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
def build_image_name(current_frame, last_frame, mode_type, image_type, target, results_base_dir, requestId): | |||
image_format = "{base_dir}/{time_now}_frame-{current_frame}-{last_frame}_type_{random_num}-{mode_type}" \ | |||
"-{target}-{requestId}_{image_type}.jpg" | |||
random_num = TimeUtils.now_date_to_str(TimeUtils.YMDHMSF) | |||
time_now = TimeUtils.now_date_to_str("%Y-%m-%d-%H-%M-%S") | |||
image_name = image_format.format( | |||
base_dir=results_base_dir, | |||
time_now=time_now, | |||
current_frame=current_frame, | |||
last_frame=last_frame, | |||
random_num=random_num, | |||
mode_type=mode_type, | |||
target=target, | |||
requestId=requestId, | |||
image_type=image_type) | |||
return image_name | |||
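# Editorial sketch, not part of the diff: with hypothetical arguments, the object key produced by
# build_image_name above looks roughly like this (assuming TimeUtils.YMDHMSF is a millisecond
# timestamp, which is what keeps the "random_num" part unique per frame):
#   build_image_name("120", "280", "online", "OR", "O", "results/2022-10-08/demo", "req-001")
#   -> "results/2022-10-08/demo/2022-10-08-12-30-05_frame-120-280_type_20221008123005123-online-O-req-001_OR.jpg"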
def handle_image(high_score_image, frame_all, picture_similarity, similarity, frame_step, analyse_type): | |||
flag = True | |||
if picture_similarity and len(high_score_image) > 0: | |||
hash1 = ImageUtils.dHash(high_score_image.get("or_frame")) | |||
hash2 = ImageUtils.dHash(frame_all[0][1]) | |||
dist = ImageUtils.Hamming_distance(hash1, hash2) | |||
similarity_1 = 1 - dist * 1.0 / 64 | |||
if similarity_1 >= similarity: | |||
flag = False | |||
if len(high_score_image) > 0: | |||
diff_frame_num = frame_all[0][2] - high_score_image.get("current_frame") | |||
if diff_frame_num < frame_step: | |||
flag = False | |||
det_result = frame_all[1] | |||
model_info = [] | |||
if flag and det_result is not None and len(det_result) > 0: | |||
''' | |||
det_xywh:{ | |||
'code':{ | |||
1: [[detect_targets_code, box, score, label_array, color]] | |||
} | |||
} | |||
模型编号:modeCode | |||
检测目标:detectTargetCode | |||
''' | |||
# 根据模型编码解析数据
for modelCode in list(det_result.keys()): | |||
# 模型编号下面的检测目标对象 | |||
det_info = det_result.get(modelCode) | |||
if det_info is not None and len(det_info) > 0: | |||
for detectTargetCode in list(det_info.keys()): | |||
target_list = det_info.get(detectTargetCode) | |||
if target_list is not None and len(target_list) > 0: | |||
# orFrame = loads(dumps(frame_all.get("frame"))) | |||
orFrame = frame_all[0][1].copy() | |||
for target in target_list: | |||
draw_painting_joint(target[1], orFrame, target[3], target[2], target[4], "leftTop") | |||
model_info.append({ | |||
"modelCode": str(modelCode), | |||
"detectTargetCode": str(detectTargetCode), | |||
"frame": orFrame | |||
}) | |||
if len(model_info) > 0: | |||
high_score_image["or_frame"] = frame_all[0][1] | |||
high_score_image["current_frame"] = frame_all[0][2] | |||
image_result = { | |||
"or_frame": frame_all[0][1], | |||
"model_info": model_info, | |||
"current_frame": frame_all[0][2], | |||
"last_frame": frame_all[0][2] + frame_step, | |||
"progress": "", | |||
"mode_service": analyse_type, | |||
} | |||
return image_result | |||
return None |
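# Editorial sketch (not from the original source): the similarity gate above leans on
# ImageUtils.dHash and ImageUtils.Hamming_distance; a minimal difference-hash over an 8x8-bit
# grid looks roughly like this, which is why the code normalises the distance by 64.
import cv2

def dhash_sketch(frame, hash_size=8):
    # Shrink to (hash_size+1) x hash_size and compare horizontally adjacent pixels.
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    small = cv2.resize(gray, (hash_size + 1, hash_size))
    bits = small[:, 1:] > small[:, :-1]
    return sum(1 << i for i, bit in enumerate(bits.flatten()) if bit)

def hamming_distance_sketch(hash1, hash2):
    return bin(hash1 ^ hash2).count("1")

# similarity = 1 - hamming_distance_sketch(h1, h2) / 64.0 reproduces the 1 - dist / 64 formula above.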
@@ -1,62 +1,84 @@ | |||
# -*- coding: utf-8 -*- | |||
from queue import Queue | |||
from threading import Thread | |||
import time | |||
from traceback import format_exc | |||
from loguru import logger | |||
from common.Constant import init_progess | |||
from common.YmlConstant import FEEDBACK | |||
from enums.ExceptionEnum import ExceptionType | |||
from exception.CustomerException import ServiceException | |||
from util import TimeUtils | |||
from enums.AnalysisStatusEnum import AnalysisStatus | |||
from entity.FeedBack import message_feedback | |||
class Heartbeat(Thread): | |||
__slots__ = ('__fbQueue', '__hbQueue', '__request_id', '__analyse_type', '__progress') | |||
def __init__(self, fbQueue, hbQueue, request_id, analyse_type): | |||
super().__init__() | |||
self.__fbQueue = fbQueue
self.__hbQueue = hbQueue
self.__request_id = request_id
self.__analyse_type = analyse_type
self.__progress = init_progess
def getHbQueue(self): | |||
eBody = None | |||
try: | |||
eBody = self.__hbQueue.get(block=False)
except Exception as e: | |||
pass | |||
return eBody | |||
# 推送执行结果 | |||
def sendResult(self, result): | |||
try: | |||
self.__fbQueue.put(result, timeout=10) | |||
except Exception: | |||
logger.error("添加反馈到队列超时异常:{}, requestId:{}", format_exc(), self.__request_id) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
def sendHbQueue(self, result): | |||
try: | |||
self.__hbQueue.put(result, timeout=10) | |||
except Exception: | |||
logger.error("添加心跳到队列超时异常:{}, requestId:{}", format_exc(), self.__request_id) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
def sendhbMessage(self, analysisStatus): | |||
self.sendResult({"feedback": message_feedback(self.request_id, | |||
analysisStatus, | |||
self.analyse_type, | |||
progress=self.progress, | |||
analyse_time=TimeUtils.now_date_to_str())}) | |||
self.sendResult({FEEDBACK: message_feedback(self.__request_id, | |||
analysisStatus, | |||
self.__analyse_type, | |||
progress=self.__progress, | |||
analyse_time=TimeUtils.now_date_to_str())}) | |||
def run(self): | |||
logger.info("开始启动心跳线程!requestId:{}", self.request_id) | |||
logger.info("开始启动心跳线程!requestId:{}", self.__request_id) | |||
hb_init_num = 0 | |||
start_time = time.time() | |||
while True: | |||
try: | |||
if time.time() - start_time > 43200: | |||
logger.info("心跳线程运行超时!!!!requestId:{}", self.__request_id) | |||
break | |||
time.sleep(3) | |||
hb_msg = self.getHbQueue() | |||
if hb_msg is not None and len(hb_msg) > 0: | |||
command = hb_msg.get("command") | |||
hb_value = hb_msg.get("hb_value") | |||
if 'stop' == command: | |||
logger.info("开始终止心跳线程, requestId:{}", self.request_id) | |||
logger.info("开始终止心跳线程, requestId:{}", self.__request_id) | |||
break | |||
if hb_value is not None: | |||
self.__progress = hb_value
if hb_init_num % 30 == 0: | |||
self.sendhbMessage(AnalysisStatus.RUNNING.value) | |||
hb_init_num += 3 | |||
except Exception:
logger.error("心跳线程异常:{}, requestId:{}", format_exc(), self.__request_id)
logger.info("心跳线程停止完成!requestId:{}", self.__request_id)
@@ -1,301 +1,400 @@ | |||
# -*- coding: utf-8 -*- | |||
import os | |||
import time | |||
from multiprocessing import Process, Queue | |||
from os import getpid | |||
from traceback import format_exc | |||
import psutil | |||
from loguru import logger | |||
from util.LogUtils import init_log | |||
from concurrency.FileUploadThread import ImageFileUpload | |||
from concurrency.HeartbeatThread import Heartbeat | |||
from entity.FeedBack import message_feedback | |||
from enums.AnalysisStatusEnum import AnalysisStatus | |||
from enums.ExceptionEnum import ExceptionType | |||
from exception.CustomerException import ServiceException | |||
from util.Cv2Utils import check_video_stream, build_video_info, pull_read_video_stream, clear_pull_p | |||
from util.TimeUtils import now_date_to_str | |||
class PullVideoStreamProcess(Process): | |||
__slots__ = ('_command', '_msg', '_context', '_pullQueue', '_fbQueue', '_hbQueue', '_imageQueue', '_analyse_type',
"_base_dir")
def __init__(self, msg, context, pullQueue, fbQueue, hbQueue, imageQueue, analyse_type, base_dir):
super().__init__() | |||
self._command = Queue()
self._msg = msg
self._context = context
self._pullQueue = pullQueue
self._fbQueue = fbQueue
self._hbQueue = hbQueue
self._imageQueue = imageQueue
self._analyse_type = analyse_type
self._base_dir = base_dir
def sendCommand(self, result, enable_ex=True):
try:
self._command.put(result, timeout=10)
except Exception:
logger.error("添加队列超时异常:{}, requestId:{}", format_exc(), self._msg.get("request_id"))
if enable_ex:
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0],
ExceptionType.SERVICE_INNER_EXCEPTION.value[1])
def start_heartbeat(self, hb): | |||
if hb is None: | |||
hb = Heartbeat(self._fbQueue, self._hbQueue, self._msg.get("request_id"), self._analyse_type) | |||
hb.setDaemon(True) | |||
hb.start() | |||
start_time = time.time() | |||
retry_count = 0 | |||
while True: | |||
if hb.is_alive(): | |||
return hb | |||
retry_count += 1 | |||
if retry_count > 10: | |||
logger.error("心跳线程异常重试失败!requestId:{}", self._msg.get("request_id")) | |||
raise Exception("心跳线程启动失败") | |||
if not hb.is_alive(): | |||
logger.warning("心跳线程异常等待中, requestId:{}", self._msg.get("request_id")) | |||
time.sleep(1) | |||
if time.time() - start_time > 3: | |||
logger.warning("心跳线程异常重启中, requestId:{}", self._msg.get("request_id")) | |||
hb.start() | |||
continue | |||
def start_File_upload(self, imageFileUpload): | |||
if imageFileUpload is None: | |||
imageFileUpload = ImageFileUpload(self._fbQueue, self._context, self._msg, self._imageQueue, | |||
self._analyse_type, self._base_dir) | |||
imageFileUpload.setDaemon(True) | |||
imageFileUpload.start() | |||
start_time = time.time() | |||
retry_count = 0 | |||
while True: | |||
if imageFileUpload.is_alive(): | |||
return imageFileUpload | |||
retry_count += 1 | |||
if retry_count > 10: | |||
logger.error("图片上传线程异常重试失败!requestId:{}", self._msg.get("request_id")) | |||
raise Exception("图片线程启动失败") | |||
if not imageFileUpload.is_alive(): | |||
logger.warning("图片上传线程异常等待中, requestId:{}", self._msg.get("request_id")) | |||
time.sleep(1) | |||
if time.time() - start_time > 3: | |||
logger.warning("图片上传线程异常重启中, requestId:{}", self._msg.get("request_id")) | |||
imageFileUpload.start() | |||
start_time = time.time() | |||
continue | |||
def putQueue(queue, result, requestId, enable_ex=True): | |||
try: | |||
queue.put(result, timeout=10) | |||
except Exception: | |||
logger.error("添加队列超时异常:{}, requestId:{}", format_exc(), requestId) | |||
if enable_ex: | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
def getNoBlockQueue(queue): | |||
eBody = None | |||
try: | |||
eBody = queue.get(block=False) | |||
except Exception: | |||
pass | |||
return eBody | |||
def check(start_time, service_timeout, requestId, imageFileUpload, hb): | |||
create_task_time = time.time() - start_time | |||
if create_task_time > service_timeout: | |||
logger.error("分析超时, 超时时间:{}, requestId: {}", create_task_time, requestId) | |||
raise ServiceException(ExceptionType.ANALYSE_TIMEOUT_EXCEPTION.value[0], | |||
ExceptionType.ANALYSE_TIMEOUT_EXCEPTION.value[1]) | |||
# 检测图片上传线程是否正常运行 | |||
if imageFileUpload is not None and not imageFileUpload.is_alive(): | |||
logger.error("未检测到图片上传线程活动,图片上传线程可能出现异常, requestId:{}", requestId) | |||
raise Exception("未检测到图片上传线程活动,图片上传线程可能出现异常!") | |||
# 检测心跳线程是否正常运行 | |||
if hb is not None and not hb.is_alive(): | |||
logger.error("未检测到心跳线程活动,心跳线程可能出现异常, requestId:{}", requestId) | |||
raise Exception("未检测到心跳线程活动,心跳线程可能出现异常!") | |||
class OnlinePullVideoStreamProcess(PullVideoStreamProcess): | |||
__slots__ = () | |||
def run(self): | |||
pull_p = None
imageFileUpload = None | |||
hb = None | |||
requestId = '1' | |||
pull_queue = self._pullQueue | |||
fb_queue = self._fbQueue | |||
image_queue = self._imageQueue | |||
hb_queue = self._hbQueue | |||
try: | |||
base_dir = self._base_dir | |||
# 加载日志框架 | |||
init_log(base_dir) | |||
requestId = self._msg.get("request_id") | |||
pull_url = self._msg.get("pull_url") | |||
logger.info("开启视频拉流进程, requestId:{}", requestId) | |||
pull_stream_timeout = int(self._context["service"]["cv2_pull_stream_timeout"]) | |||
read_stream_timeout = int(self._context["service"]["cv2_read_stream_timeout"]) | |||
service_timeout = int(self._context["service"]["timeout"]) | |||
command_queue = self._command | |||
# 视频相关配置 | |||
width = None | |||
height = None | |||
width_height_3 = None | |||
all_frames = 0 | |||
w_2 = None | |||
h_2 = None | |||
# 开启图片上传线程 | |||
imageFileUpload = self.start_File_upload(imageFileUpload)
# 开启心跳线程 | |||
hb = self.start_heartbeat(hb)
cv2_init_num = 1 | |||
init_pull_num = 1 | |||
start_time = time.time() | |||
pull_stream_start_time = time.time() | |||
pull_stream_read_start_time = time.time() | |||
kill_parent_process_timeout = time.time() | |||
concurrent_frame = 1 | |||
stop_pull_stream_step = False | |||
while True: | |||
# 检测任务执行是否超时、心跳线程是否正常、图片上传线程是否正常 | |||
check(start_time, service_timeout, requestId, imageFileUpload, hb)
# 获取指令信息 | |||
command = getNoBlockQueue(command_queue)
if command is not None:
# 停止拉流
if 'stop_pull_stream' == command.get("command"): | |||
self.sendPullQueue({"status": "9"}) # 9 停止拉流 | |||
putQueue(pull_queue, ("9",), requestId) # 9 停止拉流 | |||
stop_pull_stream_step = True | |||
clear_pull_p(pull_p, requestId)
continue | |||
# 停止图片上传线程 | |||
if 'stop_image_hb' == command.get("command"): | |||
self.sendImageResult({"command": "stop"}) | |||
self.hbQueue.put({"command": "stop"}) | |||
putQueue(image_queue, {"command": "stop"}, requestId) | |||
putQueue(hb_queue, {"command": "stop"}, requestId) | |||
clear_pull_p(pull_p, requestId) | |||
imageFileUpload.join(60 * 3) | |||
hb.join(60 * 3) | |||
logger.error("图片线程停止完成, reuqestId:{}", self.msg.get("request_id")) | |||
logger.error("图片线程停止完成, requestId:{}", requestId) | |||
break | |||
if stop_pull_stream_step: | |||
time.sleep(1) | |||
continue | |||
# 检测视频信息是否存在或拉流对象是否存在 | |||
check_vide_result = check_video_stream(width, height)
if check_vide_result:
logger.info("开始重新获取视频信息: {}次, requestId: {}", cv2_init_num, requestId)
pull_stream_init_timeout = time.time() - pull_stream_start_time | |||
if pull_stream_init_timeout > pull_stream_timeout:
logger.info("开始拉流超时, 超时时间:{}, requestId:{}", pull_stream_init_timeout, requestId)
raise ServiceException(ExceptionType.PULLSTREAM_TIMEOUT_EXCEPTION.value[0], | |||
ExceptionType.PULLSTREAM_TIMEOUT_EXCEPTION.value[1]) | |||
cv2_init_num += 1 | |||
time.sleep(1) | |||
width, height, width_height_3, all_frames, w_2, h_2 = build_video_info(pull_url, requestId)
continue | |||
pull_stream_start_time = time.time() | |||
cv2_init_num = 1 | |||
frame, pull_p, width, height, width_height_3 = pull_read_video_stream(pull_p, pull_url, width,
height, width_height_3, w_2, h_2,
requestId)
if frame is None: | |||
logger.info("获取帧为空, 开始重试: {}次, requestId: {}", init_pull_num, self.msg.get("request_id")) | |||
logger.info("获取帧为空, 开始重试: {}次, requestId: {}", init_pull_num, requestId) | |||
pull_stream_read_timeout = time.time() - pull_stream_read_start_time | |||
if pull_stream_read_timeout > read_stream_timeout:
logger.info("拉流过程中断了重试超时, 超时时间: {}, requestId: {}", pull_stream_read_timeout, | |||
self.msg.get("request_id")) | |||
self.sendPullQueue({"status": "3"}) # 3 超时 | |||
requestId) | |||
putQueue(pull_queue, ("3",), requestId) # 3 超时 | |||
stop_pull_stream_step = True | |||
clear_pull_p(pull_p, requestId)
continue | |||
clear_pull_p(pull_p, requestId)
init_pull_num += 1 | |||
continue | |||
init_pull_num = 1 | |||
pull_stream_read_start_time = time.time() | |||
if pull_queue.full():
logger.info("pull拉流队列满了:{}, requestId: {}", os.getppid(), requestId)
# 如果一直有视频流,队列一直是满的,应该是父进程挂了,直接等待60退出 | |||
if time.time() - kill_parent_process_timeout > 60: | |||
logger.info("检测到父进程异常停止, 请检测服务器资源是否负载过高, requestId: {}", requestId) | |||
putQueue(fb_queue, {"feedback": message_feedback(requestId, | |||
AnalysisStatus.FAILED.value, | |||
self._analyse_type, | |||
ExceptionType.NO_CPU_RESOURCES.value[0], | |||
ExceptionType.NO_CPU_RESOURCES.value[1], | |||
analyse_time=now_date_to_str())}, | |||
requestId) | |||
break | |||
# logger.info("当前视频帧队列处理满队列状态, requestId: {}", requestId) | |||
if psutil.Process(getpid()).ppid() == 1: | |||
logger.info("检测到父进程异常停止, 请检测服务器资源是否负载过高, requestId: {}", requestId) | |||
putQueue(fb_queue, {"feedback": message_feedback(requestId, | |||
AnalysisStatus.FAILED.value, | |||
self._analyse_type, | |||
ExceptionType.NO_CPU_RESOURCES.value[0], | |||
ExceptionType.NO_CPU_RESOURCES.value[1], | |||
analyse_time=now_date_to_str())}, | |||
requestId) | |||
break | |||
continue | |||
self.sendPullQueue({"status": "4", | |||
"frame": frame, | |||
"cct_frame": concurrent_frame, | |||
"width": cv2tool.width, | |||
"height": cv2tool.height, | |||
"fps": cv2tool.fps, | |||
"all_frame": cv2tool.all_frames}) | |||
kill_parent_process_timeout = time.time() | |||
putQueue(pull_queue, ("4", frame, concurrent_frame, w_2, h_2, all_frames), requestId) | |||
concurrent_frame += 1 | |||
except ServiceException as s: | |||
logger.exception("实时拉流异常: {}, requestId:{}", s.msg, self.msg.get("request_id")) | |||
self.sendPullQueue({"status": "1", "error": {"code": s.code, "msg": s.msg}}) | |||
except Exception as e: | |||
logger.exception("实时拉流异常: {}, requestId:{}", str(e), self.msg.get("request_id")) | |||
self.sendPullQueue({"status": "1", "error": {"code": ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
"msg": ExceptionType.SERVICE_INNER_EXCEPTION.value[1]}}) | |||
logger.error("实时拉流异常: {}, 队列大小:{}, requestId:{}", s.msg, pull_queue.qsize(), requestId) | |||
putQueue(pull_queue, ("1", s.code, s.msg), requestId, enable_ex=False) | |||
except Exception: | |||
logger.error("实时拉流异常: {}, requestId:{}", format_exc(), requestId) | |||
putQueue(pull_queue, ("1", ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]), requestId, enable_ex=False) | |||
finally: | |||
clear_pull_p(pull_p, requestId)
if imageFileUpload: | |||
self.sendImageResult({"command": "stop"}) | |||
putQueue(image_queue, {"command": "stop"}, requestId, enable_ex=False) | |||
imageFileUpload.join(60 * 3) | |||
if hb: | |||
self.hbQueue.put({"command": "stop"}) | |||
putQueue(hb_queue, {"command": "stop"}, requestId, enable_ex=False) | |||
hb.join(60 * 3) | |||
logger.info("实时拉流线程结束, requestId: {}", self.msg.get("request_id")) | |||
logger.info("实时拉流线程结束, requestId: {}", requestId) | |||
class OfflinePullVideoStreamProcess(PullVideoStreamProcess): | |||
__slots__ = () | |||
def run(self): | |||
pull_p = None
imageFileUpload = None | |||
hb = None | |||
requestId = '1' | |||
pull_queue = self._pullQueue | |||
fb_queue = self._fbQueue | |||
image_queue = self._imageQueue | |||
hb_queue = self._hbQueue | |||
try: | |||
# 初始化日志 | |||
base_dir = self._base_dir | |||
init_log(base_dir) | |||
requestId = self._msg.get("request_id") | |||
pull_url = self._msg.get("original_url") | |||
logger.info("开启离线视频拉流进程, requestId:{}", requestId) | |||
service_timeout = int(self._context["service"]["timeout"]) | |||
command_queue = self._command | |||
# 开启图片上传线程 | |||
imageFileUpload = self.start_File_upload(imageFileUpload)
# 开启心跳线程 | |||
hb = self.start_heartbeat(hb)
cv2_init_num = 1 | |||
start_time = time.time() | |||
concurrent_frame = 1 | |||
stop_pull_stream_step = False | |||
kill_parent_process_timeout = time.time() | |||
width, height, width_height_3, all_frames, w_2, h_2 = build_video_info(pull_url, requestId) | |||
while True: | |||
check(start_time, service_timeout, requestId, imageFileUpload, hb)
command = getNoBlockQueue(command_queue)
if command is not None and len(command) > 0:
if 'stop_pull_stream' == command.get("command"):
putQueue(pull_queue, ("9",), requestId)  # 9 停止拉流
stop_pull_stream_step = True | |||
clear_pull_p(pull_p, requestId)
continue | |||
if 'stop_image_hb' == body.get("command"): | |||
self.sendImageResult({"command": "stop"}) | |||
self.hbQueue.put({"command": "stop"}) | |||
if 'stop_image_hb' == command.get("command"): | |||
putQueue(image_queue, {"command": "stop"}, requestId) | |||
putQueue(hb_queue, {"command": "stop"}, requestId) | |||
clear_pull_p(pull_p, requestId) | |||
imageFileUpload.join(60 * 3) | |||
hb.join(60 * 3) | |||
logger.info("图片线程停止完成, reuqestId:{}", self.msg.get("request_id")) | |||
logger.error("图片线程停止完成, requestId:{}", requestId) | |||
break | |||
if stop_pull_stream_step: | |||
time.sleep(1) | |||
continue | |||
if pull_queue.full():
logger.info("当前视频帧队列处理满队列状态, requestId: {}", requestId) | |||
# 如果一直有视频流,队列一直是满的,应该是父进程挂了,直接等待60退出 | |||
if time.time() - kill_parent_process_timeout > 60: | |||
logger.info("检测到父进程异常停止, 请检测服务器资源是否负载过高, requestId: {}", requestId) | |||
putQueue(fb_queue, {"feedback": message_feedback(requestId, | |||
AnalysisStatus.FAILED.value, | |||
self._analyse_type, | |||
ExceptionType.NO_CPU_RESOURCES.value[0], | |||
ExceptionType.NO_CPU_RESOURCES.value[1], | |||
analyse_time=now_date_to_str())}, | |||
requestId) | |||
break | |||
if psutil.Process(getpid()).ppid() == 1: | |||
logger.info("检测到父进程异常停止, 请检测服务器资源是否负载过高, requestId: {}", requestId) | |||
putQueue(fb_queue, {"feedback": message_feedback(requestId, | |||
AnalysisStatus.FAILED.value, | |||
self._analyse_type, | |||
ExceptionType.NO_CPU_RESOURCES.value[0], | |||
ExceptionType.NO_CPU_RESOURCES.value[1], | |||
analyse_time=now_date_to_str())}, | |||
requestId) | |||
break | |||
continue | |||
kill_parent_process_timeout = time.time()
check_vide_result = check_video_stream(width, height)
if check_vide_result:
logger.info("开始重新获取视频信息: {}次, requestId: {}", cv2_init_num, requestId)
if cv2_init_num > 3: | |||
logger.info("视频信息获取失败, 重试: {}次, requestId: {}", cv2_init_num, | |||
self.msg.get("request_id")) | |||
logger.info("视频信息获取失败, 重试: {}次, requestId: {}", cv2_init_num, requestId) | |||
raise ServiceException(ExceptionType.OR_VIDEO_ADDRESS_EXCEPTION.value[0], | |||
ExceptionType.OR_VIDEO_ADDRESS_EXCEPTION.value[1]) | |||
cv2_init_num += 1 | |||
time.sleep(1) | |||
width, height, width_height_3, all_frames, w_2, h_2 = build_video_info(pull_url, requestId)
continue | |||
frame, pull_p, width, height, width_height_3 = pull_read_video_stream(pull_p, pull_url, width,
height, width_height_3, w_2, h_2,
requestId)
if frame is None: | |||
logger.info("总帧数: {}, 当前帧数: {}, requestId: {}", cv2tool.all_frames, concurrent_frame, | |||
self.msg.get("request_id")) | |||
logger.info("总帧数: {}, 当前帧数: {}, requestId: {}", all_frames, concurrent_frame, requestId) | |||
# 允许100帧的误差 | |||
if concurrent_frame < all_frames - 100:
logger.info("离线拉流异常结束:requestId: {}", requestId)
putQueue(pull_queue, ("3",), requestId)
stop_pull_stream_step = True | |||
continue | |||
logger.info("离线拉流线程结束, requestId: {}", self.msg.get("request_id")) | |||
self.sendPullQueue({"status": "2"}) | |||
cv2tool.close() | |||
logger.info("离线拉流线程结束, requestId: {}", requestId) | |||
putQueue(pull_queue, ("2",), requestId) | |||
stop_pull_stream_step = True | |||
continue | |||
self.sendPullQueue({"status": "4", | |||
"frame": frame, | |||
"cct_frame": concurrent_frame, | |||
"width": cv2tool.width, | |||
"height": cv2tool.height, | |||
"fps": cv2tool.fps, | |||
"all_frame": cv2tool.all_frames}) | |||
putQueue(pull_queue, ("4", frame, concurrent_frame, w_2, h_2, all_frames), requestId) | |||
concurrent_frame += 1 | |||
except ServiceException as s: | |||
self.sendPullQueue({"status": "1", "error": {"code": s.code, "msg": s.msg}}) | |||
except Exception as e: | |||
logger.exception("离线拉流异常: {}, requestId:{}", e, self.msg.get("request_id")) | |||
self.sendPullQueue({"status": "1", "error": {"code": ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
"msg": ExceptionType.SERVICE_INNER_EXCEPTION.value[1]}}) | |||
logger.error("离线任务拉流出现异常:{}, requestId:{}", s.msg, requestId) | |||
putQueue(pull_queue, ("1", s.code, s.msg), requestId, enable_ex=False) | |||
except Exception: | |||
logger.error("离线拉流异常: {}, requestId:{}", format_exc(), requestId) | |||
putQueue(pull_queue, ("1", ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]), requestId, enable_ex=False) | |||
finally: | |||
clear_pull_p(pull_p, requestId)
if imageFileUpload: | |||
self.sendImageResult({"command": "stop"}) | |||
putQueue(image_queue, {"command": "stop"}, requestId, enable_ex=False) | |||
imageFileUpload.join(60 * 3) | |||
if hb: | |||
self.hbQueue.put({"command": "stop"}) | |||
putQueue(hb_queue, {"command": "stop"}, requestId, enable_ex=False) | |||
hb.join(60 * 3) | |||
logger.info("离线拉流线程结束, requestId: {}", self.msg.get("request_id")) | |||
logger.info("离线拉流线程结束, requestId: {}", requestId) |
@@ -0,0 +1,22 @@ | |||
{ | |||
"access_key": "LTAI5tSJ62TLMUb4SZuf285A", | |||
"access_secret": "MWYynm30filZ7x0HqSHlU3pdLVNeI7", | |||
"oss": { | |||
"endpoint": "http://oss-cn-shanghai.aliyuncs.com", | |||
"bucket": "ta-tech-image", | |||
"connect_timeout": 30 | |||
}, | |||
"vod": { | |||
"host_address": "https://vod.play.t-aaron.com/", | |||
"ecsRegionId": "cn-shanghai", | |||
"dev": { | |||
"CateId": 1000468341 | |||
}, | |||
"test": { | |||
"CateId": 1000468338 | |||
}, | |||
"prod": { | |||
"CateId": 1000468340 | |||
} | |||
} | |||
} |
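# Editorial sketch (hypothetical helper, not the project's AliyunOssSdk): one way this file could be
# loaded and turned into an OSS bucket client with the official oss2 SDK; the key/secret above are
# whatever the deployment provisions, and the config path is assumed.
import json
import oss2

def open_bucket_sketch(config_path="config/aliyun.json"):
    with open(config_path, "r", encoding="utf-8") as f:
        cfg = json.load(f)
    auth = oss2.Auth(cfg["access_key"], cfg["access_secret"])
    return oss2.Bucket(auth, cfg["oss"]["endpoint"], cfg["oss"]["bucket"],
                       connect_timeout=cfg["oss"]["connect_timeout"])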
@@ -0,0 +1,116 @@ | |||
{ | |||
"dsp": { | |||
"active": "dev" | |||
}, | |||
"kafka": { | |||
"topic": { | |||
"dsp-alg-online-tasks-topic": "dsp-alg-online-tasks", | |||
"dsp-alg-offline-tasks-topic": "dsp-alg-offline-tasks", | |||
"dsp-alg-image-tasks-topic": "dsp-alg-image-tasks", | |||
"dsp-alg-results-topic": "dsp-alg-task-results", | |||
"dsp-recording-task-topic": "dsp-recording-task", | |||
"dsp-recording-result-topic": "dsp-recording-result" | |||
}, | |||
"dev": { | |||
"bootstrap_servers": ["192.168.11.13:9092"], | |||
"dsp-alg-online-tasks": { | |||
"partition": [0] | |||
}, | |||
"dsp-alg-offline-tasks": { | |||
"partition": [0] | |||
}, | |||
"dsp-alg-task-results": { | |||
"partition": [0] | |||
}, | |||
"producer": { | |||
"acks": -1, | |||
"retries": 3, | |||
"linger_ms": 50, | |||
"retry_backoff_ms": 1000, | |||
"max_in_flight_requests_per_connection": 5 | |||
}, | |||
"consumer": { | |||
"client_id": "dsp_ai_server", | |||
"group_id": "dsp-ai-dev", | |||
"auto_offset_reset": "latest", | |||
"enable_auto_commit": 0, | |||
"max_poll_records": 1 | |||
} | |||
}, | |||
"test": { | |||
"bootstrap_servers": ["106.14.96.218:19092"], | |||
"dsp-alg-online-tasks": { | |||
"partition": [0] | |||
}, | |||
"dsp-alg-offline-tasks": { | |||
"partition": [0] | |||
}, | |||
"dsp-alg-task-results": { | |||
"partition": [0] | |||
}, | |||
"producer": { | |||
"acks": -1, | |||
"retries": 3, | |||
"linger_ms": 50, | |||
"retry_backoff_ms": 1000, | |||
"max_in_flight_requests_per_connection": 5 | |||
}, | |||
"consumer": { | |||
"client_id": "dsp_ai_server", | |||
"group_id": "dsp-ai-test", | |||
"auto_offset_reset": "latest", | |||
"enable_auto_commit": 0, | |||
"max_poll_records": 1 | |||
} | |||
}, | |||
"prod": { | |||
"bootstrap_servers": ["101.132.127.1:19094"], | |||
"dsp-alg-online-tasks": { | |||
"partition": [0] | |||
}, | |||
"dsp-alg-offline-tasks": { | |||
"partition": [0] | |||
}, | |||
"dsp-alg-task-results": { | |||
"partition": [0] | |||
}, | |||
"producer": { | |||
"acks": -1, | |||
"retries": 3, | |||
"linger_ms": 50, | |||
"retry_backoff_ms": 1000, | |||
"max_in_flight_requests_per_connection": 5 | |||
}, | |||
"consumer": { | |||
"client_id": "dsp_ai_server", | |||
"group_id": "dsp-ai-prod", | |||
"auto_offset_reset": "latest", | |||
"enable_auto_commit": 0, | |||
"max_poll_records": 1 | |||
} | |||
} | |||
}, | |||
"video": { | |||
"file_path": "../dsp/video/", | |||
"video_add_water": 0 | |||
}, | |||
"service": { | |||
"frame_score": 0.4, | |||
"filter": { | |||
"picture_similarity": 1, | |||
"similarity": 0.65, | |||
"frame_step": 160 | |||
}, | |||
"timeout": 21600, | |||
"cv2_pull_stream_timeout": 1000, | |||
"cv2_read_stream_timeout": 1000, | |||
"recording_pull_stream_timeout": 600 | |||
}, | |||
"model": { | |||
"limit": 3 | |||
}, | |||
"task": { | |||
"limit": 5 | |||
} | |||
} | |||
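# Editorial sketch (hypothetical helper): how DispatcherService-style code can resolve the active
# environment and the matching kafka block from this file; "dev" above is only the default and is
# overridden by a dev/test/prod argument passed on the command line.
import json

def resolve_kafka_sketch(config_path="config/dsp_application.json", active=None):
    with open(config_path, "r", encoding="utf-8") as f:
        ctx = json.load(f)
    env = active or ctx["dsp"]["active"]              # "dev", "test" or "prod"
    bootstrap = ctx["kafka"][env]["bootstrap_servers"]
    online_topic = ctx["kafka"]["topic"]["dsp-alg-online-tasks-topic"]
    return bootstrap, online_topic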
@@ -0,0 +1,17 @@ | |||
{ | |||
"orc": { | |||
"APP_ID": 28173504, | |||
"API_KEY": "kqrFE7VuygIaFer7z6cRxzoi", | |||
"SECRET_KEY": "yp7xBokyl4TItyGhay7skAN1cMwfvEXf" | |||
}, | |||
"vehicle": { | |||
"APP_ID": 31096670, | |||
"API_KEY": "Dam3O4tgPRN3qh4OYE82dbg7", | |||
"SECRET_KEY": "1PGZ9LAXRR5zcT5MN9rHcW8kLBIS5DAa" | |||
}, | |||
"person": { | |||
"APP_ID": 31096755, | |||
"API_KEY": "CiWrt4iyxOly36n3kR7utiAG", | |||
"SECRET_KEY": "K7y6V3XTGdyXvgtCNCwTGUEooxxDuX9v" | |||
} | |||
} |
@@ -0,0 +1,12 @@ | |||
{ | |||
"enable_file_log": 1, | |||
"enable_stderr": 1, | |||
"base_path": "../dsp/logs", | |||
"log_name": "dsp.log", | |||
"log_fmt": "{time:YYYY-MM-DD HH:mm:ss.SSS} [{level}][{process.name}-{process.id}-{thread.name}-{thread.id}][{line}] {module}-{function} - {message}", | |||
"level": "INFO", | |||
"rotation": "00:00", | |||
"retention": "7 days", | |||
"encoding": "utf8" | |||
} | |||
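# Editorial sketch of what a loguru-based init_log could do with the fields above; the project's
# util.LogUtils implementation is not shown in this diff, so treat the wiring as an assumption.
import sys
from loguru import logger

def init_log_sketch(cfg, base_dir):
    logger.remove()
    if cfg["enable_stderr"]:
        logger.add(sys.stderr, format=cfg["log_fmt"], level=cfg["level"])
    if cfg["enable_file_log"]:
        logger.add("{}/{}/{}".format(base_dir, cfg["base_path"], cfg["log_name"]),
                   format=cfg["log_fmt"], level=cfg["level"], rotation=cfg["rotation"],
                   retention=cfg["retention"], encoding=cfg["encoding"], enqueue=True)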
@@ -1,215 +0,0 @@ | |||
dsp: | |||
active: dev | |||
kafka: | |||
topic: | |||
dsp-alg-online-tasks-topic: dsp-alg-online-tasks | |||
dsp-alg-offline-tasks-topic: dsp-alg-offline-tasks | |||
dsp-alg-image-tasks-topic: dsp-alg-image-tasks | |||
dsp-alg-results-topic: dsp-alg-task-results | |||
dsp-recording-task-topic: dsp-recording-task | |||
dsp-recording-result-topic: dsp-recording-result | |||
local: | |||
bootstrap_servers: ['192.168.10.11:9092'] | |||
# dsp-alg-online-tasks: | |||
# partition: [0] | |||
# dsp-alg-offline-tasks: | |||
# partition: [0] | |||
# dsp-alg-task-results: | |||
# partition: [0] | |||
producer: | |||
acks: -1 | |||
retries: 3 | |||
linger_ms: 50 | |||
retry_backoff_ms: 1000 | |||
max_in_flight_requests_per_connection: 5 | |||
consumer: | |||
client_id: dsp_ai_server | |||
group_id: dsp-ai-local | |||
auto_offset_reset: latest | |||
enable_auto_commit: False | |||
max_poll_records: 1 | |||
dev: | |||
bootstrap_servers: ['192.168.11.13:9092'] | |||
# dsp-alg-online-tasks: | |||
# partition: [0] | |||
# dsp-alg-offline-tasks: | |||
# partition: [0] | |||
# dsp-alg-task-results: | |||
# partition: [0] | |||
producer: | |||
acks: -1 | |||
retries: 3 | |||
linger_ms: 50 | |||
retry_backoff_ms: 1000 | |||
max_in_flight_requests_per_connection: 5 | |||
consumer: | |||
client_id: dsp_ai_server | |||
group_id: dsp-ai-dev | |||
auto_offset_reset: latest | |||
enable_auto_commit: False | |||
max_poll_records: 1 | |||
test: | |||
bootstrap_servers: ['106.14.96.218:19092'] | |||
# dsp-alg-online-tasks: | |||
# partition: [0] | |||
# dsp-alg-offline-tasks: | |||
# partition: [0] | |||
# dsp-alg-task-results: | |||
# partition: [0] | |||
producer: | |||
acks: -1 | |||
retries: 3 | |||
linger_ms: 50 | |||
retry_backoff_ms: 1000 | |||
max_in_flight_requests_per_connection: 5 | |||
consumer: | |||
client_id: dsp_ai_server | |||
group_id: dsp-ai-test | |||
auto_offset_reset: latest | |||
enable_auto_commit: False | |||
max_poll_records: 1 | |||
prod: | |||
bootstrap_servers: ['101.132.127.1:19094'] | |||
# dsp-alg-online-tasks: | |||
# partition: [1] | |||
# dsp-alg-offline-tasks: | |||
# partition: [1] | |||
# dsp-alg-task-results: | |||
# partition: [1] | |||
producer: | |||
acks: -1 | |||
retries: 3 | |||
linger_ms: 50 | |||
retry_backoff_ms: 1000 | |||
max_in_flight_requests_per_connection: 5 | |||
consumer: | |||
client_id: dsp_ai_server | |||
group_id: dsp-ai-prod | |||
auto_offset_reset: latest | |||
enable_auto_commit: False | |||
max_poll_records: 1 | |||
gpu: | |||
# 'first'- 按升序排列可用的 GPU 设备 ID(默认) | |||
# 'last'- 按 id 降序排列可用的 GPU 设备 id | |||
# 'random'- 随机订购可用的 GPU 设备 ID | |||
# 'load'- 按负载递增排序可用的 GPU 设备 ID | |||
# 'memory'- 通过升序内存使用来排序可用的 GPU 设备 ID | |||
order: 'first' | |||
# 获取可用gpu数量 | |||
limit: 10 | |||
# 最大负载 | |||
maxLoad: 0.85 | |||
# 最大内存 | |||
maxMemory: 0.85 | |||
includeNan: False | |||
excludeID: [] | |||
excludeUUID: [] | |||
video: | |||
# 视频拉取失败,保存路径 | |||
file_path: ../dsp/video/ | |||
# 视频加水印开关 | |||
video_add_water: True | |||
aliyun: | |||
access_key: LTAI5tSJ62TLMUb4SZuf285A | |||
access_secret: MWYynm30filZ7x0HqSHlU3pdLVNeI7 | |||
oss: | |||
endpoint: http://oss-cn-shanghai.aliyuncs.com | |||
bucket: 'ta-tech-image' | |||
connect_timeout: 30 | |||
vod: | |||
host_address: https://vod.play.t-aaron.com/ | |||
ecsRegionId: "cn-shanghai" | |||
dev: | |||
CateId: 1000468341 | |||
test: | |||
CateId: 1000468338 | |||
prod: | |||
CateId: 1000468340 | |||
service: | |||
frame_score: 0.4 # 获取最低得分以上的图片 | |||
filter: | |||
# 识别相似度是否开启 | |||
picture_similarity: True | |||
# 相似度阀值 | |||
similarity: 0.65 | |||
frame_step: 160 | |||
timeout: 21600 # 一次识别任务超时时间,单位秒,默认6个小时 | |||
cv2_pull_stream_timeout: 1000 # 直播开始视频未推流超时时间 | |||
cv2_read_stream_timeout: 1000 # 直播读流中超时时间 | |||
recording_pull_stream_timeout: 600 # 录屏拉流超时时间 | |||
baidu: | |||
orc: | |||
APP_ID: 28173504 | |||
API_KEY: kqrFE7VuygIaFer7z6cRxzoi | |||
SECRET_KEY: yp7xBokyl4TItyGhay7skAN1cMwfvEXf | |||
# 车辆检测 | |||
vehicle: | |||
APP_ID: 31096670 | |||
API_KEY: Dam3O4tgPRN3qh4OYE82dbg7 | |||
SECRET_KEY: 1PGZ9LAXRR5zcT5MN9rHcW8kLBIS5DAa | |||
# 人 | |||
person: | |||
APP_ID: 31096755 | |||
API_KEY: CiWrt4iyxOly36n3kR7utiAG | |||
SECRET_KEY: K7y6V3XTGdyXvgtCNCwTGUEooxxDuX9v | |||
# 模型相关配置 | |||
model: | |||
limit: 3 # 模型组合个数限制 | |||
# 日志设置 | |||
log: | |||
# 是否开启文件输出 True:开启 False:关闭 | |||
enable_file_log: True | |||
# 是否开启控制台日志输出 True:开启 False:关闭 | |||
enable_stderr: True | |||
# 日志打印文件夹 | |||
base_path: ../dsp/logs/ | |||
# 日志文件名称 | |||
log_name: dsp.log | |||
# 日志打印格式 | |||
log_fmt: "{time:YYYY-MM-DD HH:mm:ss.SSS} [{level}][{process.name}-{process.id}-{thread.name}-{thread.id}][{line}] {module}-{function} - {message}" | |||
# 日志隔离级别 | |||
level: INFO | |||
# 日志每天0点创建新文件 | |||
rotation: 00:00 | |||
# 日志保存时间15天 | |||
retention: 15 days | |||
# 线程安全 | |||
enqueue: True | |||
# 编码格式 | |||
encoding: utf8 | |||
#mysql: | |||
# # 数据库信息 | |||
# dev: | |||
# host: 192.168.11.13 | |||
# port: 3306 | |||
# dbname: tuheng_dsp | |||
# username: root | |||
# password: idontcare | |||
# test: | |||
# host: 192.168.11.242 | |||
# port: 3306 | |||
# dbname: tuheng_dsp | |||
# username: root | |||
# password: idontcare | |||
# prod: | |||
# host: 172.16.1.22 | |||
# port: 3306 | |||
# dbname: tuheng_dsp | |||
# username: root | |||
# password: TH22#2022 | |||
# db_charset: utf8 | |||
# # mincached : 启动时开启的闲置连接数量(缺省值 0 开始时不创建连接) | |||
# db_min_cached: 0 | |||
# # maxcached : 连接池中允许的闲置的最多连接数量(缺省值 0 代表不闲置连接池大小) | |||
# db_max_cached: 10 | |||
# # maxshared : 共享连接数允许的最大数量(缺省值 0 代表所有连接都是专用的)如果达到了最大数量,被请求为共享的连接将会被共享使用 | |||
# db_max_shared: 10 | |||
# # maxconnecyions : 创建连接池的最大数量(缺省值 0 代表不限制) | |||
# db_max_connecyions: 20 | |||
# # maxusage : 单个连接的最大允许复用次数(缺省值 0 或 False 代表不限制的复用).当达到最大数时,连接会自动重新连接(关闭和重新打开) | |||
# db_blocking: True | |||
# # maxusage : 单个连接的最大允许复用次数(缺省值 0 或 False 代表不限制的复用).当达到最大数时,连接会自动重新连接(关闭和重新打开) | |||
# db_max_usage: 0 | |||
# # setsession : 一个可选的SQL命令列表用于准备每个会话,如["set datestyle to german", ...] | |||
# db_set_session: None |
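# Editorial note: the gpu block in this removed YAML mirrors the parameters of GPUtil.getAvailable,
# which is what the order/maxLoad/maxMemory comments above describe; a call using those values would
# look like this (sketch, assuming the GPUtil package is the consumer of these settings):
import GPUtil

device_ids = GPUtil.getAvailable(order='first', limit=10, maxLoad=0.85, maxMemory=0.85,
                                 includeNan=False, excludeID=[], excludeUUID=[])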
@@ -1,16 +1,24 @@ | |||
# -*- coding: utf-8 -*- | |||
from os.path import dirname, realpath | |||
from sys import argv | |||
from loguru import logger | |||
from torch import multiprocessing | |||
from service.Dispatcher import DispatcherService | |||
from util.LogUtils import init_log | |||
''' | |||
dsp主程序入口 | |||
''' | |||
if __name__ == '__main__': | |||
print("(♥◠‿◠)ノ゙ DSP【算法调度服务】开始启动 ლ(´ڡ`ლ)゙") | |||
multiprocessing.set_start_method('spawn') | |||
base_dir = dirname(realpath(__file__)) | |||
init_log(base_dir) | |||
logger.info("(♥◠‿◠)ノ゙ DSP【算法调度服务】开始启动 ლ(´ڡ`ლ)゙") | |||
arg = argv | |||
logger.info("脚本启动参数: {}", arg) | |||
envs = ('dev', 'test', 'prod') | |||
active = [env for env in envs if env in arg] | |||
DispatcherService(base_dir, active) |
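# Editorial usage note: the environment is taken straight from the command line, so the service is
# started with one of the three recognised names, e.g.
#   python dsp.py dev
#   python dsp.py test
#   python dsp.py prod
# Any other invocation leaves `active` empty and the "dsp.active" value in dsp_application.json wins.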
@@ -0,0 +1,14 @@ | |||
class PullStreamDto: | |||
__slots__ = ('msg', 'context', 'pullQueue', 'fbQueue', 'hbQueue', 'imageQueue', 'analyse_type') | |||
def __init__(self, msg, context, pullQueue, fbQueue, hbQueue, imageQueue, analyse_type): | |||
self.msg = msg | |||
self.context = context | |||
self.pullQueue = pullQueue | |||
self.fbQueue = fbQueue | |||
self.hbQueue = hbQueue | |||
self.imageQueue = imageQueue | |||
self.analyse_type = analyse_type | |||
@@ -0,0 +1,12 @@ | |||
class Param: | |||
__slots__ = ('fbqueue', 'msg', 'analyse_type', 'base_dir', 'context', 'gpu_name') | |||
def __init__(self, fbqueue, msg, analyse_type, base_dir, context, gpu_name): | |||
self.fbqueue = fbqueue | |||
self.msg = msg | |||
self.analyse_type = analyse_type | |||
self.base_dir = base_dir | |||
self.context = context | |||
self.gpu_name = gpu_name |
@@ -63,6 +63,14 @@ class ExceptionType(Enum): | |||
PUSH_STREAM_EXCEPTION = ("SP028", "推流异常!") | |||
NOT_REQUESTID_TASK_EXCEPTION = ("SP993", "未查询到该任务,无法停止任务!") | |||
GPU_EXCEPTION = ("SP994", "GPU出现异常!") | |||
NO_RESOURCES = ("SP995", "服务器暂无资源可以使用,请稍后30秒后再试!") | |||
NO_CPU_RESOURCES = ("SP996", "暂无CPU资源可以使用,请稍后再试!") | |||
SERVICE_COMMON_EXCEPTION = ("SP997", "公共服务异常!") | |||
NO_GPU_RESOURCES = ("SP998", "暂无GPU资源可以使用,请稍后再试!") |
@@ -6,40 +6,38 @@ from enum import Enum, unique | |||
2. 模型编号 | |||
3. 模型名称 | |||
4. 选用的模型名称 | |||
5. 是否可以参与多个模型组合调用 | |||
0: 视频、图片模型组合都支持 | |||
1: 只支持视频模型之间的组合 | |||
2: 只支持图片模型之间的组合 | |||
''' | |||
@unique | |||
class ModelType(Enum): | |||
WATER_SURFACE_MODEL = ("1", "001", "河道模型", 'river', 0) | |||
WATER_SURFACE_MODEL = ("1", "001", "河道模型", 'river') | |||
FOREST_FARM_MODEL = ("2", "002", "森林模型", 'forest2', 0) | |||
FOREST_FARM_MODEL = ("2", "002", "森林模型", 'forest2') | |||
TRAFFIC_FARM_MODEL = ("3", "003", "交通模型", 'highWay2', 0) | |||
TRAFFIC_FARM_MODEL = ("3", "003", "交通模型", 'highWay2') | |||
EPIDEMIC_PREVENTION_MODEL = ("4", "004", "防疫模型", None, 2) | |||
EPIDEMIC_PREVENTION_MODEL = ("4", "004", "防疫模型", None) | |||
PLATE_MODEL = ("5", "005", "车牌模型", None, 2) | |||
PLATE_MODEL = ("5", "005", "车牌模型", None) | |||
VEHICLE_MODEL = ("6", "006", "车辆模型", 'vehicle', 0) | |||
VEHICLE_MODEL = ("6", "006", "车辆模型", 'vehicle') | |||
PEDESTRIAN_MODEL = ("7", "007", "行人模型", 'pedestrian', 0) | |||
PEDESTRIAN_MODEL = ("7", "007", "行人模型", 'pedestrian') | |||
SMOGFIRE_MODEL = ("8", "008", "烟火模型", 'smogfire', 0) | |||
SMOGFIRE_MODEL = ("8", "008", "烟火模型", 'smogfire') | |||
ANGLERSWIMMER_MODEL = ("9", "009", "钓鱼游泳模型", 'AnglerSwimmer', 0) | |||
ANGLERSWIMMER_MODEL = ("9", "009", "钓鱼游泳模型", 'AnglerSwimmer') | |||
COUNTRYROAD_MODEL = ("10", "010", "乡村模型", 'countryRoad', 0) | |||
COUNTRYROAD_MODEL = ("10", "010", "乡村模型", 'countryRoad') | |||
SHIP_MODEL = ("11", "011", "船只模型", 'ship2', 0) | |||
SHIP_MODEL = ("11", "011", "船只模型", 'ship2') | |||
BAIDU_MODEL = ("12", "012", "百度AI图片识别模型", None, 2) | |||
BAIDU_MODEL = ("12", "012", "百度AI图片识别模型", None) | |||
CHANNEL_EMERGENCY_MODEL = ("13", "013", "航道模型", 'channelEmergency', 0) | |||
CHANNEL_EMERGENCY_MODEL = ("13", "013", "航道模型", 'channelEmergency') | |||
RIVER2_MODEL = ("15", "015", "河道检测模型", 'river2') | |||
def checkCode(code): | |||
for model in ModelType: | |||
@@ -58,10 +56,10 @@ class ModelType(Enum): | |||
@unique | |||
class BaiduModelTarget(Enum): | |||
VEHICLE_DETECTION = ( | |||
"车辆检测", 0, lambda client0, client1, url, request_id: client0.vehicleDetectUrl(url, request_id)) | |||
"车辆检测", 0, lambda client0, client1, url, request_id: client0.vehicleDetectUrl(url, request_id)) | |||
HUMAN_DETECTION = ( | |||
"人体检测与属性识别", 1, lambda client0, client1, url, request_id: client1.bodyAttr(url, request_id)) | |||
"人体检测与属性识别", 1, lambda client0, client1, url, request_id: client1.bodyAttr(url, request_id)) | |||
PEOPLE_COUNTING = ("人流量统计", 2, lambda client0, client1, url, request_id: client1.bodyNum(url, request_id)) | |||
@@ -1,24 +1,30 @@ | |||
# -*- coding: utf-8 -*- | |||
import time | |||
from traceback import format_exc | |||
from cerberus import Validator | |||
from torch.cuda import is_available | |||
from common.YmlConstant import SCHEMA | |||
from concurrency.FeedbackThread import FeedbackThread | |||
from entity.FeedBack import message_feedback, recording_feedback | |||
from entity.TaskParam import Param | |||
from enums.AnalysisStatusEnum import AnalysisStatus | |||
from enums.AnalysisTypeEnum import AnalysisType | |||
from enums.ExceptionEnum import ExceptionType | |||
from enums.RecordingStatusEnum import RecordingStatus | |||
from exception.CustomerException import ServiceException | |||
from util import TimeUtils | |||
from loguru import logger | |||
from multiprocessing import Queue | |||
from concurrency.IntelligentRecognitionProcess import OnlineIntelligentRecognitionProcess, \ | |||
OfflineIntelligentRecognitionProcess, PhotosIntelligentRecognitionProcess, ScreenRecordingProcess | |||
from util.CpuUtils import check_cpu, print_cpu_ex_status | |||
from util.FileUtils import create_dir_not_exist | |||
from util.GPUtils import get_first_gpu_name, print_gpu_ex_status | |||
from util.KafkaUtils import CustomerKafkaConsumer | |||
from util.RWUtils import getConfigs | |||
''' | |||
分发服务 | |||
@@ -26,92 +32,111 @@ from util.GPUtils import get_first_gpu_name | |||
class DispatcherService: | |||
__slots__ = ( | |||
'__base_dir', | |||
'__context', | |||
'__feedbackThread', | |||
'__listeningProcesses', | |||
'__fbQueue', | |||
'__topics', | |||
'__analysisType', | |||
'__gpu_name', | |||
'__resource_status' | |||
) | |||
""" | |||
初始化 | |||
""" | |||
def __init__(self, base_dir, active):
if not is_available():
raise Exception("cuda不在活动状态, 请检测显卡驱动是否正常!!!!") | |||
self.__context = getConfigs(base_dir, 'config/dsp_application.json') | |||
create_dir_not_exist(base_dir, self.__context["video"]["file_path"]) | |||
self.__base_dir = base_dir | |||
if len(active) > 0: | |||
self.__context["dsp"]["active"] = active[0] | |||
self.__resource_status = False | |||
self.__feedbackThread = None # 初始化反馈线程对象 | |||
self.__listeningProcesses = {} | |||
self.__fbQueue = Queue() | |||
self.__topics = ( | |||
self.__context["kafka"]["topic"]["dsp-alg-online-tasks-topic"], | |||
self.__context["kafka"]["topic"]["dsp-alg-offline-tasks-topic"], | |||
self.__context["kafka"]["topic"]["dsp-alg-image-tasks-topic"], | |||
self.__context["kafka"]["topic"]["dsp-recording-task-topic"] | |||
) | |||
self.__analysisType = { | |||
self.__topics[0]: (AnalysisType.ONLINE.value, lambda x, y: self.online(x, y), | |||
lambda x, y, z: self.identify_method(x, y, z)), | |||
self.__topics[1]: (AnalysisType.OFFLINE.value, lambda x, y: self.offline(x, y), | |||
lambda x, y, z: self.identify_method(x, y, z)), | |||
self.__topics[2]: (AnalysisType.IMAGE.value, lambda x, y: self.image(x, y), | |||
lambda x, y, z: self.identify_method(x, y, z)), | |||
self.__topics[3]: (AnalysisType.RECORDING.value, lambda x, y: self.recording(x, y), | |||
lambda x, y, z: self.recording_method(x, y, z)) | |||
} | |||
# 获取当前显卡设备名称 ##################################################################### | |||
gpu_name_array = get_first_gpu_name() | |||
gpu_codes = ('3090', '2080', '4090', 'A10') | |||
gpu_array = [g for g in gpu_codes if g in gpu_name_array] | |||
self.__gpu_name = '2080Ti' | |||
if len(gpu_array) > 0: | |||
if gpu_array[0] != '2080': | |||
self.__gpu_name = gpu_array[0] | |||
else: | |||
raise Exception("GPU资源不在提供的模型所支持的范围内!请先提供对应的GPU模型!") | |||
logger.info("当前服务环境为: {}, 服务器GPU使用型号: {}", self.__context["dsp"]["active"], self.__gpu_name) | |||
self.start_service() | |||
# 服务调用启动方法 | |||
def start_service(self): | |||
# 初始化kafka监听者 | |||
customerKafkaConsumer = CustomerKafkaConsumer(self.__context, topics=self.__topics)
logger.info("(♥◠‿◠)ノ゙ DSP【算法调度服务】启动成功 ლ(´ڡ`ლ)゙") | |||
# 循环消息处理 | |||
start_time = time.time() | |||
persistent_time = time.time() | |||
full_count = 0 | |||
while True: | |||
try: | |||
# 检查任务进程运行情况,去除活动的任务 | |||
self.check_process_task() | |||
start_time = self.check_service_resource(start_time) | |||
if len(self.__listeningProcesses) > 0: | |||
now = time.time() | |||
requestIds = list(self.__listeningProcesses.keys()) | |||
requestId = requestIds[-1] | |||
task_process = self.__listeningProcesses.get(requestId) | |||
end_time = now - task_process.start_proccess_time | |||
if end_time > 80 and task_process.pullQueue.full() and time.time() - persistent_time < 10: | |||
full_count += 1 | |||
if full_count > 4: | |||
logger.error("服务器资源限制, 暂无资源可以使用! requestId:{}", requestId) | |||
task_process.sendEvent({"command": "stop_ex"}) | |||
full_count = 0 | |||
persistent_time = time.time() | |||
if end_time > 80 and task_process.pullQueue.full() and time.time() - persistent_time >= 10: | |||
full_count = 0 | |||
persistent_time = time.time() | |||
self.start_feedback_thread() | |||
msg = customerKafkaConsumer.poll() | |||
time.sleep(1) | |||
if msg is not None and len(msg) > 0: | |||
for k, v in msg.items(): | |||
for m in v: | |||
message = m.value | |||
# 提交offset | |||
customerKafkaConsumer.commit_offset(m) | |||
requestId = self.getRequestId(message.get("request_id")) | |||
logger.info("当前拉取到的消息, topic:{}, offset:{}, partition: {}, body: {}, requestId:{}", | |||
m.topic, m.offset, m.partition, message, requestId) | |||
topic_method = self.__analysisType.get(m.topic) | |||
topic_method[2](m.topic, message, topic_method[0]) | |||
except Exception: | |||
logger.exception("主线程异常:{}", format_exc()) | |||
''' | |||
考虑到requestId为空的场景 | |||
@@ -119,135 +144,155 @@ class DispatcherService: | |||
@staticmethod | |||
def getRequestId(request_id): | |||
if not request_id:
return '1'
return request_id | |||
def identify_method(self, topic, message, analysisType): | |||
""" | |||
实时、离线、图片识别逻辑 | |||
1. topic topic | |||
2. 请求消息体 | |||
3. 分析类型:实时、离线、图片 | |||
""" | |||
try: | |||
# 校验参数 | |||
check_result = self.check_msg(message) | |||
if not check_result: | |||
return | |||
if not is_available(): | |||
raise ServiceException(ExceptionType.GPU_EXCEPTION.value[0], | |||
ExceptionType.GPU_EXCEPTION.value[1]) | |||
self.__analysisType.get(topic)[1](message, analysisType) | |||
except ServiceException as s: | |||
logger.exception("消息监听异常:{}, requestId: {}", s.msg, | |||
self.getRequestId(message.get(YmlConstant.REQUEST_ID))) | |||
if message.get(YmlConstant.REQUEST_ID): | |||
self.fbQueue.put({ | |||
YmlConstant.FEEDBACK: message_feedback(message.get(YmlConstant.REQUEST_ID), | |||
AnalysisStatus.FAILED.value, | |||
analysisType, | |||
s.code, | |||
s.msg, | |||
analyse_time=TimeUtils.now_date_to_str())}) | |||
except Exception as e: | |||
logger.exception("消息监听异常:{}, requestId: {}", e, | |||
self.getRequestId(message.get(YmlConstant.REQUEST_ID))) | |||
if message.get(YmlConstant.REQUEST_ID): | |||
self.fbQueue.put({ | |||
YmlConstant.FEEDBACK: message_feedback(message.get(YmlConstant.REQUEST_ID), | |||
AnalysisStatus.FAILED.value, | |||
analysisType, | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1], | |||
analyse_time=TimeUtils.now_date_to_str())}) | |||
def recording_method(self, m, message, analysisType): | |||
logger.error("消息监听异常:{}, requestId: {}", s.msg, | |||
self.getRequestId(message.get("request_id"))) | |||
if message.get("request_id"): | |||
self.__fbQueue.put({ | |||
"feedback": message_feedback(message.get("request_id"), | |||
AnalysisStatus.FAILED.value, | |||
analysisType, | |||
s.code, | |||
s.msg, | |||
analyse_time=TimeUtils.now_date_to_str())}, timeout=10) | |||
except Exception: | |||
logger.error("消息监听异常:{}, requestId: {}", format_exc(), | |||
self.getRequestId(message.get("request_id"))) | |||
if message.get("request_id"): | |||
self.__fbQueue.put({ | |||
"feedback": message_feedback(message.get("request_id"), | |||
AnalysisStatus.FAILED.value, | |||
analysisType, | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1], | |||
analyse_time=TimeUtils.now_date_to_str())}, timeout=10) | |||
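# Hypothetical shape of the Kafka request message handled above (only | |||
# "request_id" and "command" are read directly in this class; the rest of the | |||
# payload is validated against SCHEMA and passed through to the worker process): | |||
#   {"request_id": "20230510185733460569", "command": "start", ...} | |||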
def recording_method(self, topic, message, analysisType): | |||
try: | |||
# 校验参数 | |||
check_result = self.check_msg(message) | |||
if not check_result: | |||
return | |||
self.analysisType.get(m.topic)[1](message, analysisType) | |||
self.__analysisType.get(topic)[1](message, analysisType) | |||
except ServiceException as s: | |||
logger.exception("消息监听异常:{}, requestId: {}", s.msg, | |||
self.getRequestId(message.get(YmlConstant.REQUEST_ID))) | |||
if message.get(YmlConstant.REQUEST_ID): | |||
self.fbQueue.put({ | |||
YmlConstant.RECORDING: recording_feedback(message.get(YmlConstant.REQUEST_ID), | |||
RecordingStatus.RECORDING_FAILED.value[0], | |||
error_code=s.code, | |||
error_msg=s.msg)}) | |||
except Exception as e: | |||
logger.exception("消息监听异常:{}, requestId: {}", e, | |||
self.getRequestId(message.get(YmlConstant.REQUEST_ID))) | |||
if message.get(YmlConstant.REQUEST_ID): | |||
self.fbQueue.put({ | |||
YmlConstant.RECORDING: recording_feedback(message.get(YmlConstant.REQUEST_ID), | |||
RecordingStatus.RECORDING_FAILED.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1])}) | |||
logger.error("消息监听异常:{}, requestId: {}", s.msg, | |||
self.getRequestId(message.get("request_id"))) | |||
if message.get("request_id"): | |||
self.__fbQueue.put({ | |||
"recording": recording_feedback(message.get("request_id"), | |||
RecordingStatus.RECORDING_FAILED.value[0], | |||
error_code=s.code, | |||
error_msg=s.msg)}, timeout=10) | |||
except Exception: | |||
logger.error("消息监听异常:{}, requestId: {}", format_exc(), | |||
self.getRequestId(message.get("request_id"))) | |||
if message.get("request_id"): | |||
self.__fbQueue.put({ | |||
"recording": recording_feedback(message.get("request_id"), | |||
RecordingStatus.RECORDING_FAILED.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1])}, timeout=10) | |||
# 开启实时进程 | |||
def startOnlineProcess(self, msg, gpu_ids, analysisType): | |||
if self.onlineProcesses.get(msg.get(YmlConstant.REQUEST_ID)): | |||
logger.warning("重复任务,请稍后再试!requestId:{}", msg.get(YmlConstant.REQUEST_ID)) | |||
def startOnlineProcess(self, msg, analysisType): | |||
if self.__listeningProcesses.get(msg.get("request_id")): | |||
logger.warning("实时重复任务,请稍后再试!requestId:{}", msg.get("request_id")) | |||
return | |||
cfg = {YmlConstant.FBQUEUE: self.fbQueue, YmlConstant.CONTEXT: self.__context, YmlConstant.MSG: msg, | |||
YmlConstant.GPU_IDS: gpu_ids, YmlConstant.ANALYSE_TYPE: analysisType} | |||
param = Param(self.__fbQueue, msg, analysisType, self.__base_dir, self.__context, self.__gpu_name) | |||
# 创建在线识别进程并启动 | |||
oirp = OnlineIntelligentRecognitionProcess(cfg) | |||
oirp.start() | |||
coir = OnlineIntelligentRecognitionProcess(param) | |||
coir.start() | |||
# 记录请求与进程映射 | |||
self.onlineProcesses[msg.get(YmlConstant.REQUEST_ID)] = oirp | |||
self.__listeningProcesses[msg.get("request_id")] = coir | |||
# 结束实时进程 | |||
def stopOnlineProcess(self, msg): | |||
ps = self.onlineProcesses.get(msg.get(YmlConstant.REQUEST_ID)) | |||
def stopOnlineProcess(self, msg, analysisType): | |||
ps = self.__listeningProcesses.get(msg.get("request_id")) | |||
if ps is None: | |||
logger.warning("未查询到该任务,无法停止任务!requestId:{}", msg.get(YmlConstant.REQUEST_ID)) | |||
logger.warning("未查询到该任务,无法停止任务!requestId:{}", msg.get("request_id")) | |||
putQueue(self.__fbQueue, { | |||
"feedback": message_feedback(msg.get("request_id"), | |||
AnalysisStatus.FAILED.value, | |||
analysisType, | |||
ExceptionType.NOT_REQUESTID_TASK_EXCEPTION.value[0], | |||
ExceptionType.NOT_REQUESTID_TASK_EXCEPTION.value[1], | |||
analyse_time=TimeUtils.now_date_to_str())}, msg.get("request_id")) | |||
return | |||
ps.sendEvent({'command': 'stop'}) | |||
''' | |||
Check the running real-time/offline process tasks and remove inactive ones | |||
''' | |||
ps.sendEvent({"command": "stop"}) | |||
def check_service_resource(self, start_time, requestId=None): | |||
if len(self.__listeningProcesses) > 0: | |||
gpu_result = print_gpu_ex_status(requestId) | |||
cpu_result = print_cpu_ex_status(self.__base_dir, requestId) | |||
if gpu_result or cpu_result: | |||
self.__resource_status = True | |||
return time.time() | |||
if not gpu_result and not cpu_result and time.time() - start_time > 30: | |||
self.__resource_status = False | |||
return time.time() | |||
return start_time | |||
def check_process_task(self): | |||
for process in self.listeningProcesses: | |||
for requestId in list(process.keys()): | |||
if not process[requestId].is_alive(): | |||
del process[requestId] | |||
for requestId in list(self.__listeningProcesses.keys()): | |||
if not self.__listeningProcesses[requestId].is_alive(): | |||
del self.__listeningProcesses[requestId] | |||
# 开启离线进程 | |||
def startOfflineProcess(self, msg, gpu_ids, analysisType): | |||
if self.offlineProcesses.get(msg.get(YmlConstant.REQUEST_ID)): | |||
logger.warning("重复任务,请稍后再试!requestId:{}", msg.get(YmlConstant.REQUEST_ID)) | |||
def startOfflineProcess(self, msg, analysisType): | |||
if self.__listeningProcesses.get(msg.get("request_id")): | |||
logger.warning("离线重复任务,请稍后再试!requestId:{}", msg.get("request_id")) | |||
return | |||
cfg = {YmlConstant.FBQUEUE: self.fbQueue, YmlConstant.CONTEXT: self.__context, YmlConstant.MSG: msg, | |||
YmlConstant.GPU_IDS: gpu_ids, YmlConstant.ANALYSE_TYPE: analysisType} | |||
ofirp = OfflineIntelligentRecognitionProcess(cfg) | |||
ofirp.start() | |||
self.offlineProcesses[msg.get(YmlConstant.REQUEST_ID)] = ofirp | |||
param = Param(self.__fbQueue, msg, analysisType, self.__base_dir, self.__context, self.__gpu_name) | |||
first = OfflineIntelligentRecognitionProcess(param) | |||
first.start() | |||
self.__listeningProcesses[msg.get("request_id")] = first | |||
# 结束离线进程 | |||
def stopOfflineProcess(self, msg): | |||
ps = self.offlineProcesses.get(msg.get(YmlConstant.REQUEST_ID)) | |||
def stopOfflineProcess(self, msg, analysisType): | |||
ps = self.__listeningProcesses.get(msg.get("request_id")) | |||
if ps is None: | |||
logger.warning("未查询到该任务,无法停止任务!requestId:{}", msg.get(YmlConstant.REQUEST_ID)) | |||
logger.warning("未查询到该任务,无法停止任务!requestId:{}", msg.get("request_id")) | |||
putQueue(self.__fbQueue, { | |||
"feedback": message_feedback(msg.get("request_id"), | |||
AnalysisStatus.FAILED.value, | |||
analysisType, | |||
ExceptionType.NOT_REQUESTID_TASK_EXCEPTION.value[0], | |||
ExceptionType.NOT_REQUESTID_TASK_EXCEPTION.value[1], | |||
analyse_time=TimeUtils.now_date_to_str())}, msg.get("request_id")) | |||
return | |||
ps.sendEvent({'command': 'stop'}) | |||
ps.sendEvent({"command": "stop"}) | |||
# 开启图片分析进程 | |||
def startImageProcess(self, msg, gpu_ids, analysisType): | |||
pp = self.photoProcesses.get(msg.get(YmlConstant.REQUEST_ID)) | |||
def startImageProcess(self, msg, analysisType): | |||
pp = self.__listeningProcesses.get(msg.get("request_id")) | |||
if pp is not None: | |||
logger.warning("重复任务,请稍后再试!requestId:{}", msg.get(YmlConstant.REQUEST_ID)) | |||
logger.warning("重复任务,请稍后再试!requestId:{}", msg.get("request_id")) | |||
return | |||
cfg = {YmlConstant.FBQUEUE: self.fbQueue, YmlConstant.CONTEXT: self.__context, YmlConstant.MSG: msg, | |||
YmlConstant.GPU_IDS: gpu_ids, YmlConstant.ANALYSE_TYPE: analysisType} | |||
param = Param(self.__fbQueue, msg, analysisType, self.__base_dir, self.__context, self.__gpu_name) | |||
# create and start the image recognition process | |||
imagep = PhotosIntelligentRecognitionProcess(cfg) | |||
imagep.start() | |||
self.photoProcesses[msg.get(YmlConstant.REQUEST_ID)] = imagep | |||
imaged = PhotosIntelligentRecognitionProcess(param) | |||
imaged.start() | |||
self.__listeningProcesses[msg.get("request_id")] = imaged | |||
''' | |||
Validate the Kafka message | |||
@@ -256,17 +301,17 @@ class DispatcherService: | |||
@staticmethod | |||
def check_msg(msg): | |||
try: | |||
v = Validator(YmlConstant.SCHEMA, allow_unknown=True) | |||
v = Validator(SCHEMA, allow_unknown=True) | |||
result = v.validate(msg) | |||
if not result: | |||
logger.error("参数校验异常: {}", v.errors) | |||
if msg.get(YmlConstant.REQUEST_ID) is not None and len(msg.get(YmlConstant.REQUEST_ID)) > 0: | |||
if msg.get("request_id"): | |||
raise ServiceException(ExceptionType.ILLEGAL_PARAMETER_FORMAT.value[0], v.errors) | |||
return result | |||
except ServiceException as s: | |||
raise s | |||
except Exception as e: | |||
logger.exception("参数校验异常: {}", e) | |||
except Exception: | |||
logger.error("参数校验异常: {}", format_exc()) | |||
raise ServiceException(ExceptionType.ILLEGAL_PARAMETER_FORMAT.value[0], | |||
ExceptionType.ILLEGAL_PARAMETER_FORMAT.value[1]) | |||
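# A minimal sketch of the cerberus validation used in check_msg (hypothetical | |||
# schema; the real SCHEMA constant is defined elsewhere in the project): | |||
#   from cerberus import Validator | |||
#   schema = {"request_id": {"type": "string", "required": True}, | |||
#             "command": {"type": "string", "allowed": ["start", "stop"]}} | |||
#   v = Validator(schema, allow_unknown=True) | |||
#   v.validate({"command": "start"})   # -> False | |||
#   v.errors                           # -> {'request_id': ['required field']} | |||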
@@ -276,100 +321,102 @@ class DispatcherService: | |||
def start_feedback_thread(self): | |||
if self.__feedbackThread is None: | |||
self.__feedbackThread = FeedbackThread(self.fbQueue, self.__context) | |||
self.__feedbackThread = FeedbackThread(self.__fbQueue, self.__context) | |||
self.__feedbackThread.setDaemon(True) | |||
self.__feedbackThread.start() | |||
start_time = time.time() | |||
retry_count = 0 | |||
while True: | |||
if self.__feedbackThread.is_alive(): | |||
break | |||
retry_count += 1 | |||
if retry_count > 8: | |||
self.__feedbackThread = None | |||
logger.error("反馈线程异常重试失败!!!!!!") | |||
break | |||
if time.time() - start_time <= 3: | |||
logger.error("反馈线程异常等待中") | |||
time.sleep(1) | |||
continue | |||
logger.error("反馈线程异常重启中") | |||
self.__feedbackThread.start() | |||
start_time = time.time() | |||
while True: | |||
if self.__feedbackThread.is_alive(): | |||
return | |||
if not self.__feedbackThread.is_alive(): | |||
logger.warning("反馈线程异常等待中") | |||
if time.time() - start_time <= 3: | |||
continue | |||
elif int(time.time() - start_time) <= 5: | |||
logger.warning("反馈线程异常重启中") | |||
self.__feedbackThread.start() | |||
time.sleep(2) | |||
continue | |||
elif int(time.time() - start_time) > 5: | |||
raise Exception("反馈线程程启动异常") | |||
# If the feedback thread is None, start it; if it stopped unexpectedly, start it again | |||
if self.__feedbackThread is not None and not self.__feedbackThread.is_alive(): | |||
start_time_1 = time.time() | |||
while True: | |||
if self.__feedbackThread.is_alive(): | |||
return | |||
if not self.__feedbackThread.is_alive(): | |||
logger.warning("反馈线程异常等待中") | |||
if time.time() - start_time_1 <= 3: | |||
continue | |||
elif time.time() - start_time_1 <= 5: | |||
logger.warning("反馈线程异常重启中") | |||
self.__feedbackThread.start() | |||
time.sleep(1) | |||
continue | |||
elif time.time() - start_time_1 > 5: | |||
raise Exception("反馈线程程启动异常") | |||
continue | |||
''' | |||
Online (real-time) analysis logic | |||
''' | |||
def online(self, message, analysisType): | |||
if YmlConstant.START == message.get(YmlConstant.COMMAND): | |||
gpu_ids = GPUtils.check_gpu_resource(self.__context) | |||
self.startOnlineProcess(message, gpu_ids, analysisType) | |||
elif YmlConstant.STOP == message.get(YmlConstant.COMMAND): | |||
self.stopOnlineProcess(message) | |||
if "start" == message.get("command"): | |||
if self.__resource_status or len(self.__listeningProcesses) >= int(self.__context["task"]["limit"]): | |||
raise ServiceException(ExceptionType.NO_RESOURCES.value[0], | |||
ExceptionType.NO_RESOURCES.value[1]) | |||
self.startOnlineProcess(message, analysisType) | |||
elif "stop" == message.get("command"): | |||
self.stopOnlineProcess(message, analysisType) | |||
else: | |||
pass | |||
def offline(self, message, analysisType): | |||
if YmlConstant.START == message.get(YmlConstant.COMMAND): | |||
gpu_ids = GPUtils.check_gpu_resource(self.__context) | |||
self.startOfflineProcess(message, gpu_ids, analysisType) | |||
elif YmlConstant.STOP == message.get(YmlConstant.COMMAND): | |||
self.stopOfflineProcess(message) | |||
if "start" == message.get("command"): | |||
if self.__resource_status or len(self.__listeningProcesses) >= int(self.__context["task"]["limit"]): | |||
raise ServiceException(ExceptionType.NO_RESOURCES.value[0], | |||
ExceptionType.NO_RESOURCES.value[1]) | |||
self.startOfflineProcess(message, analysisType) | |||
elif "stop" == message.get("command"): | |||
self.stopOfflineProcess(message, analysisType) | |||
else: | |||
pass | |||
def image(self, message, analysisType): | |||
if YmlConstant.START == message.get(YmlConstant.COMMAND): | |||
gpu_ids = GPUtils.check_gpu_resource(self.__context) | |||
self.startImageProcess(message, gpu_ids, analysisType) | |||
if "start" == message.get("command"): | |||
self.startImageProcess(message, analysisType) | |||
else: | |||
pass | |||
def recording(self, message, analysisType): | |||
if YmlConstant.START == message.get(YmlConstant.COMMAND): | |||
if "start" == message.get("command"): | |||
logger.info("开始录屏") | |||
check_cpu(self.__base_dir, message.get("request_id")) | |||
GPUtils.check_gpu_resource(message.get("request_id")) | |||
self.startRecordingProcess(message, analysisType) | |||
elif YmlConstant.STOP == message.get(YmlConstant.COMMAND): | |||
self.stopRecordingProcess(message) | |||
elif "stop" == message.get("command"): | |||
self.stopRecordingProcess(message, analysisType) | |||
else: | |||
pass | |||
# 开启录屏进程 | |||
def startRecordingProcess(self, msg, analysisType): | |||
if self.recordingProcesses.get(msg.get(YmlConstant.REQUEST_ID)): | |||
logger.warning("重复任务,请稍后再试!requestId:{}", msg.get(YmlConstant.REQUEST_ID)) | |||
if self.__listeningProcesses.get(msg.get("request_id")): | |||
logger.warning("重复任务,请稍后再试!requestId:{}", msg.get("request_id")) | |||
return | |||
cfg = {YmlConstant.FBQUEUE: self.fbQueue, YmlConstant.CONTEXT: self.__context, YmlConstant.MSG: msg, | |||
YmlConstant.ANALYSE_TYPE: analysisType} | |||
srp = ScreenRecordingProcess(cfg) | |||
param = Param(self.__fbQueue, msg, analysisType, self.__base_dir, self.__context, self.__gpu_name) | |||
srp = ScreenRecordingProcess(param) | |||
srp.start() | |||
self.recordingProcesses[msg.get(YmlConstant.REQUEST_ID)] = srp | |||
self.__listeningProcesses[msg.get("request_id")] = srp | |||
# 结束录屏进程 | |||
def stopRecordingProcess(self, msg): | |||
rdp = self.recordingProcesses.get(msg.get(YmlConstant.REQUEST_ID)) | |||
def stopRecordingProcess(self, msg, analysisType): | |||
rdp = self.__listeningProcesses.get(msg.get("request_id")) | |||
if rdp is None: | |||
logger.warning("未查询到该任务,无法停止任务!requestId:{}", msg.get(YmlConstant.REQUEST_ID)) | |||
logger.warning("未查询到该任务,无法停止任务!requestId:{}", msg.get("request_id")) | |||
putQueue(self.__fbQueue, { | |||
"recording": message_feedback(msg.get("request_id"), | |||
AnalysisStatus.FAILED.value, | |||
analysisType, | |||
ExceptionType.NOT_REQUESTID_TASK_EXCEPTION.value[0], | |||
ExceptionType.NOT_REQUESTID_TASK_EXCEPTION.value[1], | |||
analyse_time=TimeUtils.now_date_to_str())}, msg.get("request_id")) | |||
return | |||
rdp.sendEvent({'command': 'stop'}) | |||
rdp.sendEvent({"command": "stop"}) | |||
def putQueue(queue, result, requestId, enable_ex=True): | |||
try: | |||
queue.put(result, timeout=10) | |||
except Exception: | |||
logger.error("添加队列超时异常:{}, requestId:{}", format_exc(), requestId) | |||
if enable_ex: | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) |
@@ -0,0 +1,3 @@ | |||
dd = {} | |||
print(dd.get('name', 'aaa')) |
@@ -41,4 +41,23 @@ def get_cpu_speed(): | |||
return speed | |||
print(get_cpu_speed()) | |||
# print(get_cpu_speed()) | |||
import psutil | |||
# number of logical CPUs | |||
# print(psutil.cpu_count()) | |||
# number of physical CPU cores | |||
# print(psutil.cpu_count(logical=False)) | |||
# print(psutil.cpu_percent(interval=1, percpu=True)) | |||
# | |||
# print(psutil.virtual_memory()) | |||
# print(psutil.virtual_memory().percent)  # memory usage percentage | |||
# print(psutil.swap_memory()) | |||
# print(psutil.disk_partitions())  # disk partition info | |||
# print(psutil.disk_usage('/'))  # disk usage | |||
# print(psutil.disk_io_counters())  # disk IO counters | |||
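# A small sketch that gathers the metrics listed above into one dict | |||
# (cpu_percent(interval=1) blocks for one second to sample CPU usage). | |||
metrics = { | |||
    "cpu_logical": psutil.cpu_count(), | |||
    "cpu_physical": psutil.cpu_count(logical=False), | |||
    "cpu_percent": psutil.cpu_percent(interval=1), | |||
    "mem_percent": psutil.virtual_memory().percent, | |||
    "swap_percent": psutil.swap_memory().percent, | |||
    "disk_percent": psutil.disk_usage('/').percent, | |||
    "process_count": len(psutil.pids()), | |||
} | |||
print(metrics) | |||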
print(len(psutil.pids())) | |||
@@ -11,12 +11,10 @@ import subprocess as sp | |||
import cv2 | |||
import numpy as np | |||
from aip import AipImageClassify | |||
import sys | |||
from enums.BaiduSdkEnum import BAIDUERRORDATA, VehicleEnumVALUE | |||
from enums.ExceptionEnum import ExceptionType | |||
from enums.ModelTypeEnum import ModelType | |||
from exception.CustomerException import ServiceException | |||
from util.ModelUtils import Model | |||
def get_recording_video_info(url): |
@@ -0,0 +1,27 @@ | |||
import asyncio | |||
import aiohttp | |||
import json | |||
async def get_video_info(url: str) -> dict: | |||
async with aiohttp.ClientSession() as session: | |||
params = { | |||
'format': 'json', | |||
'url': url | |||
} | |||
        async with session.get(url, params=params) as resp:  # use the url/params arguments; note: aiohttp only speaks HTTP(S), so an rtmp:// stream URL cannot be fetched directly | |||
if resp.status == 200: | |||
text = await resp.text() | |||
return json.loads(text) | |||
return {} | |||
async def main(): | |||
urls = ['rtmp://192.168.10.101:19350/rlive/stream_107?sign=NQe66OXS'] # 一个RTMP流URL的列表 | |||
tasks = [get_video_info(url) for url in urls] | |||
results = await asyncio.gather(*tasks) | |||
for result in results: | |||
print(result) | |||
if __name__ == '__main__': | |||
asyncio.run(main()) |
@@ -1,9 +1,22 @@ | |||
aaa={"1":"1","2":"2","3":"3","4": {"1": "4"}} | |||
for i,v in aaa.items(): | |||
if i =="4": | |||
v["1"] = "5" | |||
# aaa={"1":"1","2":"2","3":"3","4": {"1": "4"}} | |||
# for i,v in aaa.items(): | |||
# if i =="4": | |||
# v["1"] = "5" | |||
# | |||
# print(aaa) | |||
class base(object): | |||
__slots__ = ('x',)  # one-element tuple; a bare ('x') is just the string 'x' | |||
var=8 | |||
def __init__(self): | |||
pass | |||
def aa(self): | |||
print("aa") | |||
b=base() | |||
b.x=88 | |||
print(b.aa()) | |||
print(aaa) |
@@ -1,13 +1,24 @@ | |||
import time | |||
def aa(): | |||
while True: | |||
print("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") | |||
return True | |||
# def aa(): | |||
# while True: | |||
# | |||
# print("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") | |||
# return True | |||
# | |||
# | |||
# # aa() | |||
# | |||
# aa = 810*3/2 | |||
# aa_1 = 810*3//2 | |||
# print(aa==aa_1, aa, aa_1) | |||
# aa() | |||
aa = 810*3/2 | |||
aa_1 = 810*3//2 | |||
print(aa==aa_1, aa, aa_1) | |||
while True: | |||
print("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") | |||
time.sleep(2) | |||
num = 1 | |||
while True: | |||
time.sleep(1) | |||
if num == 1: | |||
print("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb") | |||
break |
@@ -0,0 +1,49 @@ | |||
import sys | |||
import time | |||
from collections import namedtuple | |||
from memory_profiler import profile | |||
class A: | |||
__slots__ = ('_name', '_age', '_score') | |||
def __init__(self, name=None, age=None, score=None): | |||
self._name = 'aaaa' | |||
self._age = 'vbba' | |||
self._score = '1111' | |||
def test1(self): | |||
num =1 | |||
while True: | |||
num = num + 1 | |||
if num > 1000000: | |||
break | |||
ddd=self._name | |||
for i in range(100): | |||
ddd | |||
class B(A): | |||
# __slots__ = () | |||
def __init__(self): | |||
super().__init__() | |||
def test(self): | |||
print(self._name) | |||
a= A() | |||
b = B() | |||
b.test() | |||
# print(b._name) | |||
# print(sys.getsizeof(a), sys.getsizeof(b)) | |||
# print(sys.getsizeof(a.__dict__), sys.getsizeof(b.__dict__)) | |||
@profile | |||
def main(): | |||
Point = namedtuple('Point', ('x', 'y', 'z')) | |||
object_list = [Point(i,i,i) for i in range(100000)] | |||
if __name__=='__main__': | |||
# main() | |||
# print(A().__dict__) | |||
ss = time.time() | |||
A().test1() | |||
print(time.time() - ss) |
@@ -0,0 +1,200 @@ | |||
# # # import timeit | |||
# # # class MyClass: | |||
# # # @staticmethod | |||
# # # def my_static_method(): | |||
# # # pass | |||
# # # def my_function(): | |||
# # # pass | |||
# # # # 通过类名调用静态方法 | |||
# # # def test_static_method(): | |||
# # # MyClass.my_static_method() | |||
# # # # 通过函数名调用函数方法 | |||
# # # def test_function(): | |||
# # # my_function() | |||
# # # # 测试执行速度 | |||
# # # print("static method: ", timeit.timeit(test_static_method, number=10000000)) | |||
# # # print("function: ", timeit.timeit(test_function, number=10000000)) | |||
# # import copy | |||
# # import os | |||
# # import pickle | |||
# # import time | |||
# # import timeit | |||
# # import traceback | |||
# # from collections import namedtuple | |||
# # from concurrent.futures import ThreadPoolExecutor | |||
# # from multiprocessing import Queue | |||
# # from threading import Thread | |||
# # | |||
# # import cv2 | |||
# # import psutil | |||
# # | |||
# # # def mu(): | |||
# # # aa ={ | |||
# # # 'half': 1, | |||
# # # 'iou_thres': (1,2,3), | |||
# # # 'allowedList': [1,3], | |||
# # # 'aa': {"1":"2"}, | |||
# # # } | |||
# # # bb = aa.copy() | |||
# # # def mu1(): | |||
# # # aa ={ | |||
# # # 'half': 1, | |||
# # # 'iou_thres': (1,2,3), | |||
# # # 'allowedList': [1,3], | |||
# # # 'aa': {"1":"2"}, | |||
# # # } | |||
# # # # copy.deepcopy(aa) | |||
# # # print(pickle.loads(pickle.dumps(aa))) | |||
# # # mu1() | |||
# # # print("static method1: ", timeit.timeit(mu, number=100000)) | |||
# # # print("static method2: ", timeit.timeit(mu1, number=100000)) | |||
# # | |||
# # | |||
# # # def aa(): | |||
# # # Point = namedtuple('Point', ('x', 'y', 'z')) | |||
# # # aa = Point("1111111111", "22222222222", "333333333333") | |||
# # # aa.x | |||
# # # aa.y | |||
# # # aa.z | |||
# # # def aa1(): | |||
# # # aa = ["1111111111", "22222222222", "333333333333"] | |||
# # # aa[0] | |||
# # # aa[1] | |||
# # # aa[2] | |||
# # # print("static method1: ", timeit.timeit(aa, number=100000)) | |||
# # # print("static method2: ", timeit.timeit(aa1, number=100000)) | |||
# # # aa=[] | |||
# # # aa[28] = "11111" | |||
# # # print(aa) | |||
# # # def aa1(): | |||
# # # # 获取当前进程的父进程的PID | |||
# # # if psutil.pid_exists(psutil.Process(os.getpid()).ppid()): | |||
# # # print("11111") | |||
# # # else: | |||
# # # print("2222") | |||
# # # print("static method2: ", timeit.timeit(aa1, number=100)) | |||
# # # def aa(): | |||
# # # num=1 | |||
# # # while num < 100: | |||
# # # num+=1 | |||
# # # time.sleep(100) | |||
# # # dd = Thread(target=aa, args=()) | |||
# # # dd.setDaemon(True) | |||
# # # dd.start() | |||
# # # try: | |||
# # # dd.join(1) | |||
# # # except Exception: | |||
# # # print(traceback.format_exc()) | |||
# # # a = Queue() | |||
# # # aa = {"aa": "11"} | |||
# # # a.put(aa) | |||
# # # bb = a.get() | |||
# # # bb["aa"] = "2222" | |||
# # # print(aa) | |||
# # # print(bb) | |||
# # | |||
# # | |||
# # # def dd(): | |||
# # # w = ["1111", "2222", "3333"] | |||
# # # a = w[2] | |||
# # # | |||
# # # | |||
# # # def cc(): | |||
# # # aa = {"aa": "1111", "bb": "2222", "cc": "3333"} | |||
# # # t= aa["cc"] | |||
# # # | |||
# # # print("static method1: ", timeit.timeit(dd, number=1000)) | |||
# # # print("static method2: ", timeit.timeit(cc, number=1000)) | |||
# # import numpy as np | |||
# # | |||
# # | |||
# # # # 创建一个numpy数组 | |||
# # # arr1 = np.array([[[1,2,3]], [[2,3, 4]]]) | |||
# # # # 使用copy()方法深度拷贝数组 | |||
# # # arr2 = arr1.copy() | |||
# # # # 修改arr2中的元素 | |||
# # # arr2[0][0][1] = 5 | |||
# # # # 打印arr1和arr2 | |||
# # # print("arr1:", arr1) | |||
# # # print("arr2:", arr2) | |||
# # # def cop2(): | |||
# # # arr1 = np.array([[1, 2], [3, 4]]) | |||
# # # arr2 = arr1 | |||
# # # def cop(): | |||
# # # arr1 = np.array([[1, 2], [3, 4]]) | |||
# # # arr2 = arr1.copy() | |||
# # # def cop1(): | |||
# # # arr1 = np.array([[1, 2], [3, 4]]) | |||
# # # arr2 = copy.deepcopy(arr1) | |||
# # # print("static method1: ", timeit.timeit(cop2, number=1000)) | |||
# # # print("static method1: ", timeit.timeit(cop, number=1000)) | |||
# # # print("static method2: ", timeit.timeit(cop1, number=1000)) | |||
# # | |||
# # # aa = {} | |||
# # # def dd(aa): | |||
# # # aa["aa"] = 1 | |||
# # # | |||
# # # dd(aa) | |||
# # # print(aa) | |||
# # | |||
# # def aa(num): | |||
# # while True: | |||
# # time.sleep(4) | |||
# # num+= 1 | |||
# # raise Exception("1111") | |||
# # | |||
# # | |||
# # def dd(): | |||
# # num = 1 | |||
# # with ThreadPoolExecutor(max_workers=1) as t: | |||
# # ddd = t.submit(aa, num) | |||
# # while True: | |||
# # ddd.result() | |||
# # time.sleep(1) | |||
# # print(num) | |||
# # | |||
# # image = cv2.imread(r'D:\tuoheng\codenew\tuoheng_alg\image\logo.png') | |||
# # image1 = np.array(image) | |||
# # or_result, or_image = cv2.imencode(".jpg", image1) | |||
# # aa = or_image.tobytes() | |||
# # | |||
# # numpy_array = np.frombuffer(aa, dtype=np.uint8) | |||
# # img_bgr = cv2.cvtColor(numpy_array, cv2.COLOR_RGB2BGR) | |||
# # cv2.sh | |||
# # print(img_bgr) | |||
# import os | |||
# import time | |||
# from os import getpid | |||
# | |||
# import psutil | |||
# | |||
# | |||
# from multiprocessing import Process | |||
# | |||
# def fun2(name, pid): | |||
# while True: | |||
# time.sleep(1) | |||
# print("fun2", getpid(), os.getppid(), psutil.Process(getpid()).ppid(), psutil.pid_exists(psutil.Process(getpid()).ppid())) | |||
# | |||
# def fun1(name): | |||
# print('测试%s多进程' %name) | |||
# p = Process(target=fun2,args=('Python',getpid(),)) #实例化进程对象 | |||
# # p.daemon = True | |||
# p.start() | |||
# print("funn1", getpid(), p.pid) | |||
# | |||
# | |||
# | |||
# if __name__ == '__main__': | |||
# p = Process(target=fun1,args=('Python',)) #实例化进程对象 | |||
# # p.daemon = True | |||
# p.start() | |||
# | |||
# | |||
# print('结束测试', os.getpid()) | |||
# | |||
listeningProcesses={"11111": "1111", "2222": "2222"} | |||
p = list(listeningProcesses.keys()) | |||
p.reverse() | |||
print(p) |
@@ -3,18 +3,27 @@ import csv | |||
import numpy as np | |||
with open("data.csv") as f: | |||
csv_reader = csv.reader(f) | |||
for row in csv_reader: | |||
print(row) | |||
import pandas as pd | |||
df = pd.read_csv("data.csv", encoding="utf-8") | |||
print(df) | |||
df_array = np.array(df) # 将pandas读取的数据转化为array | |||
df_list = df_array.tolist() # 将数组转化为list | |||
print(df_list) | |||
x = df[['name', 'age']] | |||
print(x) | |||
y = df[['name']] | |||
print(y) | |||
# with open("data.csv") as f: | |||
# csv_reader = csv.reader(f) | |||
# for row in csv_reader: | |||
# print(row) | |||
# | |||
# import pandas as pd | |||
# df = pd.read_csv("data.csv", encoding="utf-8") | |||
# print(df) | |||
# df_array = np.array(df) # 将pandas读取的数据转化为array | |||
# df_list = df_array.tolist() # 将数组转化为list | |||
# print(df_list) | |||
# x = df[['name', 'age']] | |||
# print(x) | |||
# y = df[['name']] | |||
# print(y) | |||
# def aa(): | |||
# num=3 | |||
# while True: | |||
# if num==3: | |||
# return | |||
# print("aaaaaaaaaa") | |||
# | |||
# aa() | |||
# print("11111111") |
@@ -1,29 +1,71 @@ | |||
""" | |||
模式 描述 | |||
r 以只读的形式打开文件,文件的指针在开头 | |||
r+ 读写,文件指针在开头 | |||
rb 以二进制的形式,只读文件指针在开头 | |||
w 只写,文件不存在,则创建新的,存在则覆盖,指针在开头 | |||
w+ 读写,文件不存在,则创建新的,存在则覆盖,指针在开头 | |||
wb 只写,以二进制的形式 | |||
a 追加模式,文件指针在结尾 | |||
a+ 读写,不存在则创建,存在直接追加 | |||
ab 以二进制形式追加 | |||
# """ | |||
# 模式 描述 | |||
# r 以只读的形式打开文件,文件的指针在开头 | |||
# r+ 读写,文件指针在开头 | |||
# rb 以二进制的形式,只读文件指针在开头 | |||
# w 只写,文件不存在,则创建新的,存在则覆盖,指针在开头 | |||
# w+ 读写,文件不存在,则创建新的,存在则覆盖,指针在开头 | |||
# wb 只写,以二进制的形式 | |||
# a 追加模式,文件指针在结尾 | |||
# a+ 读写,不存在则创建,存在直接追加 | |||
# ab 以二进制形式追加 | |||
# | |||
# 1. close(): 关闭文件---非常重要 | |||
# 2. read([count]): 读取文件中的内容 count:字节数量 | |||
# 3. readlines(): 读取所有内容,打包成列表 | |||
# 4. readline(): 读取一行数据,追加读取,读取过的不能再次读取 | |||
# 5. seek(offset,[from]): 修改指针的位置:从from位置移动了offset个字节, | |||
# from:0则表示从起始位置,1则表示从当前位置开始,2则表示从末尾开始 | |||
# offset:要移动的字节数 | |||
# 6. write(): 向文件中写入内容 | |||
# """ | |||
import json | |||
import sys | |||
import time | |||
import traceback | |||
from collections import namedtuple | |||
from queue import Queue | |||
1. close(): close the file -- very important | |||
2. read([count]): read the file contents; count = number of bytes | |||
3. readlines(): read everything and return it as a list of lines | |||
4. readline(): read one line; each call continues from the last position, lines already read are not returned again | |||
5. seek(offset, [from]): move the pointer by offset bytes from the "from" position; | |||
   from: 0 = start of file, 1 = current position, 2 = end of file | |||
   offset: number of bytes to move | |||
6. write(): write content to the file | |||
""" | |||
import yaml | |||
# 写文件 | |||
# with open("hello.txt", 'w') as f: | |||
# f.write("hello world") | |||
# 读文件 | |||
with open("hello.txt", 'r') as f: | |||
print(f.read()) | |||
# | |||
# import yaml | |||
# | |||
# | |||
# # 写文件 | |||
# # with open("hello.txt", 'w') as f: | |||
# # f.write("hello world") | |||
# import pandas as pd | |||
# # 读文件 | |||
ss = time.time() | |||
# with open(r"D:\tuoheng\codenew\tuoheng_alg\config\dsp_application.json",'r', encoding='utf-8') as f: | |||
# data = yaml.safe_load(f) | |||
with open(r"D:\tuoheng\codenew\tuoheng_alg\test\读写\dsp_application.json", 'r', encoding='utf-8') as f: | |||
a = json.loads(f.read()) | |||
print(a) | |||
print(time.time()-ss) | |||
# # try: | |||
# # aa = Queue(1) | |||
# # aa.put(1, timeout=2) | |||
# # aa.put(2, block=True, timeout=5) | |||
# # except Exception as e: | |||
# # traceback_str = traceback.format_exc() | |||
# # print("aaaaaaaaaaaaaa", traceback_str) | |||
# import time | |||
# class a(): | |||
# def __init__(self, value): | |||
# self.value = value | |||
# | |||
# def test(self): | |||
# num = 0 | |||
# aa = self.value | |||
# while True: | |||
# num += 1 | |||
# bb = aa | |||
# if num > 10000000: | |||
# break | |||
# ss = time.time() | |||
# a("1111").test() | |||
# print(time.time()-ss) | |||
envs = ('dev', 'test', 'prod') | |||
print('dev1' in envs) |
@@ -0,0 +1,234 @@ | |||
{ | |||
"dsp": {"active": "dev"}, | |||
"kafka": { | |||
"topic": { | |||
"dsp-alg-online-tasks-topic": "dsp-alg-online-tasks", | |||
"dsp-alg-offline-tasks-topic": "dsp-alg-offline-tasks", | |||
"dsp-alg-image-tasks-topic": "dsp-alg-image-tasks", | |||
"dsp-alg-results-topic": "dsp-alg-task-results", | |||
"dsp-recording-task-topic": "dsp-recording-task", | |||
"dsp-recording-result-topic": "dsp-recording-result" | |||
} | |||
} | |||
} |
@@ -1,16 +1,16 @@ | |||
import os | |||
import sys | |||
from util import YmlUtils, LogUtils | |||
from loguru import logger | |||
print(os.getcwd()) | |||
print(os.path.relpath(__file__)) | |||
base_dir = os.path.dirname(os.path.realpath(sys.argv[0])) | |||
content = YmlUtils.getConfigs(base_dir + "/../../") | |||
LogUtils.init_log(content) | |||
try: | |||
2/0 | |||
except Exception as e: | |||
logger.exception("异常信息:{}", e) | |||
# import os | |||
# import sys | |||
# | |||
# from util import RWUtils, LogUtils | |||
# from loguru import logger | |||
# print(os.getcwd()) | |||
# | |||
# print(os.path.relpath(__file__)) | |||
# base_dir = os.path.dirname(os.path.realpath(sys.argv[0])) | |||
# content = YmlUtils.getConfigs(base_dir + "/../../") | |||
# LogUtils.init_log(content) | |||
# | |||
# try: | |||
# 2/0 | |||
# except Exception as e: | |||
# logger.exception("异常信息:{}", e) |
@@ -13,11 +13,14 @@ import numpy as np | |||
# print(tl)c | |||
# print(box[0][1]) | |||
import cv2 | |||
ai_video_file = cv2.VideoWriter(r"C:\Users\chenyukun\Desktop\fsdownload\aa.mp4", cv2.VideoWriter_fourcc(*'mp4v'), 25, | |||
(1920,1080)) | |||
# ai_video_file.set(cv2.VIDEOWRITER_PROP_BITRATE, 4000) | |||
ai_video_file.set(cv2.CAP_PROP_BITRATE, 4000) | |||
ai_video_file.set(cv2.VIDEOWRITER_PROP_QUALITY, 80) | |||
print(help(cv2.VideoWriter.set)) | |||
print(dir(cv2)) | |||
print(help(cv2)) | |||
# ai_video_file = cv2.VideoWriter(r"C:\Users\chenyukun\Desktop\fsdownload\aa.mp4", cv2.VideoWriter_fourcc(*'mp4v'), 25, | |||
# (1920,1080)) | |||
# # ai_video_file.set(cv2.VIDEOWRITER_PROP_BITRATE, 4000) | |||
# ai_video_file.set(cv2.CAP_PROP_BITRATE, 4000) | |||
# ai_video_file.set(cv2.VIDEOWRITER_PROP_QUALITY, 80) | |||
# print(help(cv2.VideoWriter.set)) | |||
# print(dir(cv2)) | |||
# print(help(cv2)) | |||
print(bool(0)) | |||
print(False) |
@@ -1,37 +1,39 @@ | |||
# -*- coding: utf-8 -*- | |||
from json import loads | |||
from traceback import format_exc | |||
import oss2 | |||
import time | |||
from aliyunsdkvod.request.v20170321.GetPlayInfoRequest import GetPlayInfoRequest | |||
from loguru import logger | |||
from common import YmlConstant | |||
from exception.CustomerException import ServiceException | |||
from enums.ExceptionEnum import ExceptionType | |||
import json | |||
from aliyunsdkcore.client import AcsClient | |||
from aliyunsdkvod.request.v20170321 import GetPlayInfoRequest | |||
from util.RWUtils import getConfigs | |||
from vodsdk.AliyunVodUploader import AliyunVodUploader | |||
from vodsdk.UploadVideoRequest import UploadVideoRequest | |||
class AliyunOssSdk: | |||
def __init__(self, context, requestId): | |||
# LogUtils.init_log(context) | |||
self.__context = context | |||
__slots__ = ('bucket', '__requestId', '__aliyun_config') | |||
def __init__(self, base_dir, requestId): | |||
self.bucket = None | |||
self.__requestId = requestId | |||
self.__aliyun_config = getConfigs(base_dir, 'config/dsp_aliyun.json') | |||
def get_oss_bucket(self): | |||
if self.bucket is None: | |||
logger.info("初始化oss桶, requestId:{}", self.__requestId) | |||
auth = oss2.Auth(YmlConstant.get_aliyun_access_key(self.__context), | |||
YmlConstant.get_aliyun_access_secret(self.__context)) | |||
self.bucket = oss2.Bucket(auth, YmlConstant.get_aliyun_oss_endpoint(self.__context), | |||
YmlConstant.get_aliyun_oss_bucket(self.__context), | |||
connect_timeout=YmlConstant.get_aliyun_oss_connect_timeout(self.__context)) | |||
auth = oss2.Auth(self.__aliyun_config["access_key"], self.__aliyun_config["access_secret"]) | |||
self.bucket = oss2.Bucket(auth, self.__aliyun_config["oss"]["endpoint"], | |||
self.__aliyun_config["oss"]["bucket"], | |||
connect_timeout=self.__aliyun_config["oss"]["connect_timeout"]) | |||
def sync_upload_file(self, updatePath, fileByte): | |||
logger.info("开始上传文件到oss, requestId:{}", self.__requestId) | |||
@@ -48,19 +50,21 @@ class AliyunOssSdk: | |||
time.sleep(1) | |||
logger.info("上传文件到oss失败, 重试次数:{}, requestId:{}", retry_count, self.__requestId) | |||
if retry_count > MAX_RETRIES: | |||
logger.exception("上传文件到oss重试失败:{}, requestId:{}", e, self.__requestId) | |||
logger.error("上传文件到oss重试失败:{}, requestId:{}", format_exc(), self.__requestId) | |||
raise e | |||
class ThAliyunVodSdk: | |||
def __init__(self, context, requestId): | |||
# LogUtils.init_log(context) | |||
self.__context = context | |||
__slots__ = ('__aliyun_config', '__requestId', "__active") | |||
def __init__(self, base_dir, requestId, active): | |||
self.__requestId = requestId | |||
self.__active = active | |||
self.__aliyun_config = getConfigs(base_dir, 'config/dsp_aliyun.json') | |||
def init_vod_client(self, accessKeyId, accessKeySecret): | |||
regionId = YmlConstant.get_aliyun_vod_ecsRegionId(self.__context) | |||
regionId = self.__aliyun_config["vod"]["ecsRegionId"] | |||
return AcsClient(accessKeyId, accessKeySecret, regionId, auto_retry=True, max_retry_time=3, timeout=30) | |||
def get_play_info(self, videoId): | |||
@@ -68,13 +72,13 @@ class ThAliyunVodSdk: | |||
start = time.time() | |||
while True: | |||
try: | |||
clt = self.init_vod_client(YmlConstant.get_aliyun_access_key(self.__context), | |||
YmlConstant.get_aliyun_access_secret(self.__context)) | |||
clt = self.init_vod_client(self.__aliyun_config["access_key"], | |||
self.__aliyun_config["access_secret"]) | |||
request: GetPlayInfoRequest = GetPlayInfoRequest.GetPlayInfoRequest() | |||
request.set_accept_format('JSON') | |||
request.set_VideoId(videoId) | |||
request.set_AuthTimeout(3600 * 5) | |||
response = json.loads(clt.do_action_with_exception(request)) | |||
response = loads(clt.do_action_with_exception(request)) | |||
play_url = response["PlayInfoList"]["PlayInfo"][0]["PlayURL"] | |||
logger.info("获取视频地址成功,视频地址: {}, requestId: {}", play_url, self.__requestId) | |||
return play_url | |||
@@ -83,27 +87,27 @@ class ThAliyunVodSdk: | |||
time.sleep(5) | |||
current_time = time.time() | |||
if "HTTP Status: 403" not in str(e): | |||
logger.error("获取视频地址失败: {}, requestId: {}", str(e), self.__requestId) | |||
logger.error("获取视频地址失败: {}, requestId: {}", format_exc(), self.__requestId) | |||
raise ServiceException(ExceptionType.GET_VIDEO_URL_EXCEPTION.value[0], | |||
ExceptionType.GET_VIDEO_URL_EXCEPTION.value[1]) | |||
if "HTTP Status: 403" in str(e) and ("UploadFail" in str(e) or "TranscodeFail" in str(e)): | |||
logger.error("获取视频地址失败: {}, requestId: {}", str(e), self.__requestId) | |||
logger.error("获取视频地址失败: {}, requestId: {}", format_exc(), self.__requestId) | |||
raise ServiceException(ExceptionType.GET_VIDEO_URL_EXCEPTION.value[0], | |||
ExceptionType.GET_VIDEO_URL_EXCEPTION.value[1]) | |||
diff_time = current_time - start | |||
if diff_time > 60 * 60 * 5: | |||
logger.error("获取视频地址失败超时异常: {},超时时间:{}, requestId: {}", str(e), diff_time, | |||
self.__requestId) | |||
logger.error("获取视频地址失败超时异常: {},超时时间:{}, requestId: {}", format_exc(), | |||
diff_time, self.__requestId) | |||
raise ServiceException(ExceptionType.GET_VIDEO_URL_TIMEOUT_EXCEPTION.value[0], | |||
ExceptionType.GET_VIDEO_URL_TIMEOUT_EXCEPTION.value[1]) | |||
def upload_local_video(self, filePath, file_title): | |||
logger.info("开始执行vod视频上传, filePath: {}, requestId: {}", filePath, self.__requestId) | |||
uploader = AliyunVodUploader(YmlConstant.get_aliyun_access_key(self.__context), | |||
YmlConstant.get_aliyun_access_secret(self.__context), self.__requestId) | |||
uploader = AliyunVodUploader(self.__aliyun_config["access_key"], | |||
self.__aliyun_config["access_secret"], self.__requestId) | |||
uploadVideoRequest: UploadVideoRequest = UploadVideoRequest(filePath, file_title) | |||
logger.info("视频分类:{}", YmlConstant.get_aliyun_vod_cateId(self.__context)) | |||
uploadVideoRequest.setCateId(YmlConstant.get_aliyun_vod_cateId(self.__context)) | |||
logger.info("视频分类:{}, requestId:{}", self.__aliyun_config["vod"][self.__active]["CateId"], self.__requestId) | |||
uploadVideoRequest.setCateId(self.__aliyun_config["vod"][self.__active]["CateId"]) | |||
# A video cover can also be set: for a local or web image, use UploadImageRequest to upload it to VOD and obtain an ImageURL | |||
# Example ImageURL: https://example.com/sample-****.jpg | |||
# uploadVideoRequest.setCoverURL('<your Image URL>') | |||
@@ -116,10 +120,11 @@ class ThAliyunVodSdk: | |||
result = uploader.uploadLocalVideo(uploadVideoRequest) | |||
logger.info("vod视频上传成功, videoId:{}, requestId:{}", result.get("VideoId"), self.__requestId) | |||
return result.get("VideoId") | |||
except Exception as e: | |||
except Exception: | |||
retry_count += 1 | |||
time.sleep(1) | |||
logger.error("vod视频上传失败:{},重试次数:{}, requestId:{}", str(e), retry_count, self.__requestId) | |||
logger.error("vod视频上传失败:{},重试次数:{}, requestId:{}", format_exc(), retry_count, | |||
self.__requestId) | |||
if retry_count >= MAX_RETRIES: | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
@@ -129,21 +134,3 @@ class ThAliyunVodSdk: | |||
if videoId is None or len(videoId) == 0: | |||
return None | |||
return self.get_play_info(videoId) | |||
# if __name__ == "__main__": | |||
# with open('/home/th/tuo_heng/prod/tuoheng_alg/dsp_application.yml', 'r', encoding='"utf-8"') as f: | |||
# file_content = f.read() | |||
# context = yaml.load(file_content, yaml.FullLoader) | |||
# aliyunVodSdk = ThAliyunVodSdk(context, logger, "11111111111") | |||
# aliyunVodSdk = ThAliyunVodSdk(context, logger, "11111111111") | |||
# | |||
# upload_video_thread_or = Common(context, aliyunVodSdk.get_play_url, '/home/th/tuo_heng/prod/dsp/video1/20230510185733460569_on_ai_592c3dd7eb404af9a744c5543e0e006a.mp4', "orOnLineVideo") | |||
# upload_video_thread_ai = Common(context, aliyunVodSdk.get_play_url, '/home/th/tuo_heng/prod/dsp/video1/20230510185733460569_on_or_592c3dd7eb404af9a744c5543e0e006a.mp4', "aiOnLineVideo") | |||
# upload_video_thread_or.setDaemon(True) | |||
# upload_video_thread_ai.setDaemon(True) | |||
# upload_video_thread_or.start() | |||
# upload_video_thread_ai.start() | |||
# or_url = upload_video_thread_or.get_result() | |||
# ai_url = upload_video_thread_ai.get_result() | |||
# print(or_url) | |||
# print(ai_url) |
@@ -0,0 +1,70 @@ | |||
from os.path import dirname | |||
import psutil | |||
from loguru import logger | |||
from enums.ExceptionEnum import ExceptionType | |||
from exception.CustomerException import ServiceException | |||
def check_cpu(base_dir, requestId=None): | |||
path = dirname(base_dir) | |||
cpu_use = psutil.cpu_percent() | |||
cpu_mem = psutil.virtual_memory().percent | |||
cpu_swap = psutil.swap_memory().percent | |||
cpu_disk = psutil.disk_usage(path).percent | |||
if float(cpu_use) > 60 or float(cpu_mem) > 80 or cpu_swap > 85 or cpu_disk > 90: | |||
if requestId: | |||
logger.info("""############################################################################################### | |||
CPU 使用率:{}, 内存使用:{}, SWAP内存使用率:{}, 服务磁盘使用率:{}, requestId:{} | |||
###############################################################################################""", | |||
cpu_use, cpu_mem, cpu_swap, cpu_disk, requestId) | |||
else: | |||
logger.info("""############################################################################################### | |||
CPU 使用率:{}, 内存使用:{}, SWAP内存使用率:{}, 服务磁盘使用率:{} | |||
###############################################################################################""", | |||
cpu_use, cpu_mem, cpu_swap, cpu_disk) | |||
raise ServiceException(ExceptionType.NO_CPU_RESOURCES.value[0], | |||
ExceptionType.NO_CPU_RESOURCES.value[1]) | |||
def print_cpu_ex_status(base_dir, requestId=None): | |||
path = dirname(base_dir) | |||
cpu_use = psutil.cpu_percent() | |||
cpu_mem = psutil.virtual_memory().percent | |||
cpu_swap = psutil.swap_memory().percent | |||
cpu_disk = psutil.disk_usage(path).percent | |||
result = False | |||
if float(cpu_use) > 60 or float(cpu_mem) > 70 or cpu_swap > 85 or cpu_disk > 90: | |||
result = True | |||
if requestId: | |||
logger.info("""############################################################################################### | |||
CPU 使用率:{}, 内存使用:{}, SWAP内存使用率:{}, 服务磁盘使用率:{}, requestId:{} | |||
###############################################################################################""", | |||
cpu_use, cpu_mem, cpu_swap, cpu_disk, requestId) | |||
else: | |||
logger.info("""############################################################################################### | |||
CPU 使用率:{}, 内存使用:{}, SWAP内存使用率:{}, 服务磁盘使用率:{} | |||
###############################################################################################""", | |||
cpu_use, cpu_mem, cpu_swap, cpu_disk) | |||
return result | |||
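# Hedged usage sketch (not part of the service flow): check_cpu raises a | |||
# ServiceException once any of the thresholds above is exceeded, while | |||
# print_cpu_ex_status only logs the readings and returns a boolean, e.g. | |||
#   try: | |||
#       check_cpu(base_dir, requestId="demo-request") | |||
#   except ServiceException as s: | |||
#       print(s.code, s.msg) | |||
#   print(print_cpu_ex_status(base_dir)) | |||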
def print_cpu_status(base_dir, requestId=None): | |||
path = dirname(base_dir) | |||
cpu_use = psutil.cpu_percent() | |||
cpu_mem = psutil.virtual_memory().percent | |||
cpu_swap = psutil.swap_memory().percent | |||
cpu_lj = psutil.cpu_count() # 逻辑cpu个数 | |||
cpu_wl = psutil.cpu_count(logical=False) # 物理cpu个数 | |||
cpu_disk = psutil.disk_usage(path).percent | |||
if requestId: | |||
logger.info("""############################################################################################### | |||
CPU 使用率:{}, 内存使用:{}, SWAP内存使用率:{}, 逻辑个数:{}, 物理个数:{}, 服务磁盘使用率:{}, requestId:{} | |||
###############################################################################################""", cpu_use, | |||
cpu_mem, cpu_swap, cpu_lj, cpu_wl, cpu_disk, requestId) | |||
else: | |||
logger.info("""############################################################################################### | |||
CPU 使用率:{}, 内存使用:{}, SWAP内存使用率:{}, 逻辑个数:{}, 物理个数:{}, 服务磁盘使用率:{} | |||
###############################################################################################""", cpu_use, | |||
cpu_mem, cpu_swap, cpu_lj, cpu_wl, cpu_disk) |
@@ -1,6 +1,7 @@ | |||
# -*- coding: utf-8 -*- | |||
import json | |||
import time | |||
from json import loads | |||
from time import time | |||
from traceback import format_exc | |||
import cv2 | |||
import subprocess as sp | |||
@@ -12,16 +13,39 @@ from exception.CustomerException import ServiceException | |||
from enums.ExceptionEnum import ExceptionType | |||
class Cv2Util(): | |||
class Cv2Util: | |||
__slots__ = [ | |||
'pullUrl', | |||
'pushUrl', | |||
'orFilePath', | |||
'aiFilePath', | |||
'p', | |||
'or_video_file', | |||
'ai_video_file', | |||
'fps', | |||
'width', | |||
'height', | |||
'wh', | |||
'h', | |||
'w', | |||
'all_frames', | |||
'bit_rate', | |||
'pull_p', | |||
'requestId', | |||
'p_push_retry_num', | |||
'isGpu', | |||
'read_w_h', | |||
'context', | |||
'p_push_time' | |||
] | |||
def __init__(self, pullUrl=None, pushUrl=None, orFilePath=None, aiFilePath=None, requestId=None, context=None, | |||
gpu_ids=None, log=logger): | |||
gpu_ids=None): | |||
self.pullUrl = pullUrl | |||
self.pushUrl = pushUrl | |||
self.orFilePath = orFilePath | |||
self.aiFilePath = aiFilePath | |||
self.__logger = log | |||
self.cap = None | |||
# self.cap = None | |||
self.p = None | |||
self.or_video_file = None | |||
self.ai_video_file = None | |||
@@ -35,6 +59,7 @@ class Cv2Util(): | |||
self.bit_rate = None | |||
self.pull_p = None | |||
self.requestId = requestId | |||
self.p_push_time = 0 | |||
self.p_push_retry_num = 0 | |||
self.isGpu = False | |||
self.read_w_h = None | |||
@@ -66,7 +91,7 @@ class Cv2Util(): | |||
def get_video_info(self): | |||
try: | |||
if self.pullUrl is None or len(self.pullUrl) == 0: | |||
self.__logger.error("拉流地址不能为空, requestId:{}", self.requestId) | |||
logger.error("拉流地址不能为空, requestId:{}", self.requestId) | |||
raise ServiceException(ExceptionType.PULL_STREAM_URL_EXCEPTION.value[0], | |||
ExceptionType.PULL_STREAM_URL_EXCEPTION.value[1]) | |||
args = ['ffprobe', '-show_format', '-show_streams', '-of', 'json', self.pullUrl] | |||
@@ -74,7 +99,7 @@ class Cv2Util(): | |||
out, err = p.communicate(timeout=20) | |||
if p.returncode != 0: | |||
raise Exception("未获取视频信息!!!!!requestId:" + self.requestId) | |||
probe = json.loads(out.decode('utf-8')) | |||
probe = loads(out.decode('utf-8')) | |||
if probe is None or probe.get("streams") is None: | |||
raise Exception("未获取视频信息!!!!!requestId:" + self.requestId) | |||
# 视频大小 | |||
@@ -104,23 +129,22 @@ class Cv2Util(): | |||
up, down = str(fps).split('/') | |||
self.fps = int(eval(up) / eval(down)) | |||
if self.fps > 30: | |||
self.__logger.info("获取视频FPS大于30帧, FPS:{}, requestId:{}", self.fps, self.requestId) | |||
logger.info("获取视频FPS大于30帧, FPS:{}, requestId:{}", self.fps, self.requestId) | |||
self.fps = 30 | |||
if self.fps < 25: | |||
self.__logger.info("获取视频FPS小于25帧, FPS:{}, requestId:{}", self.fps, self.requestId) | |||
logger.info("获取视频FPS小于25帧, FPS:{}, requestId:{}", self.fps, self.requestId) | |||
self.fps = 25 | |||
# if duration: | |||
# self.duration = float(video_stream['duration']) | |||
# self.bit_rate = int(bit_rate) / 1000 | |||
self.__logger.info("视频信息, width:{}|height:{}|fps:{}|all_frames:{}|bit_rate:{}, requestId:{}", | |||
self.width, | |||
self.height, self.fps, self.all_frames, self.bit_rate, self.requestId) | |||
logger.info("视频信息, width:{}|height:{}|fps:{}|all_frames:{}, requestId:{}", | |||
self.width, self.height, self.fps, self.all_frames, self.requestId) | |||
except ServiceException as s: | |||
self.__logger.error("获取视频信息异常: {}, requestId:{}", s.msg, self.requestId) | |||
logger.error("获取视频信息异常: {}, requestId:{}", s.msg, self.requestId) | |||
self.clear_video_info() | |||
raise s | |||
except Exception as e: | |||
self.__logger.error("获取视频信息异常:{}, requestId:{}", e, self.requestId) | |||
except Exception: | |||
logger.error("获取视频信息异常:{}, requestId:{}", format_exc(), self.requestId) | |||
self.clear_video_info() | |||
''' | |||
@@ -134,7 +158,7 @@ class Cv2Util(): | |||
out, err = p.communicate(timeout=17) | |||
if p.returncode != 0: | |||
raise Exception("未获取视频信息!!!!!requestId:" + self.requestId) | |||
probe = json.loads(out.decode('utf-8')) | |||
probe = loads(out.decode('utf-8')) | |||
if probe is None or probe.get("streams") is None: | |||
raise Exception("未获取视频信息!!!!!requestId:" + self.requestId) | |||
video_stream = next((stream for stream in probe['streams'] if stream.get('codec_type') == 'video'), None) | |||
@@ -156,14 +180,14 @@ class Cv2Util(): | |||
if fps: | |||
up, down = str(fps).split('/') | |||
self.fps = int(eval(up) / eval(down)) | |||
self.__logger.info("视频信息, width:{}|height:{}|fps:{}|all_frames:{}, requestId:{}", self.width, | |||
self.height, self.fps, self.all_frames, self.requestId) | |||
logger.info("视频信息, width:{}|height:{}|fps:{}|all_frames:{}, requestId:{}", self.width, | |||
self.height, self.fps, self.all_frames, self.requestId) | |||
except ServiceException as s: | |||
self.__logger.error("获取视频信息异常: {}, requestId:{}", s.msg, self.requestId) | |||
logger.error("获取视频信息异常: {}, requestId:{}", s.msg, self.requestId) | |||
self.clear_video_info() | |||
raise s | |||
except Exception as e: | |||
self.__logger.exception("获取视频信息异常:{}, requestId:{}", e, self.requestId) | |||
except Exception: | |||
logger.error("获取视频信息异常:{}, requestId:{}", format_exc(), self.requestId) | |||
self.clear_video_info() | |||
def getRecordingFrameConfig(self, fps, width, height): | |||
@@ -183,7 +207,7 @@ class Cv2Util(): | |||
# already initialised -- nothing to do
if self.pull_p: | |||
return | |||
command = ['ffmpeg -re', '-y' | |||
command = ['ffmpeg', '-re', '-y', '-an'
# '-hide_banner', | |||
] | |||
if self.pullUrl.startswith('rtsp://'): | |||
@@ -193,25 +217,24 @@ class Cv2Util(): | |||
command.extend(['-i', self.pullUrl, | |||
'-f', 'rawvideo', | |||
'-pix_fmt', 'bgr24', | |||
'-r', '30', | |||
'-an', | |||
'-r', '25', | |||
'-']) | |||
self.pull_p = sp.Popen(command, stdout=sp.PIPE) | |||
except ServiceException as s: | |||
self.__logger.exception("构建拉流管道异常: {}, requestId:{}", s.msg, self.requestId) | |||
logger.exception("构建拉流管道异常: {}, requestId:{}", s.msg, self.requestId) | |||
self.clear_video_info() | |||
if self.pull_p: | |||
self.__logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
self.pull_p.stdout.close() | |||
self.pull_p.terminate() | |||
self.pull_p.wait() | |||
self.pull_p = None | |||
raise s | |||
except Exception as e: | |||
self.__logger.exception("构建拉流管道异常:{}, requestId:{}", e, self.requestId) | |||
logger.error("构建拉流管道异常:{}, requestId:{}", e, self.requestId) | |||
self.clear_video_info() | |||
if self.pull_p: | |||
self.__logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
self.pull_p.stdout.close() | |||
self.pull_p.terminate() | |||
self.pull_p.wait() | |||
@@ -225,22 +248,22 @@ class Cv2Util(): | |||
if in_bytes is not None and len(in_bytes) > 0: | |||
try: | |||
result = np.frombuffer(in_bytes, np.uint8).reshape(self.read_w_h) | |||
except Exception as ei: | |||
self.__logger.exception("视频格式异常:{}, requestId:{}", ei, self.requestId) | |||
except Exception: | |||
logger.error("视频格式异常:{}, requestId:{}", format_exc(), self.requestId) | |||
raise ServiceException(ExceptionType.VIDEO_RESOLUTION_EXCEPTION.value[0], | |||
ExceptionType.VIDEO_RESOLUTION_EXCEPTION.value[1]) | |||
except ServiceException as s: | |||
if self.pull_p: | |||
self.__logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
self.pull_p.stdout.close() | |||
self.pull_p.terminate() | |||
self.pull_p.wait() | |||
self.pull_p = None | |||
raise s | |||
except Exception as e: | |||
self.__logger.exception("读流异常:{}, requestId:{}", e, self.requestId) | |||
except Exception: | |||
logger.error("读流异常:{}, requestId:{}", format_exc(), self.requestId) | |||
if self.pull_p: | |||
self.__logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
self.pull_p.stdout.close() | |||
self.pull_p.terminate() | |||
self.pull_p.wait() | |||
@@ -258,30 +281,31 @@ class Cv2Util(): | |||
command.extend(['-rtsp_transport', 'tcp']) | |||
command.extend(['-re', | |||
'-y', | |||
'-hwaccel', 'cuda', | |||
'-an', | |||
# '-hwaccel', 'cuda', cuvid | |||
'-c:v', 'h264_cuvid', | |||
# '-resize', self.wah, | |||
'-i', self.pullUrl, | |||
'-f', 'rawvideo', | |||
'-pix_fmt', 'bgr24', | |||
'-r', '30', | |||
'-an', | |||
'-r', '25', | |||
'-']) | |||
self.pull_p = sp.Popen(command, stdout=sp.PIPE) | |||
except ServiceException as s: | |||
self.__logger.exception("构建拉流管道异常: {}, requestId:{}", s.msg, self.requestId) | |||
logger.exception("构建拉流管道异常: {}, requestId:{}", s.msg, self.requestId) | |||
raise s | |||
except Exception as e: | |||
self.__logger.exception("构建拉流管道异常:{}, requestId:{}", e, self.requestId) | |||
logger.error("构建拉流管道异常:{}, requestId:{}", format_exc(), self.requestId) | |||
self.clear_video_info() | |||
if self.pull_p: | |||
self.__logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
logger.info("重试, 关闭拉流管道, requestId:{}", self.requestId) | |||
self.pull_p.stdout.close() | |||
self.pull_p.terminate() | |||
self.pull_p.wait() | |||
self.pull_p = None | |||
def checkconfig(self): | |||
if self.fps is None or self.width is None or self.height is None: | |||
if self.width is None or self.height is None or self.fps is None: | |||
return True | |||
return False | |||
@@ -296,7 +320,7 @@ class Cv2Util(): | |||
result = (np.frombuffer(in_bytes, np.uint8).reshape([self.height, self.width, 3])) | |||
# img = (np.frombuffer(in_bytes, np.uint8)).reshape((self.h, self.w)) | |||
except Exception as ei: | |||
self.__logger.exception("视频格式异常:{}, requestId:{}", ei, self.requestId) | |||
logger.error("视频格式异常:{}, requestId:{}", format_exc(), self.requestId) | |||
raise ServiceException(ExceptionType.VIDEO_RESOLUTION_EXCEPTION.value[0], | |||
ExceptionType.VIDEO_RESOLUTION_EXCEPTION.value[1]) | |||
# result = cv2.cvtColor(img, cv2.COLOR_YUV2BGR_NV12) | |||
@@ -308,14 +332,12 @@ class Cv2Util(): | |||
except Exception as e: | |||
self.clear_video_info() | |||
if self.pull_p: | |||
self.__logger.info("关闭拉流管道, requestId:{}", self.requestId) | |||
logger.info("关闭拉流管道, requestId:{}", self.requestId) | |||
self.pull_p.stdout.close() | |||
self.pull_p.terminate() | |||
self.pull_p.wait() | |||
self.pull_p = None | |||
self.__logger.exception("读流异常:{}, requestId:{}", e, self.requestId) | |||
if result is None: | |||
self.__logger.error("读取的帧:{}, requestId:{}", len(in_bytes), self.requestId) | |||
logger.error("读流异常:{}, requestId:{}", format_exc(), self.requestId) | |||
return result | |||
def close(self): | |||
@@ -326,7 +348,7 @@ class Cv2Util(): | |||
self.pull_p.terminate() | |||
self.pull_p.wait() | |||
self.pull_p = None | |||
self.__logger.info("关闭拉流管道完成, requestId:{}", self.requestId) | |||
logger.info("关闭拉流管道完成, requestId:{}", self.requestId) | |||
if self.p: | |||
if self.p.stdin: | |||
self.p.stdin.close() | |||
@@ -335,15 +357,15 @@ class Cv2Util(): | |||
self.p = None | |||
# self.p.communicate() | |||
# self.p.kill() | |||
self.__logger.info("关闭管道完成, requestId:{}", self.requestId) | |||
logger.info("关闭管道完成, requestId:{}", self.requestId) | |||
if self.or_video_file: | |||
self.or_video_file.release() | |||
self.or_video_file = None | |||
self.__logger.info("关闭原视频写入流完成, requestId:{}", self.requestId) | |||
logger.info("关闭原视频写入流完成, requestId:{}", self.requestId) | |||
if self.ai_video_file: | |||
self.ai_video_file.release() | |||
self.ai_video_file = None | |||
self.__logger.info("关闭AI视频写入流完成, requestId:{}", self.requestId) | |||
logger.info("关闭AI视频写入流完成, requestId:{}", self.requestId) | |||
# 构建 cv2 | |||
# def build_cv2(self): | |||
@@ -404,20 +426,22 @@ class Cv2Util(): | |||
def build_p(self): | |||
try: | |||
if self.pushUrl is None or len(self.pushUrl) == 0: | |||
self.__logger.error("推流地址不能为空, requestId:{}", self.requestId) | |||
logger.error("推流地址不能为空, requestId:{}", self.requestId) | |||
raise ServiceException(ExceptionType.PUSH_STREAM_URL_EXCEPTION.value[0], | |||
ExceptionType.PUSH_STREAM_URL_EXCEPTION.value[1]) | |||
command = ['ffmpeg', | |||
# '-loglevel', 'debug', | |||
'-re', | |||
'-y', | |||
"-an", | |||
'-f', 'rawvideo', | |||
'-vcodec', 'rawvideo', | |||
'-pix_fmt', 'bgr24', | |||
'-thread_queue_size', '1024', | |||
'-s', "{}x{}".format(self.w * 2, self.h), | |||
'-r', str(self.fps), | |||
'-i', '-',  # read raw frames from stdin
'-g', str(30), | |||
'-r', str(25), | |||
'-g', str(25), | |||
'-maxrate', '6000k', | |||
# '-profile:v', 'high', | |||
'-b:v', '5000k', | |||
@@ -432,15 +456,14 @@ class Cv2Util(): | |||
# '-tune', 'zerolatency', # 加速编码速度 | |||
# '-sc_threshold', '0', | |||
'-pix_fmt', 'yuv420p', | |||
"-an", | |||
# '-flvflags', 'no_duration_filesize', | |||
# '-preset', 'fast',  # libx264 quality/speed preset (values range from ultrafast to veryslow)
'-preset', 'p6',  # NVENC quality/speed preset; affects encoding speed and output quality
'-tune', 'll', | |||
'-f', 'flv', | |||
self.pushUrl] | |||
self.__logger.info("fps:{}|height:{}|width:{}|requestId:{}", self.fps, self.height, self.width, | |||
self.requestId) | |||
logger.info("fps:{}|height:{}|width:{}|requestId:{}", self.fps, self.height, self.width, | |||
self.requestId) | |||
self.p = sp.Popen(command, stdin=sp.PIPE, shell=False) | |||
except ServiceException as s: | |||
if self.p: | |||
@@ -448,7 +471,7 @@ class Cv2Util(): | |||
self.p.stdin.close() | |||
self.p.terminate() | |||
self.p.wait() | |||
self.__logger.exception("构建p管道异常: {}, requestId:{}", s.msg, self.requestId) | |||
logger.exception("构建p管道异常: {}, requestId:{}", s.msg, self.requestId) | |||
raise s | |||
except Exception as e: | |||
if self.p: | |||
@@ -456,7 +479,7 @@ class Cv2Util(): | |||
self.p.stdin.close() | |||
self.p.terminate() | |||
self.p.wait() | |||
self.__logger.exception("初始化p管道异常:{}, requestId:{}", e, self.requestId) | |||
logger.error("初始化p管道异常:{}, requestId:{}", format_exc(), self.requestId) | |||
def push_stream(self, frame): | |||
current_retry_num = 0 | |||
@@ -465,86 +488,94 @@ class Cv2Util(): | |||
if self.p is None: | |||
self.build_p() | |||
self.p.stdin.write(frame.tostring()) | |||
self.p_push_retry_num = 0  # reset the retry counter after a successful write
break | |||
except ServiceException as s: | |||
raise s | |||
except Exception as ex: | |||
if self.p_push_time == 0: | |||
self.p_push_time = time.time() | |||
if time.time() - self.p_push_time < 2: | |||
self.p_push_retry_num += 1 | |||
self.p_push_time = time.time() | |||
if time.time() - self.p_push_time > 60: | |||
self.p_push_retry_num = 0 | |||
self.p_push_time = time.time() | |||
logger.error("推流管道异常:{}, requestId: {}", format_exc(), self.requestId) | |||
if self.p: | |||
if self.p.stdin: | |||
self.p.stdin.close() | |||
self.p.terminate() | |||
self.p.wait() | |||
self.p = None | |||
time.sleep(0.1) | |||
self.p_push_retry_num += 1 | |||
try: | |||
if self.p.stdin: | |||
self.p.stdin.close() | |||
self.p.terminate() | |||
self.p.wait() | |||
except Exception:
logger.error("推流管道异常:{}, requestId: {}", format_exc(), self.requestId) | |||
self.p = None | |||
current_retry_num += 1 | |||
if self.p_push_retry_num > 500: | |||
self.__logger.exception("推流进管道异常:{}, requestId: {}", ex, self.requestId) | |||
if self.p_push_retry_num > 100: | |||
logger.error("推流进管道异常:{}, requestId: {}", format_exc(), self.requestId) | |||
raise ServiceException(ExceptionType.PUSH_STREAMING_CHANNEL_IS_OCCUPIED.value[0], | |||
ExceptionType.PUSH_STREAMING_CHANNEL_IS_OCCUPIED.value[1]) | |||
if current_retry_num > 3: | |||
self.__logger.exception("推流进管道异常:{}, requestId: {}", ex, self.requestId) | |||
logger.error("推流进管道异常:{}, requestId: {}", format_exc(), self.requestId) | |||
raise ServiceException(ExceptionType.PUSH_STREAM_EXCEPTION.value[0], | |||
ExceptionType.PUSH_STREAM_EXCEPTION.value[1]) | |||
def build_or_write(self): | |||
try: | |||
if self.orFilePath is not None and self.or_video_file is None: | |||
self.or_video_file = cv2.VideoWriter(self.orFilePath, cv2.VideoWriter_fourcc(*'mp4v'), self.fps, | |||
self.or_video_file = cv2.VideoWriter(self.orFilePath, cv2.VideoWriter_fourcc(*'mp4v'), 25, | |||
(self.w, self.h)) | |||
# self.or_video_file.set(cv2.CAP_PROP_BITRATE, 5000) | |||
if self.or_video_file is None: | |||
self.__logger.error("or_video_file为空, requestId:{}", self.requestId) | |||
logger.error("or_video_file为空, requestId:{}", self.requestId) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
except ServiceException as s: | |||
if self.or_video_file: | |||
self.or_video_file.release() | |||
self.or_video_file = None | |||
self.__logger.exception("构建OR文件写对象异常: {}, requestId:{}", s.msg, self.requestId) | |||
logger.error("构建OR文件写对象异常: {}, requestId:{}", s.msg, self.requestId) | |||
raise s | |||
except Exception as e: | |||
if self.or_video_file: | |||
self.or_video_file.release() | |||
self.or_video_file = None | |||
self.__logger.exception("构建OR文件写对象异常: {}, requestId:{}", e, self.requestId) | |||
logger.error("构建OR文件写对象异常: {}, requestId:{}", format_exc(), self.requestId) | |||
raise e | |||
except: | |||
if self.or_video_file: | |||
self.or_video_file.release() | |||
self.or_video_file = None | |||
self.__logger.exception("构建OR文件写对象异常, requestId:{}", self.requestId) | |||
logger.exception("构建OR文件写对象异常:{}, requestId:{}", format_exc(), self.requestId) | |||
raise Exception("构建OR文件写对象异常") | |||
def build_ai_write(self): | |||
try: | |||
if self.aiFilePath is not None and self.ai_video_file is None: | |||
self.ai_video_file = cv2.VideoWriter(self.aiFilePath, cv2.VideoWriter_fourcc(*'mp4v'), self.fps, | |||
self.ai_video_file = cv2.VideoWriter(self.aiFilePath, cv2.VideoWriter_fourcc(*'mp4v'), 25, | |||
(self.w * 2, self.h)) | |||
# self.ai_video_file.set(cv2.CAP_PROP_BITRATE, 5000) | |||
if self.ai_video_file is None: | |||
self.__logger.error("ai_video_file为空, requestId:{}", self.requestId) | |||
logger.error("ai_video_file为空, requestId:{}", self.requestId) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
except ServiceException as s: | |||
if self.ai_video_file: | |||
self.ai_video_file.release() | |||
self.ai_video_file = None | |||
self.__logger.exception("构建AI文件写对象异常: {}, requestId:{}", s.msg, self.requestId) | |||
logger.error("构建AI文件写对象异常: {}, requestId:{}", s.msg, self.requestId) | |||
raise s | |||
except Exception as e: | |||
if self.ai_video_file: | |||
self.ai_video_file.release() | |||
self.ai_video_file = None | |||
self.__logger.exception("构建AI文件写对象异常: {}, requestId:{}", e, self.requestId) | |||
logger.error("构建AI文件写对象异常: {}, requestId:{}", format_exc(), self.requestId) | |||
raise e | |||
except: | |||
if self.ai_video_file: | |||
self.ai_video_file.release() | |||
self.ai_video_file = None | |||
self.__logger.exception("构建AI文件写对象异常, requestId:{}", self.requestId) | |||
logger.error("构建AI文件写对象异常:{}, requestId:{}", format_exc(), self.requestId) | |||
raise Exception("构建AI文件写对象异常") | |||
def video_or_write(self, frame): | |||
@@ -559,7 +590,8 @@ class Cv2Util(): | |||
raise s | |||
except Exception as ex: | |||
if ai_retry_num > 3: | |||
self.__logger.exception("重新写入原视频视频到本地, 重试失败, requestId: {}", self.requestId) | |||
logger.error("重新写入原视频视频到本地, 重试失败:{}, requestId: {}", format_exc(), | |||
self.requestId) | |||
raise ex | |||
finally: | |||
ai_retry_num += 1 | |||
@@ -576,7 +608,8 @@ class Cv2Util(): | |||
raise s | |||
except Exception as ex: | |||
if ai_retry_num > 3: | |||
self.__logger.exception("重新写入分析后的视频到本地,重试失败, requestId: {}", self.requestId) | |||
logger.exception("重新写入分析后的视频到本地,重试失败:{}, requestId: {}", format_exc(), | |||
self.requestId) | |||
raise ex | |||
finally: | |||
ai_retry_num += 1 | |||
@@ -590,21 +623,377 @@ class Cv2Util(): | |||
def getP(self): | |||
if self.p is None: | |||
self.__logger.error("获取管道为空, requestId:{}", self.requestId) | |||
logger.error("获取管道为空, requestId:{}", self.requestId) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
return self.p | |||
def getOrVideoFile(self): | |||
if self.or_video_file is None: | |||
self.__logger.error("获取原视频写入对象为空, requestId:{}", self.requestId) | |||
logger.error("获取原视频写入对象为空, requestId:{}", self.requestId) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
return self.or_video_file | |||
def getAiVideoFile(self): | |||
if self.ai_video_file is None: | |||
self.__logger.error("获取AI视频写入对象为空, requestId:{}", self.requestId) | |||
logger.error("获取AI视频写入对象为空, requestId:{}", self.requestId) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
return self.ai_video_file | |||
def check_video_stream(width, height): | |||
if width is None or height is None: | |||
return True | |||
return False | |||
def build_video_info(pull_url, requestId): | |||
try: | |||
if pull_url is None or len(pull_url) == 0: | |||
logger.error("拉流地址不能为空, requestId:{}", requestId) | |||
raise ServiceException(ExceptionType.PULL_STREAM_URL_EXCEPTION.value[0], | |||
ExceptionType.PULL_STREAM_URL_EXCEPTION.value[1]) | |||
args = ['ffprobe', '-show_format', '-show_streams', '-of', 'json', pull_url] | |||
pp = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE) | |||
out, err = pp.communicate(timeout=20) | |||
if pp.returncode != 0: | |||
raise Exception("未获取视频信息!!!!") | |||
probe = loads(out.decode('utf-8')) | |||
# 视频大小 | |||
# format = probe['format'] | |||
# size = int(format['size'])/1024/1024 | |||
video_stream = next((stream for stream in probe['streams'] if stream.get('codec_type') == 'video'), None) | |||
if video_stream is None: | |||
raise Exception("未获取视频信息!!!!") | |||
width_new = video_stream.get('width') | |||
height_new = video_stream.get('height') | |||
nb_frames = video_stream.get('nb_frames', 0) | |||
# fps = video_stream.get('r_frame_rate') | |||
duration = video_stream.get('duration') | |||
if duration is not None and float(duration) != float(0): | |||
nb_frames = int(float(duration) * 25) | |||
# bit_rate = video_stream.get('bit_rate') | |||
if width_new is not None and int(width_new) != 0 and height_new is not None and int(height_new) != 0: | |||
width_o = int(width_new) | |||
height_o = int(height_new) | |||
# width_height_3 = width * height * 3 | |||
width_height_3 = width_o * height_o * 3 // 2 | |||
width = width_o | |||
height = height_o * 3 // 2 | |||
all_frames = int(nb_frames) | |||
w_2 = width_o | |||
h_2 = height_o | |||
if width > Constant.width: | |||
w_2 = width_o // 2 | |||
h_2 = height_o // 2 | |||
# up, down = str(fps).split('/') | |||
# self.fps = int(eval(up) / eval(down)) | |||
# if duration: | |||
# self.duration = float(video_stream['duration']) | |||
# self.bit_rate = int(bit_rate) / 1000 | |||
logger.info("视频信息, width:{}|height:{}|all_frames:{}, requestId:{}", width_o, height_o, all_frames, | |||
requestId) | |||
return width, height, width_height_3, all_frames, w_2, h_2 | |||
raise Exception("未获取视频信息!!!!") | |||
except ServiceException as s: | |||
logger.error("获取视频信息异常: {}, requestId:{}", s.msg, requestId) | |||
raise s | |||
except Exception: | |||
logger.error("获取视频信息异常:{}, requestId:{}", format_exc(), requestId) | |||
return None, None, None, 0, None, None | |||
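# Illustrative note (not part of the original diff): the values returned above are sized for an
# NV12 (YUV 4:2:0) decode pipeline -- one frame occupies width * height * 3 / 2 bytes and is
# reshaped to (height * 3 // 2, width) before cv2.COLOR_YUV2BGR_NV12 conversion. Hypothetical numbers:
#     1920x1080  ->  width_height_3 = 1920 * 1080 * 3 // 2 = 3110400 bytes per frame
#                    read shape     = (1080 * 3 // 2, 1920) = (1620, 1920)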
def start_pull_p(pull_url, requestId): | |||
try: | |||
command = ['ffmpeg'] | |||
if pull_url.startswith("rtsp://"): | |||
command.extend(['-rtsp_transport', 'tcp']) | |||
command.extend(['-re', | |||
'-y', | |||
'-an', | |||
# '-hwaccel', 'cuda', cuvid | |||
'-c:v', 'h264_cuvid', | |||
# '-resize', self.wah, | |||
'-i', pull_url, | |||
'-f', 'rawvideo', | |||
# '-pix_fmt', 'bgr24', | |||
'-r', '25', | |||
'-']) | |||
return sp.Popen(command, stdout=sp.PIPE) | |||
except ServiceException as s: | |||
logger.error("构建拉流管道异常: {}, requestId:{}", s.msg, requestId) | |||
raise s | |||
except Exception as e: | |||
logger.error("构建拉流管道异常:{}, requestId:{}", format_exc(), requestId) | |||
raise e | |||
def clear_pull_p(pull_p, requestId): | |||
try: | |||
if pull_p: | |||
logger.info("关闭拉流管道, requestId:{}", requestId) | |||
pull_p.stdout.close() | |||
pull_p.terminate() | |||
pull_p.wait() | |||
logger.info("拉流管道已关闭, requestId:{}", requestId) | |||
except Exception: | |||
logger.error("关闭拉流管道异常: {}, requestId:{}", format_exc(), requestId) | |||
def pull_read_video_stream(pull_p, pull_url, width, height, width_height_3, w_2, h_2, requestId): | |||
result = None | |||
try: | |||
if pull_p is None: | |||
pull_p = start_pull_p(pull_url, requestId) | |||
in_bytes = pull_p.stdout.read(width_height_3) | |||
if in_bytes is not None and len(in_bytes) > 0: | |||
try: | |||
# result = (np.frombuffer(in_bytes, np.uint8).reshape([height * 3 // 2, width, 3])) | |||
result = (np.frombuffer(in_bytes, np.uint8)).reshape((height, width)) | |||
except Exception: | |||
logger.error("视频格式异常:{}, requestId:{}", format_exc(), requestId) | |||
raise ServiceException(ExceptionType.VIDEO_RESOLUTION_EXCEPTION.value[0], | |||
ExceptionType.VIDEO_RESOLUTION_EXCEPTION.value[1]) | |||
result = cv2.cvtColor(result, cv2.COLOR_YUV2BGR_NV12) | |||
# result = cv2.cvtColor(result, cv2.COLOR_RGB2BGR) | |||
if width > Constant.width: | |||
result = cv2.resize(result, (w_2, h_2), interpolation=cv2.INTER_LINEAR) | |||
except ServiceException as s: | |||
clear_pull_p(pull_p, requestId) | |||
raise s | |||
except Exception: | |||
clear_pull_p(pull_p, requestId) | |||
pull_p = None | |||
width = None | |||
height = None | |||
width_height_3 = None | |||
logger.error("读流异常:{}, requestId:{}", format_exc(), requestId) | |||
return result, pull_p, width, height, width_height_3 | |||
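# A minimal read-loop sketch (illustrative, not part of the original diff). It wires
# build_video_info and pull_read_video_stream together the way their return values suggest;
# the URL and requestId are hypothetical. NV12-to-BGR conversion and the optional downscale to
# (w_2, h_2) already happen inside pull_read_video_stream.
if __name__ == '__main__':
    request_id = 'demo-request'
    url = 'rtsp://example/stream'
    width, height, width_height_3, all_frames, w_2, h_2 = build_video_info(url, request_id)
    pull_p = None
    try:
        while not check_video_stream(width, height):
            frame, pull_p, width, height, width_height_3 = pull_read_video_stream(
                pull_p, url, width, height, width_height_3, w_2, h_2, request_id)
            if frame is None:
                break
            # frame is a BGR image here, ready for inference / watermarking
    finally:
        clear_pull_p(pull_p, request_id)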
def video_conjuncing(frame1, frame2): | |||
# frameLeft = cv2.resize(frame1, (int(self.width / 2), int(self.height / 2)), interpolation=cv2.INTER_LINEAR) | |||
# frameRight = cv2.resize(frame2, (int(self.width / 2), int(self.height / 2)), interpolation=cv2.INTER_LINEAR) | |||
# frame_merge = np.hstack((frameLeft, frameRight)) | |||
frame_merge = np.hstack((frame1, frame2)) | |||
return frame_merge | |||
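# Note (illustrative): np.hstack requires both frames to share the same height and dtype; the
# merged frame's width is the sum of the inputs, which is why the push and AI-write helpers
# below size their outputs as width * 2 (e.g. two 640x360 BGR frames yield one 1280x360 frame).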
def build_push_p(push_url, width, height, requestId): | |||
push_p = None | |||
try: | |||
if push_url is None or len(push_url) == 0: | |||
logger.error("推流地址不能为空, requestId:{}", requestId) | |||
raise ServiceException(ExceptionType.PUSH_STREAM_URL_EXCEPTION.value[0], | |||
ExceptionType.PUSH_STREAM_URL_EXCEPTION.value[1]) | |||
command = ['ffmpeg', | |||
# '-loglevel', 'debug', | |||
# '-re', | |||
'-y', | |||
"-an", | |||
'-f', 'rawvideo', | |||
'-vcodec', 'rawvideo', | |||
'-pix_fmt', 'bgr24', | |||
'-thread_queue_size', '1024', | |||
'-s', "{}x{}".format(width * 2, height), | |||
'-i', '-',  # read raw frames from stdin
'-r', str(25), | |||
'-g', str(25), | |||
'-maxrate', '6000k', | |||
# '-profile:v', 'high', | |||
'-b:v', '5000k', | |||
# '-crf', '18', | |||
# '-rc:v', 'vbr', | |||
# '-cq:v', '25', | |||
# '-qmin', '25', | |||
# '-qmax', '25', | |||
'-c:v', 'h264_nvenc', # | |||
'-bufsize', '5000k', | |||
# '-c:v', 'libx264', # 指定视频编码器 | |||
# '-tune', 'zerolatency', # 加速编码速度 | |||
# '-sc_threshold', '0', | |||
# '-rc', 'cbr_ld_hq', | |||
# '-zerolatency', '1', | |||
'-pix_fmt', 'yuv420p', | |||
# '-flvflags', 'no_duration_filesize', | |||
# '-preset', 'fast',  # libx264 quality/speed preset (values range from ultrafast to veryslow)
'-preset', 'p6',  # NVENC quality/speed preset; affects encoding speed and output quality
'-tune', 'll', | |||
'-f', 'flv', | |||
push_url] | |||
logger.info("height:{}|width:{}|requestId:{}", height, width, requestId) | |||
push_p = sp.Popen(command, stdin=sp.PIPE, shell=False) | |||
except ServiceException as s: | |||
if push_p: | |||
if push_p.stdin: | |||
push_p.stdin.close() | |||
push_p.terminate() | |||
push_p.wait() | |||
logger.exception("构建p管道异常: {}, requestId:{}", s.msg, requestId) | |||
raise s | |||
except Exception: | |||
if push_p: | |||
if push_p.stdin: | |||
push_p.stdin.close() | |||
push_p.terminate() | |||
push_p.wait() | |||
push_p = None | |||
logger.error("初始化p管道异常:{}, requestId:{}", format_exc(), requestId) | |||
return push_p | |||
def push_video_stream(frame, push_p, push_url, width, height, p_push_array, requestId): | |||
current_retry_num = 0 | |||
while True: | |||
try: | |||
if push_p is None: | |||
push_p = build_push_p(push_url, width, height, requestId) | |||
push_p.stdin.write(frame.tostring()) | |||
return push_p | |||
except ServiceException as s: | |||
clear_push_p(push_p, requestId) | |||
raise s | |||
except Exception: | |||
if p_push_array[0] == 0: | |||
p_push_array[0] = time() | |||
if time() - p_push_array[0] < 2: | |||
p_push_array[1] += 1 | |||
p_push_array[0] = time() | |||
if time() - p_push_array[0] > 60: | |||
p_push_array[1] = 0 | |||
p_push_array[0] = time() | |||
logger.error("推流管道异常:{}, requestId: {}", format_exc(), requestId) | |||
clear_push_p(push_p, requestId) | |||
push_p = None | |||
current_retry_num += 1 | |||
if p_push_array[1] > 20: | |||
logger.error("推流进管道异常:{}, requestId: {}", format_exc(), requestId) | |||
raise ServiceException(ExceptionType.PUSH_STREAMING_CHANNEL_IS_OCCUPIED.value[0], | |||
ExceptionType.PUSH_STREAMING_CHANNEL_IS_OCCUPIED.value[1]) | |||
if current_retry_num > 3: | |||
logger.error("推流进管道异常:{}, requestId: {}", format_exc(), requestId) | |||
raise ServiceException(ExceptionType.PUSH_STREAM_EXCEPTION.value[0], | |||
ExceptionType.PUSH_STREAM_EXCEPTION.value[1]) | |||
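# Illustrative note: p_push_array is a caller-owned two-slot list acting as a rolling failure
# window shared across calls -- p_push_array[0] stores the time of the last push failure and
# p_push_array[1] the accumulated failure count. Failures arriving within 2 seconds grow the
# count, a quiet gap of more than 60 seconds resets it, more than 20 accumulated failures raise
# PUSH_STREAMING_CHANNEL_IS_OCCUPIED, and more than 3 retries within a single call raise
# PUSH_STREAM_EXCEPTION. A hypothetical caller initialises it once:
#     p_push_array = [0, 0]
#     push_p = push_video_stream(frame_merge, push_p, push_url, w_2, h_2, p_push_array, request_id)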
def clear_push_p(push_p, requestId): | |||
if push_p: | |||
try: | |||
if push_p.stdin: | |||
push_p.stdin.close() | |||
push_p.terminate() | |||
push_p.wait() | |||
except Exception: | |||
logger.error("推流管道异常:{}, requestId: {}", format_exc(), requestId) | |||
def close_or_write_stream(or_video_file, requestId): | |||
try: | |||
if or_video_file: | |||
or_video_file.release() | |||
except Exception: | |||
logger.info("关闭原视频写流管道异常:{}, requestId:{}", format_exc(), requestId) | |||
def close_ai_write_stream(ai_video_file, requestId): | |||
try: | |||
if ai_video_file: | |||
ai_video_file.release() | |||
except Exception: | |||
logger.info("关闭AI视频写流管道异常:{}, requestId:{}", format_exc(), requestId) | |||
def close_all_p(push_p, or_video_file, ai_video_file, requestId): | |||
logger.info("开始停止推流、写流管道!requestId:{}", requestId) | |||
clear_push_p(push_p, requestId) | |||
close_or_write_stream(or_video_file, requestId) | |||
close_ai_write_stream(ai_video_file, requestId) | |||
logger.info("停止推流、写流管道完成!requestId:{}", requestId) | |||
def build_or_video(orFilePath, width, height, requestId): | |||
or_video_file = None | |||
try: | |||
or_video_file = cv2.VideoWriter(orFilePath, cv2.VideoWriter_fourcc(*'mp4v'), 25, (width, height)) | |||
if or_video_file is None: | |||
logger.error("or_video_file为空, requestId:{}", requestId) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
return or_video_file | |||
except ServiceException as s: | |||
if or_video_file: | |||
or_video_file.release() | |||
logger.error("构建OR文件写对象异常: {}, requestId:{}", s.msg, requestId) | |||
raise s | |||
except Exception as e: | |||
if or_video_file: | |||
or_video_file.release() | |||
logger.error("构建OR文件写对象异常: {}, requestId:{}", format_exc(), requestId) | |||
raise e | |||
def write_or_video(frame, orFilePath, or_video_file, width, height, requestId): | |||
retry_num = 0 | |||
while True: | |||
try: | |||
if or_video_file is None: | |||
or_video_file = build_or_video(orFilePath, width, height, requestId) | |||
or_video_file.write(frame) | |||
return or_video_file | |||
except ServiceException as s: | |||
if or_video_file: | |||
or_video_file.release() | |||
raise s | |||
except Exception as ex: | |||
if retry_num > 3: | |||
if or_video_file: | |||
or_video_file.release() | |||
logger.error("重新写入原视频视频到本地, 重试失败:{}, requestId: {}", format_exc(), requestId) | |||
raise ex | |||
finally: | |||
retry_num += 1 | |||
def build_ai_video(aiFilePath, width, height, requestId): | |||
ai_video_file = None | |||
try: | |||
ai_video_file = cv2.VideoWriter(aiFilePath, cv2.VideoWriter_fourcc(*'mp4v'), 25, (width * 2, height)) | |||
if ai_video_file is None: | |||
logger.error("ai_video_file为空, requestId:{}", requestId) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
return ai_video_file | |||
except ServiceException as s: | |||
if ai_video_file: | |||
ai_video_file.release() | |||
logger.error("构建AI文件写对象异常: {}, requestId:{}", s.msg, requestId) | |||
raise s | |||
except Exception as e: | |||
if ai_video_file: | |||
ai_video_file.release() | |||
logger.error("构建AI文件写对象异常: {}, requestId:{}", format_exc(), requestId) | |||
raise e | |||
def write_ai_video(frame, aiFilePath, ai_video_file, width, height, requestId): | |||
retry_num = 0 | |||
while True: | |||
try: | |||
if ai_video_file is None: | |||
ai_video_file = build_ai_video(aiFilePath, width, height, requestId) | |||
ai_video_file.write(frame) | |||
return ai_video_file | |||
except ServiceException as s: | |||
if ai_video_file: | |||
ai_video_file.release() | |||
raise s | |||
except Exception as ex: | |||
if retry_num > 3: | |||
if ai_video_file: | |||
ai_video_file.release() | |||
logger.error("重新写入分析后的视频到本地,重试失败:{}, requestId: {}", format_exc(), requestId) | |||
raise ex | |||
finally: | |||
retry_num += 1 |
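# Lifecycle sketch (illustrative, not part of the original diff): the writers above are lazily
# created on first write and must be released once per request, e.g.
#     or_video_file = write_or_video(frame, or_path, or_video_file, w_2, h_2, request_id)
#     ai_video_file = write_ai_video(frame_merge, ai_path, ai_video_file, w_2, h_2, request_id)
#     ...
#     close_all_p(push_p, or_video_file, ai_video_file, request_id)
# or_path / ai_path / w_2 / h_2 are hypothetical names; write_ai_video sizes its VideoWriter as
# (width * 2, height) because it stores the hstack-merged frame.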
@@ -1,5 +1,7 @@ | |||
# -*- coding: utf-8 -*- | |||
import os | |||
from os import makedirs | |||
from os.path import exists, join | |||
from loguru import logger | |||
''' | |||
@@ -7,8 +9,9 @@ from loguru import logger | |||
''' | |||
def create_dir_not_exist(path): | |||
if not os.path.exists(path): | |||
def create_dir_not_exist(base_dir, file_path): | |||
path = join(base_dir, file_path) | |||
if not exists(path): | |||
logger.info("开始创建文件夹: {}", path) | |||
os.makedirs(path) | |||
makedirs(path) | |||
logger.info("文件夹创建完成 {}", path) |
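# Usage sketch (illustrative): the new signature takes the project base directory plus a
# relative path and joins them before creating the directory; both arguments are hypothetical.
if __name__ == '__main__':
    create_dir_not_exist('/tmp/tuoheng_alg_demo', 'logs/dsp')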
@@ -1,6 +1,6 @@ | |||
import GPUtil | |||
from common import YmlConstant | |||
# -*- coding: utf-8 -*- | |||
from GPUtil import getAvailable, getGPUs | |||
from loguru import logger | |||
from enums.ExceptionEnum import ExceptionType | |||
from exception.CustomerException import ServiceException | |||
@@ -19,31 +19,54 @@ from exception.CustomerException import ServiceException | |||
# excludeUUID - same as excludeID, except GPUs are matched by UUID. (default = [])
# Output
# deviceIDs - list of all available GPU device IDs. A GPU counts as available when its current load and memory usage are below maxLoad and maxMemory respectively. The list is sorted according to order, and the number of returned IDs is capped by limit.
def get_gpu_ids(context): | |||
deviceIDs = GPUtil.getAvailable(order=YmlConstant.get_gpu_order(context), | |||
limit=int(YmlConstant.get_gpu_limit(context)), | |||
maxLoad=float(YmlConstant.get_gpu_maxLoad(context)), | |||
maxMemory=float(YmlConstant.get_gpu_maxMemory(context)), | |||
includeNan=YmlConstant.get_gpu_includeNan(context), | |||
excludeID=YmlConstant.get_gpu_excludeID(context), | |||
excludeUUID=YmlConstant.get_gpu_excludeUUID(context)) | |||
def get_gpu_ids(): | |||
deviceIDs = getAvailable(maxLoad=0.80, maxMemory=0.80) | |||
return deviceIDs | |||
def get_all_gpu_ids(): | |||
return GPUtil.getGPUs() | |||
return getGPUs() | |||
def get_first_gpu_name(): | |||
gps = GPUtil.getGPUs() | |||
if gps is None or len(gps) == 0: | |||
raise Exception("未获取到gpu资源, 先检测服务器是否已经配置GPU资源!") | |||
gps = get_all_gpu_ids() | |||
if len(gps) == 0: | |||
raise ServiceException(ExceptionType.NO_GPU_RESOURCES.value[0], | |||
ExceptionType.NO_GPU_RESOURCES.value[1]) | |||
return gps[0].name | |||
def check_gpu_resource(context): | |||
gpu_ids = get_gpu_ids(context) | |||
if gpu_ids is None or len(gpu_ids) == 0 or (0 not in gpu_ids and str(0) not in gpu_ids): | |||
def check_gpu_resource(requestId=None): | |||
gpu_ids = get_gpu_ids() | |||
if len(gpu_ids) == 0 or 0 not in gpu_ids: | |||
print_gpu_status(requestId) | |||
raise ServiceException(ExceptionType.NO_GPU_RESOURCES.value[0], | |||
ExceptionType.NO_GPU_RESOURCES.value[1]) | |||
return gpu_ids | |||
def print_gpu_ex_status(requestId=None): | |||
gpu_ids = get_gpu_ids() | |||
result = False | |||
if len(gpu_ids) == 0 or 0 not in gpu_ids: | |||
result = True | |||
print_gpu_status(requestId) | |||
return result | |||
def print_gpu_status(requestId=None): | |||
GPUs = get_all_gpu_ids() | |||
if len(GPUs) == 0: | |||
return | |||
for gpu in GPUs: | |||
if requestId: | |||
logger.info("""############################################################################################ | |||
GPU ID:{}, GPU 名称:{}, 负载率:{}, 内存使用率:{}, 总内存:{}, 占用内存:{}, 空闲内存:{}, requestId:{} | |||
############################################################################################""", gpu.id, | |||
gpu.name, gpu.load * 100, gpu.memoryUtil * 100, gpu.memoryTotal, gpu.memoryUsed, gpu.memoryFree, | |||
requestId) | |||
else: | |||
logger.info("""############################################################################################ | |||
GPU ID:{}, GPU 名称:{}, 负载率:{}, 内存使用率:{}, 总内存:{}, 占用内存:{}, 空闲内存:{} | |||
############################################################################################""", gpu.id, | |||
gpu.name, gpu.load * 100, gpu.memoryUtil * 100, gpu.memoryTotal, gpu.memoryUsed, gpu.memoryFree) |
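# Illustrative sketch (not part of the original diff): check_gpu_resource raises
# NO_GPU_RESOURCES when GPU 0 is not reported as available (load or memory above 0.80), and it
# logs the GPU status first; the requestId below is hypothetical.
if __name__ == '__main__':
    try:
        ids = check_gpu_resource('demo-request')
        logger.info("available GPU ids: {}", ids)
    except ServiceException as se:
        logger.error("no usable GPU: {}", se.msg)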
@@ -1,5 +1,6 @@ | |||
import time | |||
# -*- coding: utf-8 -*- | |||
from io import BytesIO | |||
from traceback import format_exc | |||
import cv2 | |||
import requests | |||
@@ -118,24 +119,30 @@ class TextWaterMark(): | |||
class PictureWaterMark: | |||
def __init__(self): | |||
self.logo = cv2.imread("./image/logo.png", -1) | |||
def common_water(self, image, logo): | |||
width, height = image.shape[1], image.shape[0] | |||
mark_width, mark_height = logo.shape[1], logo.shape[0] | |||
rate = int(width * 0.2) / mark_width | |||
logo_new = cv2.resize(logo, None, fx=rate, fy=rate, interpolation=cv2.INTER_NEAREST) | |||
position = (int(width * 0.95 - logo_new.shape[1]), int(height * 0.95 - logo_new.shape[0])) | |||
b = Image.new('RGBA', (width, height), (0, 0, 0, 0)) # 创建新图像:透明' | |||
a = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) | |||
watermark = Image.fromarray(cv2.cvtColor(logo_new, cv2.COLOR_BGRA2RGBA)) | |||
# 图片旋转 | |||
# watermark = watermark.rotate(45) | |||
b.paste(a, (0, 0)) | |||
b.paste(watermark, position, mask=watermark) | |||
return cv2.cvtColor(np.asarray(b), cv2.COLOR_BGR2RGB) | |||
__slots__ = ('logo', '__requestId') | |||
def __init__(self, logo=None, requestId=None): | |||
self.__requestId = requestId | |||
self.logo = logo | |||
if requestId is None: | |||
self.__requestId = '1' | |||
if logo is None: | |||
self.logo = cv2.imread("./image/logo.png", -1) | |||
# def common_water(self, image, logo): | |||
# width, height = image.shape[1], image.shape[0] | |||
# mark_width, mark_height = logo.shape[1], logo.shape[0] | |||
# rate = int(width * 0.2) / mark_width | |||
# logo_new = cv2.resize(logo, None, fx=rate, fy=rate, interpolation=cv2.INTER_NEAREST) | |||
# position = (int(width * 0.95 - logo_new.shape[1]), int(height * 0.95 - logo_new.shape[0])) | |||
# b = Image.new('RGBA', (width, height), (0, 0, 0, 0)) # 创建新图像:透明' | |||
# a = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) | |||
# watermark = Image.fromarray(cv2.cvtColor(logo_new, cv2.COLOR_BGRA2RGBA)) | |||
# # 图片旋转 | |||
# # watermark = watermark.rotate(45) | |||
# b.paste(a, (0, 0)) | |||
# b.paste(watermark, position, mask=watermark) | |||
# return cv2.cvtColor(np.asarray(b), cv2.COLOR_BGR2RGB) | |||
def common_water_1(self, image, logo, alpha=1): | |||
try: | |||
@@ -167,11 +174,46 @@ class PictureWaterMark: | |||
mask_channels[i] * (mask_channels[3] * alpha / 255), dtype=np.uint8) | |||
dst_img = cv2.merge(dst_channels) | |||
return dst_img | |||
except Exception as e: | |||
logger.exception("加水印异常", e) | |||
except Exception: | |||
logger.error("加水印异常:{}, requestId:{}", format_exc(), self.__requestId) | |||
return image | |||
def add_water_pic(image, logo, requestId, alpha=1): | |||
try: | |||
h, w = image.shape[0], image.shape[1] | |||
# if w >= h: | |||
rate = int(w * 0.1) / logo.shape[1] | |||
# else: | |||
# rate = int(h * 0.1) / logo.shape[0] | |||
mask = cv2.resize(logo, None, fx=rate, fy=rate, interpolation=cv2.INTER_NEAREST) | |||
mask_h, mask_w = mask.shape[0], mask.shape[1] | |||
mask_channels = cv2.split(mask) | |||
dst_channels = cv2.split(image) | |||
# b, g, r, a = cv2.split(mask) | |||
# compute where the watermark sits inside the frame
# if w >= h: | |||
ul_points = (int(h * 0.95) - mask_h, int(w - h * 0.05 - mask_w)) | |||
dr_points = (int(h * 0.95), int(w - h * 0.05)) | |||
# else: | |||
# ul_points = (int(h * 0.95) - mask_h, int(w - h * 0.05 - mask_w)) | |||
# dr_points = (int(h * 0.95), int(w - h * 0.05)) | |||
for i in range(3):
    region = dst_channels[i][ul_points[0]: dr_points[0], ul_points[1]: dr_points[1]]
    # attenuate the background by the logo's alpha, then add the alpha-weighted logo channel
    region[:] = region * (255.0 - mask_channels[3] * alpha) / 255
    region += np.array(mask_channels[i] * (mask_channels[3] * alpha / 255), dtype=np.uint8)
dst_img = cv2.merge(dst_channels) | |||
return dst_img | |||
except Exception: | |||
logger.error("加水印异常:{}, requestId:{}", format_exc(), requestId) | |||
return image | |||
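# The blend above is standard "over" compositing driven by the logo's alpha channel a:
#     dst = dst * (255 - a * alpha) / 255 + logo * (a * alpha) / 255
# so alpha=1 uses the PNG's own transparency and a smaller alpha fades the watermark. The logo
# is scaled to roughly 10% of the frame width and anchored near the bottom-right corner with a
# margin of 5% of the frame height. Hypothetical usage:
#     marked = add_water_pic(frame, cv2.imread('./image/logo.png', -1), 'demo-request', alpha=0.8)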
# 差值感知算法 | |||
def dHash(image): | |||
# 缩放9*8 | |||
@@ -199,17 +241,19 @@ def Hamming_distance(hash1, hash2): | |||
return num | |||
def url2Array(url): | |||
def url2Array(url, enable_ex=True): | |||
try: | |||
response = requests.get(url) | |||
image = Image.open(BytesIO(response.content)) | |||
image1 = np.array(image) | |||
img_bgr = cv2.cvtColor(image1, cv2.COLOR_RGB2BGR) | |||
return img_bgr | |||
except Exception as e: | |||
logger.exception("url地址请求异常: {}", e) | |||
raise ServiceException(ExceptionType.URL_ADDRESS_ACCESS_FAILED.value[0], | |||
ExceptionType.URL_ADDRESS_ACCESS_FAILED.value[1]) | |||
except Exception: | |||
logger.exception("url地址请求异常: {}", format_exc()) | |||
if enable_ex: | |||
raise ServiceException(ExceptionType.URL_ADDRESS_ACCESS_FAILED.value[0], | |||
ExceptionType.URL_ADDRESS_ACCESS_FAILED.value[1]) | |||
return None | |||
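# Usage sketch (illustrative): with enable_ex=False a failed download returns None instead of
# raising URL_ADDRESS_ACCESS_FAILED, so callers can skip a bad image; the URL is hypothetical.
if __name__ == '__main__':
    img = url2Array('https://example.com/snapshot.jpg', enable_ex=False)
    if img is None:
        print('image download failed, skipping')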
def url2Content(url): |
@@ -1,27 +1,36 @@ | |||
import time | |||
import traceback | |||
from aip import AipImageClassify, AipBodyAnalysis | |||
from loguru import logger | |||
from common import YmlConstant | |||
from common.YmlConstant import get_baidu_vehicle_APP_ID, get_baidu_vehicle_API_KEY, get_baidu_vehicle_SECRET_KEY, \ | |||
get_baidu_person_APP_ID, get_baidu_person_API_KEY, get_baidu_person_SECRET_KEY | |||
from enums.BaiduSdkEnum import BAIDUERRORDATA | |||
from enums.ExceptionEnum import ExceptionType | |||
from exception.CustomerException import ServiceException | |||
from util.ImageUtils import url2Content | |||
from util.RWUtils import getConfigs | |||
class AipImageClassifyClient: | |||
def __init__(self, context): | |||
self.__context = context | |||
__slots__ = [ | |||
'__client', | |||
'__aliyun_config' | |||
] | |||
def __init__(self, base_dir): | |||
self.__client = None | |||
self.__aliyun_config = getConfigs(base_dir, 'config/dsp_baidu.json') | |||
self.init_client() | |||
# self.lock = Lock() | |||
def init_client(self): | |||
if self.__client is None: | |||
self.__client = AipImageClassify(str(YmlConstant.get_baidu_vehicle_APP_ID(self.__context)), | |||
YmlConstant.get_baidu_vehicle_API_KEY(self.__context), | |||
YmlConstant.get_baidu_vehicle_SECRET_KEY(self.__context)) | |||
self.__client = AipImageClassify(str(self.__aliyun_config["vehicle"]["APP_ID"]), | |||
self.__aliyun_config["vehicle"]["API_KEY"], | |||
self.__aliyun_config["vehicle"]["SECRET_KEY"]) | |||
''' | |||
车辆检测 | |||
@@ -71,7 +80,7 @@ class AipImageClassifyClient: | |||
if reply_num > reply_value: | |||
if isinstance(e, ServiceException): | |||
raise ServiceException(e.code, e.msg) | |||
logger.exception("车辆检测识别失败: {}, request_id: {}", e, request_id) | |||
logger.error("车辆检测识别失败: {}, request_id: {}", traceback.format_exc(), request_id) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
# except Exception as ee: | |||
@@ -129,24 +138,29 @@ class AipImageClassifyClient: | |||
if reply_num > reply_value: | |||
if isinstance(e, ServiceException): | |||
raise ServiceException(e.code, e.msg) | |||
logger.exception("车辆检测识别失败: {}, request_id: {}", e, request_id) | |||
logger.error("车辆检测识别失败: {}, request_id: {}", traceback.format_exc(), request_id) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
class AipBodyAnalysisClient: | |||
def __init__(self, context): | |||
self.__context = context | |||
__slots__ = [ | |||
'__aliyun_config', | |||
'__client' | |||
] | |||
def __init__(self, base_dir): | |||
self.__client = None | |||
self.__aliyun_config = getConfigs(base_dir, 'config/dsp_baidu.json') | |||
self.init_client() | |||
# self.lock = Lock() | |||
def init_client(self): | |||
if self.__client is None: | |||
self.__client = AipBodyAnalysis(str(YmlConstant.get_baidu_person_APP_ID(self.__context)), | |||
YmlConstant.get_baidu_person_API_KEY(self.__context), | |||
YmlConstant.get_baidu_person_SECRET_KEY(self.__context)) | |||
self.__client = AipBodyAnalysis(str(self.__aliyun_config["person"]["APP_ID"]), | |||
self.__aliyun_config["person"]["API_KEY"], | |||
self.__aliyun_config["person"]["SECRET_KEY"]) | |||
''' | |||
人体检测与属性识别 | |||
@@ -197,7 +211,7 @@ class AipBodyAnalysisClient: | |||
if reply_num > reply_value: | |||
if isinstance(e, ServiceException): | |||
raise ServiceException(e.code, e.msg) | |||
logger.exception("人体检测与属性识别失败: {}, request_id: {}", e, request_id) | |||
logger.error("人体检测与属性识别失败: {}, request_id: {}", traceback.format_exc(), request_id) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
# except Exception as ee: | |||
@@ -256,7 +270,7 @@ class AipBodyAnalysisClient: | |||
if reply_num > reply_value: | |||
if isinstance(e, ServiceException): | |||
raise ServiceException(e.code, e.msg) | |||
logger.exception("人流量统计失败: {}, request_id: {}", e, request_id) | |||
logger.error("人流量统计失败: {}, request_id: {}", traceback.format_exc(), request_id)
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
# except Exception as ee: | |||
@@ -270,12 +284,12 @@ class AipBodyAnalysisClient: | |||
try: | |||
return url2Content(url) | |||
except Exception as e: | |||
logger.exception("读取图片异常!url: {}, request_id: {}", url, request_id, e) | |||
logger.error("读取图片异常!url: {}, request_id: {}, 异常信息:{}", url, request_id, traceback.format_exc()) | |||
raise ServiceException(ExceptionType.READ_IAMGE_URL_EXCEPTION.value[0], | |||
ExceptionType.READ_IAMGE_URL_EXCEPTION.value[1]) | |||
# if __name__ == '__main__': | |||
# with open(r"D:\work\alg_new\tuoheng_alg\dsp_application.yml", "r", encoding='utf-8') as f: | |||
# with open(r"D:\work\alg_new\tuoheng_alg\dsp_application.json", "r", encoding='utf-8') as f: | |||
# file_content = f.read() | |||
# content = yaml.load(file_content, yaml.FullLoader) | |||
# aipImageClassifyClient = AipImageClassifyClient(content) |
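# Illustrative only: judging from the keys read above, config/dsp_baidu.json is expected to look
# roughly like the dict below (values are placeholders, not real credentials); the "orc" section
# with the same shape is read by OcrBaiduSdk. The base_dir is hypothetical.
# {
#     "vehicle": {"APP_ID": 0, "API_KEY": "xxx", "SECRET_KEY": "xxx"},
#     "person":  {"APP_ID": 0, "API_KEY": "xxx", "SECRET_KEY": "xxx"},
#     "orc":     {"APP_ID": 0, "API_KEY": "xxx", "SECRET_KEY": "xxx"}
# }
# client = AipImageClassifyClient('/opt/tuoheng_alg')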
@@ -1,20 +1,25 @@ | |||
# -*- coding: utf-8 -*- | |||
import time | |||
from json import dumps, loads | |||
from traceback import format_exc | |||
from kafka import KafkaProducer, KafkaConsumer, TopicPartition, OffsetAndMetadata | |||
from kafka.errors import kafka_errors | |||
import json | |||
from loguru import logger | |||
from common import YmlConstant, Constant | |||
# 生产者 | |||
class CustomerKafkaProducer: | |||
__slots__ = ( | |||
'__context', | |||
'__configs', | |||
'customerProducer', | |||
'__bootstrap_servers' | |||
) | |||
def __init__(self, context): | |||
self.__context = context | |||
self.__configs = YmlConstant.get_kafka_producer_config(context) | |||
self.__configs = context["kafka"][context["dsp"]["active"]]["producer"] | |||
self.__bootstrap_servers = context["kafka"][context["dsp"]["active"]]["bootstrap_servers"] | |||
self.customerProducer = None | |||
self.get_producer() | |||
@@ -23,18 +28,18 @@ class CustomerKafkaProducer: | |||
if self.customerProducer is None: | |||
logger.info("配置kafka生产者!") | |||
self.customerProducer = KafkaProducer( | |||
bootstrap_servers=YmlConstant.get_kafka_bootstrap_servers(self.__context), | |||
acks=self.__configs[YmlConstant.KAFKA_ACKS], | |||
retries=self.__configs[YmlConstant.KAFKA_RETRIES], | |||
linger_ms=self.__configs[YmlConstant.KAFKA_LINGER_MS], | |||
retry_backoff_ms=self.__configs[YmlConstant.KAFKA_RETRY_BACKOFF_MS], | |||
max_in_flight_requests_per_connection=self.__configs[YmlConstant.KAFKA_MAX_IN_FLIGHT_REQUESTS], | |||
key_serializer=lambda m: json.dumps(m).encode(Constant.UTF_8), | |||
value_serializer=lambda m: json.dumps(m).encode(Constant.UTF_8)) | |||
bootstrap_servers=self.__bootstrap_servers, | |||
acks=self.__configs["acks"], | |||
retries=self.__configs["retries"], | |||
linger_ms=self.__configs["linger_ms"], | |||
retry_backoff_ms=self.__configs["retry_backoff_ms"], | |||
max_in_flight_requests_per_connection=self.__configs["max_in_flight_requests_per_connection"], | |||
key_serializer=lambda m: dumps(m).encode("utf-8"), | |||
value_serializer=lambda m: dumps(m).encode("utf-8")) | |||
# mode 1: asynchronous send, 2: synchronous send
# def on_send_success(record_metadata): success callback
# def on_send_error(excp): failure callback
# def on_send_error(exc): failure callback
def sender(self, topic, key, message, mode=1, customer_send_success=None, customer_send_error=None): | |||
retry_send_num = 0 | |||
while True: | |||
@@ -48,23 +53,26 @@ class CustomerKafkaProducer: | |||
if not customer_send_error: | |||
customer_send_error = CustomerKafkaProducer.on_send_error | |||
self.customerProducer.send(topic=topic, key=key, value=message) \ | |||
.add_callback(customer_send_success, message.get(YmlConstant.REQUEST_ID)) \ | |||
.add_errback(customer_send_error, message.get(YmlConstant.REQUEST_ID)) | |||
.add_callback(customer_send_success, message.get("request_id")) \ | |||
.add_errback(customer_send_error, message.get("request_id")) | |||
if mode == 2: | |||
try: | |||
self.customerProducer.send(topic=topic, key=key, value=message).get(timeout=30) | |||
logger.info("kafka同步发送信息成功, requestId:{}", message.get(YmlConstant.REQUEST_ID)) | |||
except kafka_errors as ke: | |||
logger.exception("kafka同步发送消息异常: {}, requestId:{}", ke, message.get(YmlConstant.REQUEST_ID)) | |||
logger.info("kafka同步发送信息成功, requestId:{}", message.get("request_id")) | |||
except Exception as ke: | |||
logger.error("kafka同步发送消息异常: {}, requestId:{}", format_exc(), | |||
message.get("request_id")) | |||
raise ke | |||
break | |||
except Exception as e: | |||
retry_send_num += 1 | |||
logger.error("kafka发送消息异常, 开始重试, 当前重试次数:{} requestId:{}", retry_send_num, | |||
message.get(YmlConstant.REQUEST_ID)) | |||
message.get("request_id")) | |||
time.sleep(1) | |||
self.customerProducer = None | |||
if retry_send_num > 3: | |||
logger.exception("kafka发送消息重试失败: {}, requestId:{}", e, message.get(YmlConstant.REQUEST_ID)) | |||
logger.error("kafka发送消息重试失败: {}, requestId:{}", format_exc(), | |||
message.get("request_id")) | |||
raise e | |||
def close_producer(self): | |||
@@ -78,17 +86,25 @@ class CustomerKafkaProducer: | |||
record_metadata.partition, record_metadata.offset, requestId) | |||
@staticmethod | |||
def on_send_error(requestId, excp): | |||
logger.exception("kafka异步发送消息异常: {}, requestId:{}", excp, requestId) | |||
def on_send_error(requestId, exc): | |||
logger.exception("kafka异步发送消息异常: {}, requestId:{}", exc, requestId) | |||
# 生产者 | |||
class CustomerKafkaConsumer: | |||
__slots__ = ( | |||
'__context', | |||
'__configs', | |||
'customerConsumer', | |||
'__bootstrap_servers', | |||
'__topics' | |||
) | |||
def __init__(self, context, topics=()): | |||
logger.info("初始化消费者") | |||
self.__context = context | |||
self.__configs = YmlConstant.get_kafka_consumer_config(context) | |||
self.__configs = context["kafka"][context["dsp"]["active"]]["consumer"] | |||
self.__bootstrap_servers = context["kafka"][context["dsp"]["active"]]["bootstrap_servers"] | |||
self.customerConsumer = None | |||
self.__topics = topics | |||
self.subscribe() | |||
@@ -98,13 +114,13 @@ class CustomerKafkaConsumer: | |||
if self.customerConsumer is None: | |||
logger.info("获取消费者!") | |||
self.customerConsumer = KafkaConsumer( | |||
bootstrap_servers=YmlConstant.get_kafka_bootstrap_servers(self.__context), | |||
client_id=self.__configs[YmlConstant.KAFKA_CLIENT_ID], | |||
group_id=self.__configs[YmlConstant.KAFKA_GROUP_ID], | |||
auto_offset_reset=self.__configs[YmlConstant.KAFKA_AUTO_OFFSET_RESET], | |||
enable_auto_commit=self.__configs[YmlConstant.KAFKA_ENABLE_AUTO_COMMIT], | |||
max_poll_records=self.__configs[YmlConstant.KAFKA_MAX_POLL_RECORDS], | |||
value_deserializer=lambda m: json.loads(m.decode(Constant.UTF_8))) | |||
bootstrap_servers=self.__bootstrap_servers, | |||
# client_id=self.__configs[KAFKA_CLIENT_ID], | |||
group_id=self.__configs["group_id"], | |||
auto_offset_reset=self.__configs["auto_offset_reset"], | |||
enable_auto_commit=bool(self.__configs["enable_auto_commit"]), | |||
max_poll_records=self.__configs["max_poll_records"], | |||
value_deserializer=lambda m: loads(m.decode("utf-8"))) | |||
logger.info("kafka生产者订阅topic:{}", self.__topics) | |||
# if self.topics is None or len(self.topics) == 0: | |||
# logger.error("消费者订阅topic不能为空!") | |||
@@ -124,30 +140,36 @@ class CustomerKafkaConsumer: | |||
try: | |||
self.subscribe() | |||
msg = self.customerConsumer.poll() | |||
except Exception as e: | |||
except Exception: | |||
self.customerConsumer = None | |||
logger.exception("消费者拉取消息异常: {}", e) | |||
logger.error("消费者拉取消息异常: {}", format_exc()) | |||
return msg | |||
def commit_offset(self, message): | |||
retry_num = 1 | |||
retry_num = 0 | |||
request_id = message.value.get('request_id') | |||
if not request_id: | |||
request_id = "1" | |||
topic = message.topic | |||
offset = message.offset + 1 | |||
partition = message.partition | |||
while True: | |||
try: | |||
self.subscribe() | |||
logger.info("消费者开始提交offset,topic:{}|offset:{}|partition:{}", message.topic, message.offset + 1, | |||
message.partition) | |||
tp = TopicPartition(topic=message.topic, partition=message.partition) | |||
self.customerConsumer.commit(offsets={tp: (OffsetAndMetadata(message.offset + 1, None))}) | |||
logger.info("消费者提交offset完成,topic:{}|offset:{}|partition:{}", message.topic, message.offset + 1, | |||
message.partition) | |||
logger.info("消费者开始提交offset,topic:{}|offset:{}|partition:{}|requestId:{}", topic, offset, partition, | |||
request_id) | |||
tp = TopicPartition(topic=topic, partition=partition) | |||
self.customerConsumer.commit(offsets={tp: (OffsetAndMetadata(offset, None))}) | |||
logger.info("消费者提交offset完成,topic:{}|offset:{}|partition:{}|requestId:{}", topic, offset, partition, | |||
request_id) | |||
break | |||
except Exception as e: | |||
except Exception: | |||
self.customerConsumer = None | |||
logger.exception("消费者提交offset异常: {}, 重试次数: {}", e, retry_num) | |||
logger.error("消费者提交offset异常: {}, 重试次数: {}, requestId:{}", format_exc(), retry_num, request_id) | |||
time.sleep(1) | |||
retry_num += 1 | |||
if retry_num > 3: | |||
logger.exception("消费者提交offset重试失败: {}", e) | |||
logger.error("消费者提交offset重试失败: {}, requestId:{}", format_exc(), request_id) | |||
break | |||
# if __name__=="__main__": |
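# Minimal consumer sketch (illustrative, not part of the original diff). The topic name and the
# exact shape of `context` are hypothetical; the dict only needs the kafka/<active> sections
# that the constructor reads above. It polls the underlying KafkaConsumer directly and then
# acknowledges each record with commit_offset.
if __name__ == '__main__':
    context = {
        "dsp": {"active": "dev"},
        "kafka": {
            "dev": {
                "bootstrap_servers": ["127.0.0.1:9092"],
                "consumer": {"group_id": "dsp-demo", "auto_offset_reset": "latest",
                             "enable_auto_commit": False, "max_poll_records": 1},
                "producer": {"acks": 1, "retries": 3, "linger_ms": 0,
                             "retry_backoff_ms": 100, "max_in_flight_requests_per_connection": 5}
            }
        }
    }
    consumer = CustomerKafkaConsumer(context, topics=('dsp-alg-tasks',))
    while True:
        records = consumer.customerConsumer.poll(timeout_ms=1000)
        if not records:
            time.sleep(1)
            continue
        for tp, messages in records.items():
            for message in messages:
                # message.value is already json-decoded by the value_deserializer
                consumer.commit_offset(message)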
@@ -1,31 +1,32 @@ | |||
# -*- coding: utf-8 -*- | |||
import sys | |||
import os | |||
from os import makedirs | |||
from os.path import join, exists | |||
from loguru import logger | |||
from common import YmlConstant | |||
from util.RWUtils import getConfigs | |||
# Initialize logging configuration
def init_log(context): | |||
def init_log(base_dir): | |||
log_config = getConfigs(base_dir, 'config/dsp_logger.json') | |||
# Create the log directory if it does not exist yet
base_path = YmlConstant.get_log_base_path(context) | |||
if not os.path.exists(base_path): | |||
os.makedirs(base_path) | |||
base_path = join(base_dir, log_config.get("base_path")) | |||
if not exists(base_path): | |||
makedirs(base_path) | |||
# Remove existing log handlers
logger.remove(handler_id=None) | |||
# Write logs to a file
if YmlConstant.get_log_enable_file(context): | |||
logger.add(base_path + YmlConstant.get_log_log_name(context), | |||
rotation=YmlConstant.get_log_rotation(context), | |||
retention=YmlConstant.get_log_retention(context), | |||
format=YmlConstant.get_log_log_fmt(context), | |||
level=YmlConstant.get_log_level(context), | |||
enqueue=YmlConstant.get_log_enqueue(context), | |||
encoding=YmlConstant.get_log_encoding(context)) | |||
if bool(log_config.get("enable_file_log")): | |||
logger.add(join(base_path, log_config.get("log_name")), | |||
rotation=log_config.get("rotation"), | |||
retention=log_config.get("retention"), | |||
format=log_config.get("log_fmt"), | |||
level=log_config.get("level"), | |||
enqueue=True, | |||
encoding=log_config.get("encoding")) | |||
# Also log to the console
if YmlConstant.get_log_enable_stderr(context): | |||
if bool(log_config.get("enable_stderr")): | |||
logger.add(sys.stderr, | |||
format=YmlConstant.get_log_log_fmt(context), | |||
level=YmlConstant.get_log_level(context), | |||
format=log_config.get("log_fmt"), | |||
level=log_config.get("level"), | |||
enqueue=True) |
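# Illustrative only: judging from the keys read above, config/dsp_logger.json is expected to
# provide roughly the fields below; every value shown is a placeholder, not the project's
# actual configuration.
# {
#     "base_path": "logs/", "enable_file_log": true, "log_name": "dsp_{time:YYYY-MM-DD}.log",
#     "rotation": "00:00", "retention": "7 days", "level": "INFO", "encoding": "utf-8",
#     "enable_stderr": true, "log_fmt": "{time:YYYY-MM-DD HH:mm:ss.SSS} | {level} | {message}"
# }
# init_log('/opt/tuoheng_alg')  # hypothetical base_dir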
@@ -1,27 +1,33 @@ | |||
import time | |||
import traceback | |||
import cv2 | |||
from aip import AipOcr | |||
from loguru import logger | |||
from common import YmlConstant | |||
from common.YmlConstant import get_baidu_ocr_APP_ID, get_baidu_ocr_API_KEY, get_baidu_ocr_SECRET_KEY | |||
from enums.BaiduSdkEnum import BAIDUERRORDATA | |||
from enums.ExceptionEnum import ExceptionType | |||
from exception.CustomerException import ServiceException | |||
from util.RWUtils import getConfigs | |||
class OcrBaiduSdk: | |||
def __init__(self, context): | |||
self.__context = context | |||
__slots__ = [ | |||
'client', "__baidu_config" | |||
] | |||
def __init__(self, base_dir): | |||
self.client = None | |||
self.__baidu_config = getConfigs(base_dir, 'config/dsp_baidu.json') | |||
self.init_client() | |||
def init_client(self): | |||
if self.client is None: | |||
self.client = AipOcr(str(YmlConstant.get_baidu_ocr_APP_ID(self.__context)), | |||
YmlConstant.get_baidu_ocr_API_KEY(self.__context), | |||
YmlConstant.get_baidu_ocr_SECRET_KEY(self.__context)) | |||
self.client = AipOcr(str(self.__baidu_config["orc"]["APP_ID"]), | |||
self.__baidu_config["orc"]["API_KEY"], | |||
self.__baidu_config["orc"]["SECRET_KEY"]) | |||
''' | |||
{ | |||
@@ -82,7 +88,7 @@ class OcrBaiduSdk: | |||
if reply_num > reply_value: | |||
if isinstance(e, ServiceException): | |||
raise ServiceException(e.code, e.msg) | |||
logger.exception("通用文字识别失败: {}, request_id: {}", e, request_id) | |||
logger.error("通用文字识别失败: {}, request_id: {}", traceback.format_exc(), request_id) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
@@ -136,7 +142,7 @@ class OcrBaiduSdk: | |||
if reply_num > reply_value: | |||
if isinstance(e, ServiceException): | |||
raise ServiceException(e.code, e.msg) | |||
logger.exception("车牌识别失败: {}, request_id: {}", e, request_id) | |||
logger.error("车牌识别失败: {}, request_id: {}", traceback.format_exc(), request_id) | |||
raise ServiceException(ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
ExceptionType.SERVICE_INNER_EXCEPTION.value[1]) | |||
@@ -0,0 +1,15 @@ | |||
# -*- coding: utf-8 -*- | |||
from json import loads | |||
from os.path import join | |||
from yaml import safe_load | |||
def getConfigs(base_dir, file_path, read_type='json'): | |||
path = join(base_dir, file_path) | |||
with open(path, 'r', encoding="utf-8") as f: | |||
if read_type == 'json': | |||
return loads(f.read()) | |||
if read_type == 'yml': | |||
return safe_load(f) | |||
raise Exception('路径: %s未获取配置信息' % path) |
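# Usage sketch (illustrative): getConfigs joins base_dir with a relative path and parses the
# file as JSON by default, or as YAML when read_type='yml'; the paths below are hypothetical.
if __name__ == '__main__':
    baidu_cfg = getConfigs('/opt/tuoheng_alg', 'config/dsp_baidu.json')
    logger_cfg = getConfigs('/opt/tuoheng_alg', 'config/dsp_logger.json', read_type='json')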
@@ -1,3 +1,5 @@ | |||
# -*- coding: utf-8 -*- | |||
import time | |||
import datetime | |||
@@ -15,5 +17,5 @@ def now_date_to_str(fmt=None): | |||
fmt = YY_MM_DD_HH_MM_SS | |||
return datetime.datetime.now().strftime(fmt) | |||
if __name__=="__main__": | |||
print(now_date_to_str(YMDHMSF)) | |||
# if __name__=="__main__": | |||
# print(now_date_to_str(YMDHMSF)) |
@@ -1,16 +0,0 @@ | |||
import os | |||
import yaml | |||
from common import Constant | |||
# 从配置文件读取所有配置信息 | |||
def getConfigs(base_dir): | |||
applicationConfigPath = os.path.join(base_dir, Constant.APPLICATION_CONFIG) | |||
if not os.path.exists(applicationConfigPath): | |||
raise Exception("未找到配置文件:%s" % applicationConfigPath) | |||
with open(applicationConfigPath, Constant.R, encoding=Constant.UTF_8) as f: | |||
file_content = f.read() | |||
context = yaml.load(file_content, yaml.FullLoader) | |||
if not context: | |||
raise Exception("配置项不能为空:%s" % applicationConfigPath) | |||
return context |
@@ -20,6 +20,22 @@ VOD_MAX_DESCRIPTION_LENGTH = 1024 | |||
class AliyunVodUploader: | |||
__slots__ = ( | |||
'__requestId', | |||
'__accessKeyId', | |||
'__accessKeySecret', | |||
'__ecsRegion', | |||
'__vodApiRegion', | |||
'__connTimeout', | |||
'__bucketClient', | |||
'__maxRetryTimes', | |||
'__vodClient', | |||
'__EnableCrc', | |||
'__multipartThreshold', | |||
'__multipartPartSize', | |||
'__multipartThreadsNum' | |||
) | |||
def __init__(self, accessKeyId, accessKeySecret, requestId, ecsRegionId=None): | |||
""" | |||
constructor for VodUpload |