@@ -1,116 +1,115 @@ | |||
# ---> Python | |||
# Byte-compiled / optimized / DLL files | |||
__pycache__/ | |||
*.py[cod] | |||
*$py.class | |||
# C extensions | |||
*.so | |||
# Distribution / packaging | |||
.Python | |||
build/ | |||
develop-eggs/ | |||
dist/ | |||
downloads/ | |||
eggs/ | |||
.eggs/ | |||
lib/ | |||
lib64/ | |||
parts/ | |||
sdist/ | |||
var/ | |||
wheels/ | |||
*.egg-info/ | |||
.installed.cfg | |||
*.egg | |||
MANIFEST | |||
# PyInstaller | |||
# Usually these files are written by a python script from a template | |||
# before PyInstaller builds the exe, so as to inject date/other infos into it. | |||
*.manifest | |||
*.spec | |||
# Installer logs | |||
pip-log.txt | |||
pip-delete-this-directory.txt | |||
# Unit test / coverage reports | |||
htmlcov/ | |||
.tox/ | |||
.nox/ | |||
.coverage | |||
.coverage.* | |||
.cache | |||
nosetests.xml | |||
coverage.xml | |||
*.cover | |||
.hypothesis/ | |||
.pytest_cache/ | |||
# Translations | |||
*.mo | |||
*.pot | |||
# Django stuff: | |||
*.log | |||
local_settings.py | |||
db.sqlite3 | |||
# Flask stuff: | |||
instance/ | |||
.webassets-cache | |||
# Scrapy stuff: | |||
.scrapy | |||
# Sphinx documentation | |||
docs/_build/ | |||
# PyBuilder | |||
target/ | |||
# Jupyter Notebook | |||
.ipynb_checkpoints | |||
# IPython | |||
profile_default/ | |||
ipython_config.py | |||
# pyenv | |||
.python-version | |||
# celery beat schedule file | |||
celerybeat-schedule | |||
# SageMath parsed files | |||
*.sage.py | |||
# Environments | |||
.env | |||
.venv | |||
env/ | |||
venv/ | |||
ENV/ | |||
env.bak/ | |||
venv.bak/ | |||
# Spyder project settings | |||
.spyderproject | |||
.spyproject | |||
# Rope project settings | |||
.ropeproject | |||
# mkdocs documentation | |||
/site | |||
# mypy | |||
.mypy_cache/ | |||
.dmypy.json | |||
dmypy.json | |||
# Pyre type checker | |||
.pyre/ | |||
@@ -0,0 +1,8 @@ | |||
# Default ignored files
/shelf/ | |||
/workspace.xml | |||
# Editor-based HTTP Client requests
/httpRequests/ | |||
# Datasource local storage ignored files | |||
/dataSources/ | |||
/dataSources.local.xml |
@@ -0,0 +1,6 @@ | |||
<?xml version="1.0" encoding="UTF-8"?> | |||
<project version="4"> | |||
<component name="ProjectRootManager" version="2" languageLevel="JDK_19" project-jdk-name="Python 3.8 (test)" project-jdk-type="Python SDK"> | |||
<output url="file://$PROJECT_DIR$/out" /> | |||
</component> | |||
</project> |
@@ -0,0 +1,8 @@ | |||
<?xml version="1.0" encoding="UTF-8"?> | |||
<project version="4"> | |||
<component name="ProjectModuleManager"> | |||
<modules> | |||
<module fileurl="file://$PROJECT_DIR$/.idea/tuoheng_airport_stream.iml" filepath="$PROJECT_DIR$/.idea/tuoheng_airport_stream.iml" /> | |||
</modules> | |||
</component> | |||
</project> |
@@ -0,0 +1,9 @@ | |||
<?xml version="1.0" encoding="UTF-8"?> | |||
<module type="JAVA_MODULE" version="4"> | |||
<component name="NewModuleRootManager" inherit-compiler-output="true"> | |||
<exclude-output /> | |||
<content url="file://$MODULE_DIR$" /> | |||
<orderEntry type="inheritedJdk" /> | |||
<orderEntry type="sourceFolder" forTests="false" /> | |||
</component> | |||
</module> |
@@ -0,0 +1,6 @@ | |||
<?xml version="1.0" encoding="UTF-8"?> | |||
<project version="4"> | |||
<component name="VcsDirectoryMappings"> | |||
<mapping directory="" vcs="Git" /> | |||
</component> | |||
</project> |
@@ -0,0 +1,39 @@ | |||
# -*- coding: utf-8 -*- | |||
from multiprocessing import freeze_support | |||
from os.path import dirname, realpath, join | |||
from loguru import logger | |||
from service.MqttDisService import MqttDispatcherService | |||
from service.HttpDisService import HttpDispatcherService | |||
from util.LogUtils import init_log | |||
from util.RWUtils import getConfigs | |||
'''
Main program entry point.
'''
if __name__ == '__main__': | |||
freeze_support() | |||
base_dir = dirname(realpath(__file__)) | |||
init_log(base_dir) | |||
logger.info("(♥◠‿◠)ノ゙ 【机场推流服务】开始启动 ლ(´ڡ`ლ)゙") | |||
print("(♥◠‿◠)ノ゙ 【机场推流服务】开始启动 ლ(´ڡ`ლ)゙") | |||
print("############################################################") | |||
print("配置文件路径: ", join(base_dir, "config")) | |||
print("服务配置文件: service.yml") | |||
print("mqtt配置文件: mqtt.yml") | |||
print("日志配置文件: logger.yml") | |||
print("日志文件路径: ", join(base_dir, "logs")) | |||
print("############################################################") | |||
    # Load the service configuration and select the docking method (1 = MQTT, 2 = HTTP API)
service_config = getConfigs(base_dir, "config/service.yml") | |||
service_config['base_dir'] = base_dir | |||
if 1 == service_config['docking_method']: | |||
print("当前使用的交互模式是mqtt交互!!") | |||
print("############################################################") | |||
MqttDispatcherService(service_config) | |||
elif 2 == service_config['docking_method']: | |||
print("当前使用的交互模式是接口交互!!") | |||
print("############################################################") | |||
HttpDispatcherService(service_config) | |||
@@ -0,0 +1,30 @@ | |||
# -*-coding:utf-8 -*- | |||
from enums.StatusEnum import StatusType | |||
from util.QueUtil import put_queue | |||
from util.TimeUtils import now_date_to_str | |||
def push_result(fb_queue, errorCode="", errorMsg="", status=StatusType.RUNNING.value[0]): | |||
put_queue(fb_queue, ('stream', | |||
{ | |||
"errorCode": errorCode, | |||
"errorMsg": errorMsg, | |||
"status": status, | |||
"currentTime": now_date_to_str() | |||
}), | |||
timeout=2) | |||
def push_http_result(fb_queue, callback_url=None, errorCode="", errorMsg="", status=StatusType.RUNNING.value[0]): | |||
if callback_url is not None: | |||
put_queue(fb_queue, ('stream', | |||
{ | |||
"callback_url": callback_url, | |||
"data": { | |||
"errorCode": errorCode, | |||
"errorMsg": errorMsg, | |||
"status": status, | |||
"currentTime": now_date_to_str() | |||
} | |||
}), | |||
timeout=2) |
@@ -0,0 +1,24 @@ | |||
# -*-coding:utf-8 -*- | |||
from typing import Any, Optional
from pydantic import BaseModel
class JsonResult(BaseModel):
    code: int
    msg: str
    data: Optional[Any] = None
@staticmethod | |||
def success(code=0, msg="操作成功!", data=None): | |||
return { | |||
"code": code, | |||
"msg": msg, | |||
"data": data | |||
} | |||
@staticmethod | |||
def error(code=-1, msg="操作失败!", data=None): | |||
return { | |||
"code": code, | |||
"msg": msg, | |||
"data": data | |||
} |
@@ -0,0 +1,20 @@ | |||
# -*-coding:utf-8 -*- | |||
from typing import Union | |||
from pydantic import BaseModel, Field, HttpUrl | |||
class PushStreamRequest(BaseModel): | |||
pullUrl: Union[str, None] = Field(default=None, title="拉流地址", | |||
pattern="(^(https|http|rtsp|rtmp|artc|webrtc|ws)://[\\w\\d\\.\\-/:_?=&!~*'()+$@,;\"%\\[\\]]+$)?") | |||
pushUrl: Union[str, None] = Field(default=None, title="推流地址", | |||
pattern="(^(https|http|rtsp|rtmp|artc|webrtc|ws)://[\\w\\d\\.\\-/:_?=&!~*'()+$@,;\"%\\[\\]]+$)?") | |||
    callbackUrl: HttpUrl = Field(..., title="回调地址")
class CallbackRequest(BaseModel): | |||
errorCode: Union[str, None] | |||
errorMsg: Union[str, None] | |||
status: Union[int, None] | |||
currentTime: Union[str, None] |
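These request models presumably back the HTTP docking endpoints served by HttpDispatcherService, which is not included in this change. The sketch below is therefore only an assumption about how a route could hand a PushStreamRequest over to the dispatcher loop in HttpServiceImpl through the shared queue; the route path and the response shape are made up for illustration.

# Hypothetical sketch; the real HttpDispatcherService routes are not part of this diff.
from fastapi import FastAPI
from common.Constant import get_share_queue
from util.QueUtil import put_queue

app = FastAPI()

@app.post("/stream/start")  # assumed path
def start_stream(req: PushStreamRequest):
    # Hand the command to the HttpServiceImpl dispatcher loop via the shared queue;
    # handle_stream() reads the keys command / pull_url / push_url / callback_url.
    put_queue(get_share_queue(), ("stream", {
        "command": "start",
        "pull_url": req.pullUrl,
        "push_url": req.pushUrl,
        "callback_url": str(req.callbackUrl),
    }), timeout=2)
    return {"code": 0, "msg": "ok", "data": None}  # shaped like JsonResult.success()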
@@ -0,0 +1,17 @@ | |||
# -*- coding: utf-8 -*- | |||
from multiprocessing import Queue | |||
SHARE_QUEUE = Queue() | |||
def get_share_queue(): | |||
return SHARE_QUEUE | |||
TASK_RECORD = { | |||
"stream": None | |||
} | |||
def get_task_record(): | |||
return TASK_RECORD |
@@ -0,0 +1,62 @@ | |||
# -*- coding: utf-8 -*- | |||
import time | |||
from json import loads | |||
from threading import Thread | |||
from traceback import format_exc | |||
from loguru import logger | |||
from util.QueUtil import get_block_queue | |||
from util.RequestUtil import HttpRequests | |||
class HttpFeedbackThread(Thread): | |||
__slots__ = '__fb_queue' | |||
def __init__(self, fb_queue): | |||
super().__init__() | |||
self.__fb_queue = fb_queue | |||
def run(self): | |||
logger.info("启动反馈线程") | |||
fb_queue = self.__fb_queue | |||
request = HttpRequests() | |||
try: | |||
headers = {'content-type': "application/json"} | |||
while True: | |||
try: | |||
fb = get_block_queue(fb_queue) | |||
if fb is not None and len(fb) > 0: | |||
if fb[0] == "stream": | |||
stream_req(request, fb[1], headers) | |||
del fb | |||
else: | |||
time.sleep(1) | |||
except Exception: | |||
logger.error("反馈异常:{}", format_exc()) | |||
if request: | |||
request.close_session() | |||
request = HttpRequests() | |||
finally: | |||
request.close_session() | |||
logger.info("反馈线程执行完成") | |||
def stream_req(request, stream, headers): | |||
logger.info("开始发送推流回调请求, 回调地址: {}", stream['callback_url']) | |||
logger.info("开始发送推流回调请求, 回调请求体: {}", stream['data']) | |||
try: | |||
response = request.send_request('POST', stream['callback_url'], data=stream['data'], | |||
headers=headers, timeout=3) | |||
if response.status_code != 200: | |||
logger.error("推流回调请求失败! 状态码: {}, {}", response.status_code, response.__dict__) | |||
else: | |||
content = response.content.decode('utf-8') | |||
if content is not None and len(content) > 0: | |||
content = loads(content) | |||
code = content.get('code') | |||
if code is not None and code != 0: | |||
logger.error("推流回调请求失败! 失败描述: {}", content.get('msg')) | |||
except Exception: | |||
logger.error("回调请求失败, 请求体: {}, 回调地址: {}, 异常信息: {}", stream['data'], stream['callback_url'], | |||
format_exc()) |
@@ -0,0 +1,124 @@ | |||
# -*- coding: utf-8 -*- | |||
import time | |||
from multiprocessing import Process, Queue | |||
from threading import Thread | |||
from traceback import format_exc | |||
from loguru import logger | |||
from bean.Feedback import push_http_result | |||
from enums.ExceptionEnum import StreamStrExceptionType | |||
from enums.StatusEnum import StatusType | |||
from exception.CustomerException import ServiceException | |||
from util.LogUtils import init_log | |||
from util.PushStreamUtils import PushStreamUtil | |||
from util.QueUtil import put_queue, get_no_block_queue | |||
class PushStreamProcess(Process): | |||
    __slots__ = ('__fb_queue', '__event', '__service_config', '__push_stream_tool', '__callback_url')
def __init__(self, fb_queue, service_config, callback_url, pull_url, push_url): | |||
super().__init__() | |||
self.__fb_queue = fb_queue | |||
self.__event = Queue() | |||
self.__service_config = service_config | |||
self.__callback_url = callback_url | |||
self.__push_stream_tool = PushStreamUtil(service_config["stream"]["pullUrl"], | |||
service_config["stream"]["pushUrl"]) | |||
self.__push_stream_tool.set_url(pull_url, push_url) | |||
def send_event(self, result): | |||
put_queue(self.__event, result, timeout=2, is_throw_ex=True) | |||
def push_stream(self, push_queue, push_stream_tool): | |||
logger.info("开始启动推流线程!") | |||
while True: | |||
try: | |||
push_stream_tool.start_push_stream() | |||
out, err = push_stream_tool.push_stream_sp.communicate() | |||
if push_stream_tool.status: | |||
logger.warning("推流异常,请检测拉流地址和推流地址是否正常!") | |||
if push_stream_tool.push_stream_sp.returncode != 0: | |||
logger.error("推流异常:{}", err.decode('utf-8')) | |||
put_queue(push_queue, (2, StatusType.RETRYING.value[0]), timeout=2, is_throw_ex=True) | |||
push_stream_tool.close_push_stream_p() | |||
time.sleep(3) | |||
if not push_stream_tool.status: | |||
push_stream_tool.close_push_stream_p() | |||
put_queue(push_queue, (0,), timeout=2, is_throw_ex=True) | |||
break | |||
except ServiceException as s: | |||
logger.error("{}", s.msg) | |||
push_stream_tool.close_push_stream_p() | |||
# push_stream_tool.status = False | |||
put_queue(push_queue, (1, s), timeout=2, is_throw_ex=True) | |||
break | |||
except Exception as e: | |||
logger.error("异常:{}", format_exc()) | |||
push_stream_tool.close_push_stream_p() | |||
# push_stream_tool.status = False | |||
put_queue(push_queue, (1, e), timeout=2, is_throw_ex=True) | |||
break | |||
logger.info("推流线程运行结束!") | |||
def run(self): | |||
fb_queue, event_queue, callback_url = self.__fb_queue, self.__event, self.__callback_url | |||
service_config = self.__service_config | |||
hb_status = StatusType.WAITTING.value[0] | |||
push_stream_tool = self.__push_stream_tool | |||
        # initialize logging
init_log(service_config["base_dir"]) | |||
logger.info("开始启动推流进程") | |||
push_queue = Queue() | |||
push = Thread(target=self.push_stream, args=(push_queue, push_stream_tool)) | |||
        push.daemon = True
push.start() | |||
count, start_time, ex = 0, time.time(), None | |||
try: | |||
while True: | |||
if push_stream_tool.status and not push.is_alive(): | |||
logger.error("检测到推流线程异常停止!") | |||
raise Exception("检测到推流线程异常停止!") | |||
ph_result = get_no_block_queue(push_queue) | |||
event_result = get_no_block_queue(event_queue) | |||
if event_result is not None: | |||
command = event_result.get("command") | |||
if "stop" == command: | |||
hb_status = StatusType.STOPPING.value[0] | |||
push_stream_tool.status = False | |||
push_stream_tool.close_push_stream_p() | |||
if ph_result is not None and ph_result[0] == 0: | |||
logger.info("推流任务停止中") | |||
push.join(timeout=60) | |||
push_http_result(fb_queue, callback_url, status=StatusType.SUCCESS.value[0]) | |||
break | |||
if ph_result is not None and ph_result[0] == 1: | |||
logger.info("推流任务异常停止中") | |||
push.join(timeout=60) | |||
raise ph_result[1] | |||
if ph_result is not None and ph_result[0] == 2: | |||
if StatusType.RETRYING.value[0] == ph_result[1]: | |||
hb_status = StatusType.RETRYING.value[0] | |||
start_time = time.time() | |||
if time.time() - start_time > 10: | |||
hb_status = StatusType.RUNNING.value[0] | |||
count += 1 | |||
if count % 15 == 0: | |||
push_http_result(fb_queue, callback_url, status=hb_status) | |||
count = 0 | |||
time.sleep(1) | |||
except ServiceException as s: | |||
logger.error("推流异常, code: {}, msg: {}", s.code, s.msg) | |||
ex = s.code, s.msg | |||
except Exception: | |||
logger.error("推流异常:{}", format_exc()) | |||
ex = StreamStrExceptionType.SERVICE_INNER_EXCEPTION.value[0], StreamStrExceptionType.SERVICE_INNER_EXCEPTION.value[1] | |||
finally: | |||
push_stream_tool.status = False | |||
push_stream_tool.close_push_stream_p() | |||
push.join(timeout=60) | |||
if ex: | |||
code, msg = ex | |||
push_http_result(fb_queue, callback_url, code, msg, status=StatusType.FAILED.value[0]) | |||
logger.info("推流检测线程执行完成") |
@@ -0,0 +1,93 @@ | |||
# -*- coding: utf-8 -*- | |||
import sys | |||
from multiprocessing import Queue | |||
from threading import Thread | |||
from time import sleep | |||
from traceback import format_exc | |||
from loguru import logger | |||
from bean.Feedback import push_http_result | |||
from common.Constant import get_task_record, get_share_queue | |||
from concurrency.http.HttpFeedbackThread import HttpFeedbackThread | |||
from concurrency.http.HttpPushStreamProcess import PushStreamProcess | |||
from enums.ExceptionEnum import StreamStrExceptionType | |||
from enums.StatusEnum import StatusType | |||
from exception.CustomerException import ServiceException | |||
from util.QueUtil import get_no_block_queue | |||
class HttpServiceImpl: | |||
__slots__ = () | |||
def __init__(self, service_config): | |||
service_thread = Thread(target=self.start_service, args=(service_config,)) | |||
        service_thread.daemon = True
service_thread.start() | |||
@staticmethod | |||
def start_service(service_config): | |||
fb_queue = Queue() | |||
task, msg_queue = get_task_record(), get_share_queue() | |||
handle_method = { | |||
"stream": lambda x, y, z, h: handle_stream(x, y, z, h) | |||
} | |||
feedbackThread = None | |||
while True: | |||
try: | |||
if task["stream"] is not None and not task["stream"].is_alive(): | |||
task["stream"] = None | |||
feedbackThread = start_feedback_thread(feedbackThread, fb_queue) | |||
message = get_no_block_queue(msg_queue) | |||
if message is not None and isinstance(message, tuple) and len(message) == 2: | |||
if handle_method.get(message[0]) is not None: | |||
handle_method[message[0]](message[1], service_config, task, fb_queue) | |||
else: | |||
sleep(1) | |||
except Exception: | |||
logger.error("服务异常: {}", format_exc()) | |||
def handle_stream(msg, service_config, task, fb_queue): | |||
try: | |||
command = msg["command"] | |||
if 'start' == command: | |||
if task["stream"] is not None: | |||
logger.error("推流任务已存在!!!") | |||
push_http_result(fb_queue, msg["callback_url"], | |||
StreamStrExceptionType.PUSH_STREAM_TASK_IS_AREADLY.value[0], | |||
StreamStrExceptionType.PUSH_STREAM_TASK_IS_AREADLY.value[1], | |||
StatusType.FAILED.value[0]) | |||
return | |||
push_http_result(fb_queue, msg["callback_url"], status=StatusType.WAITTING.value[0]) | |||
pp = PushStreamProcess(fb_queue, service_config, msg["callback_url"], msg["pull_url"], msg["push_url"]) | |||
pp.start() | |||
task["stream"] = pp | |||
elif 'stop' == command: | |||
if task["stream"] is None: | |||
logger.error("推流任务不存在, 任务无法停止!") | |||
return | |||
task["stream"].send_event({"command": "stop"}) | |||
except ServiceException as s: | |||
logger.error("消息处理异常: {}", s.msg) | |||
push_http_result(fb_queue, msg["callback_url"], s.code, s.msg, StatusType.FAILED.value[0]) | |||
except Exception: | |||
logger.error("消息处理异常: {}", format_exc()) | |||
push_http_result(fb_queue, msg["callback_url"], | |||
StreamStrExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
StreamStrExceptionType.SERVICE_INNER_EXCEPTION.value[1], | |||
StatusType.FAILED.value[0]) | |||
finally: | |||
del msg | |||
def start_feedback_thread(feedbackThread, fb_queue): | |||
if feedbackThread is None: | |||
feedbackThread = HttpFeedbackThread(fb_queue) | |||
        feedbackThread.daemon = True
feedbackThread.start() | |||
else: | |||
if not feedbackThread.is_alive(): | |||
logger.error("反馈线程异常停止! 开始终止程序!") | |||
sys.exit() | |||
return feedbackThread |
@@ -0,0 +1,35 @@ | |||
# -*- coding: utf-8 -*- | |||
import time | |||
from threading import Thread | |||
from traceback import format_exc | |||
from loguru import logger | |||
from util.QueUtil import get_block_queue | |||
'''
Feedback thread: takes feedback messages off the queue and publishes them to MQTT.
'''
class FeedbackThread(Thread): | |||
__slots__ = ('__fb_queue', '__mq') | |||
def __init__(self, fb_queue, mq): | |||
super().__init__() | |||
self.__fb_queue = fb_queue | |||
self.__mq = mq | |||
def run(self): | |||
logger.info("启动反馈线程") | |||
fb_queue, mq = self.__fb_queue, self.__mq | |||
while True: | |||
try: | |||
fb = get_block_queue(fb_queue) | |||
if fb is not None and len(fb) > 0: | |||
mq.publish(fb) | |||
else: | |||
time.sleep(1) | |||
except Exception: | |||
logger.error("反馈异常:{}", format_exc()) | |||
logger.info("反馈线程执行完成") |
@@ -0,0 +1,204 @@ | |||
# -*- coding: utf-8 -*- | |||
import time | |||
from multiprocessing import Queue, Process | |||
from threading import Thread | |||
from traceback import format_exc | |||
from loguru import logger | |||
from bean.Feedback import push_result | |||
from enums.ExceptionEnum import ExceptionType | |||
from enums.StatusEnum import StatusType | |||
from exception.CustomerException import ServiceException | |||
from util.LogUtils import init_log | |||
from util.PushStreamUtils import PushStreamUtil | |||
from util.QueUtil import put_queue, get_no_block_queue | |||
class PushStreamProcess(Process): | |||
__slots__ = ('__fb_queue', '__service_config', '__event', '__pullUrl', '__pushUrl') | |||
def __init__(self, fb_queue, service_config, pullUrl, pushUrl): | |||
super().__init__() | |||
self.__fb_queue = fb_queue | |||
self.__service_config = service_config | |||
self.__event = Queue() | |||
self.__pullUrl = pullUrl | |||
self.__pushUrl = pushUrl | |||
push_result(fb_queue, status=StatusType.WAITTING.value[0]) | |||
def send_event(self, result): | |||
put_queue(self.__event, result, timeout=2, is_throw_ex=True) | |||
@staticmethod | |||
def push_stream(push_queue, push_stream_tool): | |||
logger.info("开始启动推流线程!") | |||
while True: | |||
try: | |||
push_stream_tool.start_push_stream() | |||
out, err = push_stream_tool.push_stream_sp.communicate() | |||
if push_stream_tool.status: | |||
logger.warning("推流异常,请检测拉流地址和推流地址是否正常!") | |||
if push_stream_tool.push_stream_sp.returncode != 0: | |||
logger.error("推流异常:{}", err.decode("utf-8")) | |||
put_queue(push_queue, (2, StatusType.RETRYING.value[0]), timeout=2, is_throw_ex=True) | |||
push_stream_tool.close_push_stream_p() | |||
time.sleep(1) | |||
if not push_stream_tool.status: | |||
push_stream_tool.close_push_stream_p() | |||
put_queue(push_queue, (0,), timeout=2, is_throw_ex=True) | |||
break | |||
except ServiceException as s: | |||
logger.error("异常: {}", s.msg) | |||
push_stream_tool.close_push_stream_p() | |||
# push_stream_tool.status = False | |||
put_queue(push_queue, (1, s), timeout=2, is_throw_ex=True) | |||
break | |||
except Exception as e: | |||
logger.error("异常: {}", format_exc()) | |||
push_stream_tool.close_push_stream_p() | |||
# push_stream_tool.status = False | |||
put_queue(push_queue, (1, e), timeout=2, is_throw_ex=True) | |||
break | |||
logger.info("推流线程运行结束!") | |||
def run(self): | |||
service_config, pullUrl, pushUrl = self.__service_config, self.__pullUrl, self.__pushUrl | |||
fb_queue, event_queue, push_queue = self.__fb_queue, self.__event, Queue() | |||
hb_status = StatusType.WAITTING.value[0] | |||
push_stream_tool = PushStreamUtil(service_config["stream"]["pullUrl"], service_config["stream"]["pushUrl"]) | |||
push_stream_tool.set_url(pullUrl, pushUrl) | |||
push = None | |||
count, start_time, ex = 0, time.time(), None | |||
try: | |||
init_log(service_config["base_dir"]) | |||
logger.info("开始启动推流进程!") | |||
push = Thread(target=self.push_stream, args=(push_queue, push_stream_tool)) | |||
            push.daemon = True
push.start() | |||
while True: | |||
if push_stream_tool.status and not push.is_alive(): | |||
logger.error("检测到推流线程异常停止!") | |||
raise Exception("检测到推流线程异常停止!") | |||
ph_result = get_no_block_queue(push_queue) | |||
event_result = get_no_block_queue(event_queue) | |||
if event_result is not None: | |||
command = event_result.get("command") | |||
if "stop" == command: | |||
hb_status = StatusType.STOPPING.value[0] | |||
push_stream_tool.status = False | |||
push_stream_tool.close_push_stream_p() | |||
if ph_result is not None and ph_result[0] == 0: | |||
logger.info("推流任务停止中") | |||
push.join(timeout=60) | |||
push_result(fb_queue, status=StatusType.SUCCESS.value[0]) | |||
break | |||
if ph_result is not None and ph_result[0] == 1: | |||
logger.info("推流任务异常停止中") | |||
push.join(timeout=60) | |||
raise ph_result[1] | |||
if ph_result is not None and ph_result[0] == 2: | |||
if StatusType.RETRYING.value[0] == ph_result[1]: | |||
hb_status = StatusType.RETRYING.value[0] | |||
start_time = time.time() | |||
if time.time() - start_time > 20: | |||
hb_status = StatusType.RUNNING.value[0] | |||
count += 1 | |||
if count % 15 == 0: | |||
push_result(fb_queue, status=hb_status) | |||
count = 0 | |||
time.sleep(1) | |||
except ServiceException as s: | |||
logger.error("推流异常, code: {}, msg: {}", s.code, s.msg) | |||
ex = s.code, s.msg | |||
except Exception: | |||
logger.error("推流异常:{}", format_exc()) | |||
ex = ExceptionType.SERVICE_INNER_EXCEPTION.value[0], ExceptionType.SERVICE_INNER_EXCEPTION.value[1] | |||
finally: | |||
push_stream_tool.status = False | |||
push_stream_tool.close_push_stream_p() | |||
if push: | |||
push.join(timeout=60) | |||
if ex: | |||
code, msg = ex | |||
push_result(fb_queue, code, msg, status=StatusType.FAILED.value[0]) | |||
logger.info("推流检测线程执行完成") | |||
# """ | |||
# 版本二 | |||
# """ | |||
# class PushStreamThread(Thread): | |||
# __slots__ = ('__fbQueue', '__event', '__push_stream_tool', '__hb_status') | |||
# | |||
# def __init__(self, fbQueue, push_stream_tool): | |||
# super().__init__() | |||
# self.__fb_queue = fbQueue | |||
# self.__event = Queue() | |||
# self.__push_stream_tool = push_stream_tool | |||
# self.__hb_status = StatusType.WAITTING.value[0] | |||
# put_queue(self.__fb_queue, { | |||
# "errorCode": "", | |||
# "errorMsg": "", | |||
# "status": StatusType.WAITTING.value[0], | |||
# "current_time": now_date_to_str()}, is_throw_ex=False) | |||
# | |||
# def send_event(self, result): | |||
# put_queue(self.__event, result, is_throw_ex=False) | |||
# | |||
# def run(self): | |||
# logger.info("启动推流线程") | |||
# self.__push_stream_tool.start_push_stream() | |||
# count = 0 | |||
# start_time = time.time() | |||
# while True: | |||
# try: | |||
# event_result = get_no_block_queue(self.__event) | |||
# if event_result is not None: | |||
# command = event_result.get("command") | |||
# if "stop" == command: | |||
# self.__push_stream_tool.close_push_stream_p(send=True) | |||
# put_queue(self.__fb_queue, { | |||
# "errorCode": "", | |||
# "errorMsg": "", | |||
# "status": StatusType.SUCCESS.value[0], | |||
# "current_time": now_date_to_str()}, is_throw_ex=False) | |||
# break | |||
# if self.__push_stream_tool.push_stream_sp and self.__push_stream_tool.push_stream_sp.poll() is not None: | |||
# logger.error("推流异常,请检查推流地址和拉流地址是否正常!!!!!") | |||
# # if self.__push_stream_tool.push_stream_sp.returncode != 0: | |||
# # out, err = self.__push_stream_tool.push_stream_sp.communicate(timeout=120) | |||
# # logger.error("推流异常:{}", err.decode()) | |||
# self.__push_stream_tool.close_push_stream_p(send=True) | |||
# self.__push_stream_tool.start_push_stream() | |||
# self.__hb_status = StatusType.RETRYING.value[0] | |||
# start_time = time.time() | |||
# if time.time() - start_time > 20: | |||
# self.__hb_status = StatusType.RUNNING.value[0] | |||
# count += 1 | |||
# if count % 10 == 0: | |||
# put_queue(self.__fb_queue, { | |||
# "errorCode": "", | |||
# "errorMsg": "", | |||
# "status": self.__hb_status, | |||
# "current_time": now_date_to_str()}, is_throw_ex=False) | |||
# count = 0 | |||
# time.sleep(1) | |||
# except ServiceException as s: | |||
# logger.error("推流异常, code: {}, msg: {}", s.code, s.msg) | |||
# put_queue(self.__fb_queue, { | |||
# "errorCode": s.code, | |||
# "errorMsg": s.msg, | |||
# "status": StatusType.FAILED.value[0], | |||
# "current_time": now_date_to_str()}, is_throw_ex=False) | |||
# self.__push_stream_tool.close_push_stream_p(send=True) | |||
# break | |||
# except Exception: | |||
# logger.error("推流异常:{}", format_exc()) | |||
# put_queue(self.__fb_queue, { | |||
# "errorCode": ExceptionType.SERVICE_INNER_EXCEPTION.value[0], | |||
# "errorMsg": ExceptionType.SERVICE_INNER_EXCEPTION.value[1], | |||
# "status": StatusType.FAILED.value[0], | |||
# "current_time": now_date_to_str()}, is_throw_ex=False) | |||
# self.__push_stream_tool.close_push_stream_p(send=True) | |||
# break | |||
# logger.info("推流检测线程执行完成") |
@@ -0,0 +1,20 @@ | |||
# Whether to write logs to a log file
enable_file_log: true
# Whether to also print logs to the console (stderr)
enable_stderr: true
# Log directory, relative to the project root
base_path: "logs"
# Log file name
log_name: "airport_media.log"
# Log message format
log_fmt: "{time:YYYY-MM-DD HH:mm:ss.SSS} [{level}][{process.name}-{process.id}-{thread.name}-{thread.id}][{line}] {module}-{function} - {message}"
# Log level
level: "INFO"
# Log rotation time
rotation: "00:00"
# Log retention period
retention: "7 days"
# Log file encoding
encoding: "utf8"
@@ -0,0 +1,20 @@ | |||
mqtt: | |||
  # MQTT client id; corresponds to each airport platform's code
client_id: "THOBS@0000THJSQ232003" | |||
  # MQTT username (change for the target environment)
username: "admin" | |||
  # MQTT password (change for the target environment)
password: "admin##123" | |||
  # MQTT broker host (change for the target environment)
host: "mqtt.t-aaron.com" | |||
  # MQTT port (change for the target environment)
port: 10883 | |||
  # Keepalive interval (seconds)
keepalive: 60 | |||
topic: | |||
stream: | |||
    # Topic subscribed to for push-stream commands (do not modify)
sub_topic: "/v1/%s/stream/push" | |||
    # Topic for push-stream results (do not modify)
res_topic: "/v1/%s/stream/result" | |||
@@ -0,0 +1,11 @@ | |||
# 1: integrate via the MQTT message queue
# 2: integrate via the HTTP API
docking_method: 2
# Push/pull stream settings
stream:
  # Pull (source) stream URL
  pullUrl: "rtmp://live.play.t-aaron.com/live/1111"
  # Push (destination) stream URL
  pushUrl: "rtmp://live.push.t-aaron.com/live/2222"
@@ -0,0 +1,66 @@ | |||
Cerberus is developed and maintained by the Cerberus community. It was created | |||
by Nicola Iarocci. | |||
Core maintainers | |||
~~~~~~~~~~~~~~~~ | |||
- Nicola Iarocci (nicolaiarocci) | |||
- Frank Sachsenheim (funkyfuture) | |||
Contributors | |||
~~~~~~~~~~~~ | |||
- Antoine Lubineau | |||
- Arsh Singh | |||
- Audric Schiltknecht | |||
- Brandon Aubie | |||
- Brett | |||
- Bruno Oliveira | |||
- Bryan W. Weber | |||
- C.D. Clark III | |||
- Christian Hogan | |||
- Connor Zapfel | |||
- Damián Nohales | |||
- Danielle Pizzolli | |||
- Davis Kirkendall | |||
- Denis Carriere | |||
- Dominik Kellner | |||
- Eelke Hermens | |||
- Evgeny Odegov | |||
- Florian Rathgeber | |||
- Gabriel Wainer | |||
- Harro van der Klauw | |||
- Jaroslav Semančík | |||
- Jonathan Huot | |||
- Kaleb Pomeroy | |||
- Kirill Pavlov | |||
- Kornelijus Survila | |||
- Lujeni | |||
- Luke Bechtel | |||
- Luo Peng | |||
- Martijn Vermaat | |||
- Martin Ortbauer | |||
- Matthew Ellison | |||
- Michael Klich | |||
- Nik Haldimann | |||
- Nikita Melentev | |||
- Nikita Vlaznev | |||
- Paul Weaver | |||
- Peter Demin | |||
- Riccardo | |||
- Roman Redkovich | |||
- Scott Crunkleton | |||
- Sebastian Heid | |||
- Sebastian Rajo | |||
- Sergey Leshchenko | |||
- Tobias Betz | |||
- Trong Hieu HA | |||
- Vipul Gupta | |||
- Waldir Pimenta | |||
- Yauhen Shulitski | |||
- calve | |||
- gilbsgilbs | |||
A full, up-to-date list of contributors is available from git with: | |||
git shortlog -sne |
@@ -0,0 +1 @@ | |||
pip |
@@ -0,0 +1,15 @@ | |||
ISC License | |||
Copyright (c) 2012-2016 Nicola Iarocci. | |||
Permission to use, copy, modify, and/or distribute this software for any | |||
purpose with or without fee is hereby granted, provided that the above | |||
copyright notice and this permission notice appear in all copies. | |||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH | |||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND | |||
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, | |||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM | |||
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR | |||
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR | |||
PERFORMANCE OF THIS SOFTWARE. |
@@ -0,0 +1,164 @@ | |||
Metadata-Version: 2.1 | |||
Name: Cerberus | |||
Version: 1.3.4 | |||
Summary: Lightweight, extensible schema and data validation tool for Python dictionaries. | |||
Home-page: http://docs.python-cerberus.org | |||
Author: Nicola Iarocci | |||
Author-email: nicola@nicolaiarocci.com | |||
Maintainer: Frank Sachsenheim | |||
Maintainer-email: funkyfuture@riseup.net | |||
License: ISC | |||
Project-URL: Documentation, http://python-cerberus.org | |||
Project-URL: Code, https://github.com/pyeve/cerberus | |||
Project-URL: Issue tracker, https://github.com/pyeve/cerberus/issues | |||
Keywords: validation,schema,dictionaries,documents,normalization | |||
Platform: any | |||
Classifier: Development Status :: 5 - Production/Stable | |||
Classifier: Intended Audience :: Developers | |||
Classifier: Natural Language :: English | |||
Classifier: License :: OSI Approved :: ISC License (ISCL) | |||
Classifier: Operating System :: OS Independent | |||
Classifier: Programming Language :: Python | |||
Classifier: Programming Language :: Python :: 2 | |||
Classifier: Programming Language :: Python :: 2.7 | |||
Classifier: Programming Language :: Python :: 3 | |||
Classifier: Programming Language :: Python :: 3.4 | |||
Classifier: Programming Language :: Python :: 3.5 | |||
Classifier: Programming Language :: Python :: 3.6 | |||
Classifier: Programming Language :: Python :: 3.7 | |||
Classifier: Programming Language :: Python :: 3.8 | |||
Classifier: Programming Language :: Python :: 3.9 | |||
Classifier: Programming Language :: Python :: Implementation :: CPython | |||
Classifier: Programming Language :: Python :: Implementation :: PyPy | |||
Requires-Python: >=2.7 | |||
License-File: LICENSE | |||
License-File: AUTHORS | |||
Requires-Dist: setuptools | |||
Cerberus |latest-version| | |||
========================= | |||
|build-status| |python-support| |black| | |||
Cerberus is a lightweight and extensible data validation library for Python. | |||
.. code-block:: python | |||
>>> v = Validator({'name': {'type': 'string'}}) | |||
>>> v.validate({'name': 'john doe'}) | |||
True | |||
Features | |||
-------- | |||
Cerberus provides type checking and other base functionality out of the box and | |||
is designed to be non-blocking and easily and widely extensible, allowing for | |||
custom validation. It has no dependencies, but has the potential to become | |||
yours. | |||
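A small illustration of that extensibility (``check_with`` rules and
``_check_with_*`` validator methods are standard Cerberus features; the
``oddity`` rule itself is made up for this example):

.. code-block:: python

    >>> class MyValidator(Validator):
    ...     def _check_with_oddity(self, field, value):
    ...         # reject even numbers for fields declared with 'check_with': 'oddity'
    ...         if value % 2 == 0:
    ...             self._error(field, "must be an odd number")
    >>> v = MyValidator({'amount': {'type': 'integer', 'check_with': 'oddity'}})
    >>> v.validate({'amount': 3})
    True
    >>> v.validate({'amount': 4})
    False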
Versioning & Interpreter support | |||
-------------------------------- | |||
The Cerberus `1.x` versions can be used with Python 2 while version `2.0` and | |||
later rely on Python 3 features. | |||
Starting with Cerberus 1.2, it is maintained according to | |||
`semantic versioning`_. So, a major release sheds off the old and defines a | |||
space for the new, minor releases ship further new features and improvements | |||
(you know the drill, new bugs are inevitable too), and micro releases polish a
definite amount of features to glory. | |||
We intend to test Cerberus against all CPython interpreters at least until half | |||
a year after their `end of life`_ and against the most recent PyPy interpreter | |||
as a requirement for a release. If you still need to use it with a potential | |||
security hole in your setup, it should most probably work with the latest | |||
minor version branch from the time when the interpreter was still tested. | |||
Subsequent minor versions have good chances as well. In any case, you are | |||
advised to run the contributed test suite on your target system. | |||
Funding | |||
------- | |||
Cerberus is an open source, collaboratively funded project. If you run a | |||
business and are using Cerberus in a revenue-generating product, it would | |||
make business sense to sponsor its development: it ensures the project that | |||
your product relies on stays healthy and actively maintained. Individual users | |||
are also welcome to make a recurring pledge or a one time donation if Cerberus | |||
has helped you in your work or personal projects. | |||
Every single sign-up makes a significant impact towards making Cerberus possible. To
learn more, check out our `funding page`_. | |||
Documentation | |||
------------- | |||
Complete documentation is available at http://docs.python-cerberus.org | |||
Installation | |||
------------ | |||
Cerberus is on PyPI_, so all you need to do is: | |||
.. code-block:: console | |||
$ pip install cerberus | |||
Testing | |||
------- | |||
Just run: | |||
.. code-block:: console | |||
$ python setup.py test | |||
Or you can use tox to run the tests under all supported Python versions. Make | |||
sure the required python versions are installed and run: | |||
.. code-block:: console | |||
$ pip install tox # first time only | |||
$ tox | |||
Contributing | |||
------------ | |||
Please see the `Contribution Guidelines`_. | |||
Copyright | |||
--------- | |||
Cerberus is an open source project by `Nicola Iarocci`_. See the license_ file | |||
for more information. | |||
.. _Contribution Guidelines: https://github.com/pyeve/cerberus/blob/master/CONTRIBUTING.rst | |||
.. _end of life: https://devguide.python.org/#status-of-python-branches | |||
.. _funding page: http://docs.python-cerberus.org/en/latest/funding.html | |||
.. _license: https://github.com/pyeve/cerberus/blob/master/LICENSE | |||
.. _Nicola Iarocci: https://nicolaiarocci.com/ | |||
.. _PyPI: https://pypi.python.org/ | |||
.. _semantic versioning: https://semver.org/ | |||
.. |black| image:: https://img.shields.io/badge/code%20style-black-000000.svg | |||
:alt: Black code style | |||
:target: https://black.readthedocs.io/ | |||
.. |build-status| image:: https://travis-ci.org/pyeve/cerberus.svg?branch=master | |||
:alt: Build status | |||
:target: https://travis-ci.org/pyeve/cerberus | |||
.. |latest-version| image:: https://img.shields.io/pypi/v/cerberus.svg | |||
:alt: Latest version on PyPI | |||
:target: https://pypi.org/project/cerberus | |||
.. |license| image:: https://img.shields.io/pypi/l/cerberus.svg | |||
:alt: Software license | |||
:target: https://github.com/pyeve/cerberus/blob/master/LICENSE | |||
.. |python-support| image:: https://img.shields.io/pypi/pyversions/cerberus.svg | |||
:target: https://pypi.python.org/pypi/cerberus | |||
:alt: Python versions |
@@ -0,0 +1,48 @@ | |||
Cerberus-1.3.4.dist-info/AUTHORS,sha256=Wa5cbyooET3QnA5rDV6trSRxay3If8IuT0HEeEPHSSo,1145 | |||
Cerberus-1.3.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 | |||
Cerberus-1.3.4.dist-info/LICENSE,sha256=OXJkvLKH9kPVx7jBhG566vGRH4I2svi9759-bxzy__k,751 | |||
Cerberus-1.3.4.dist-info/METADATA,sha256=eDm6XwMWX8T1L3bOIgc2f5hg5YZ4OW120hznkjgXqTk,5799 | |||
Cerberus-1.3.4.dist-info/RECORD,, | |||
Cerberus-1.3.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
Cerberus-1.3.4.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 | |||
Cerberus-1.3.4.dist-info/top_level.txt,sha256=ZwdViFmandWj-jK09wLSDM63moDFYE46vDo7tRb-1HE,9 | |||
cerberus/__init__.py,sha256=Zj2rLhMbfDS9h_0UuNBf0DfNucUv7ae-RCn51tTiRB4,796 | |||
cerberus/__pycache__/__init__.cpython-38.pyc,, | |||
cerberus/__pycache__/errors.cpython-38.pyc,, | |||
cerberus/__pycache__/platform.cpython-38.pyc,, | |||
cerberus/__pycache__/schema.cpython-38.pyc,, | |||
cerberus/__pycache__/utils.cpython-38.pyc,, | |||
cerberus/__pycache__/validator.cpython-38.pyc,, | |||
cerberus/benchmarks/__init__.py,sha256=tIrl4N414oddAmhtfZm5u5tmpSKCaaSvyg780Y7_F5M,80 | |||
cerberus/benchmarks/__pycache__/__init__.cpython-38.pyc,, | |||
cerberus/benchmarks/__pycache__/test_overall_performance_1.cpython-38.pyc,, | |||
cerberus/benchmarks/__pycache__/test_overall_performance_2.cpython-38.pyc,, | |||
cerberus/benchmarks/test_overall_performance_1.py,sha256=7iNmNttRTC7ZyZ7q3rmGQfutfkV-Dp6eaolW-ZlHA3Q,6052 | |||
cerberus/benchmarks/test_overall_performance_2.py,sha256=Z41GN5ZlWRcuNbFsXPLDfLqAAdTffOGGwm695LPsi4g,1610 | |||
cerberus/errors.py,sha256=xxuSnQmY1wGHr3nLob0xwMjf8pXCwrn7cBxT0IRrHOc,21252 | |||
cerberus/platform.py,sha256=7eEKOaKSTjn2AiYrqymRrMTSbX2iB7RNstIbPQVr6bE,856 | |||
cerberus/schema.py,sha256=57M65CjNq1TV9dTwnntDT567K8gC_9f16oMUxCqqSvU,18337 | |||
cerberus/tests/__init__.py,sha256=-4-nTnmGWSRzRFGkF_lSHt8c_6b3ZdSaT8woqerrUp0,4687 | |||
cerberus/tests/__pycache__/__init__.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/conftest.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/test_assorted.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/test_customization.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/test_errors.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/test_legacy.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/test_normalization.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/test_registries.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/test_schema.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/test_utils.cpython-38.pyc,, | |||
cerberus/tests/__pycache__/test_validation.cpython-38.pyc,, | |||
cerberus/tests/conftest.py,sha256=CVw_J0T2rw89KTYL7OKQyKwv8yE0kCa18Vvv-NfP3ls,2517 | |||
cerberus/tests/test_assorted.py,sha256=DifsOU-Y16Zp4utQ4SyJtTGjgL4fPwkVWXiKQnCmbcM,3169 | |||
cerberus/tests/test_customization.py,sha256=M8_39XtpLOkKpsJX6O6E4danmNy6HW_VbnfO-39SHMw,3383 | |||
cerberus/tests/test_errors.py,sha256=dFB8HpAq0vJwJxh3oDA4bhr8e2O1V9gVnu4B_0Flc_w,11862 | |||
cerberus/tests/test_legacy.py,sha256=37wjVkGfwPX9kZE_TDds3Q9R5b-CQzlvQiOJVG9Y9NI,30 | |||
cerberus/tests/test_normalization.py,sha256=zcsNVbEF4SNQj0UI1O1bBwbvIn5QFWEYStxalS0P5KU,17212 | |||
cerberus/tests/test_registries.py,sha256=dD2BGXcG2f-Z0qBAYqNgfCKJK7ve-pdK93LaNrJrML0,2841 | |||
cerberus/tests/test_schema.py,sha256=OQi7qevtQe1ZfG-xeUToQ6p9zlhBENq3bHHZZR_RXOM,5080 | |||
cerberus/tests/test_utils.py,sha256=YhkRkqKmeARPXJIfG6o9TvgXpk45gucN5LToZgnDLvw,290 | |||
cerberus/tests/test_validation.py,sha256=OIHxLnJdUOBjxtsOSsiLQ4YrpucQ8wo9Zv6z5Q6wmuw,59155 | |||
cerberus/utils.py,sha256=IbWRFyNtBLz38pJsT2ogcaBzjHHyr59j2nqDyMH5czM,3875 | |||
cerberus/validator.py,sha256=rcS-H26EqX41y1CZzwpA2G7DitQav_bk4sqUWkuhdng,64960 |
@@ -0,0 +1,5 @@ | |||
Wheel-Version: 1.0 | |||
Generator: bdist_wheel (0.40.0) | |||
Root-Is-Purelib: true | |||
Tag: py3-none-any | |||
@@ -0,0 +1 @@ | |||
cerberus |