部分hls中会出现类似[../]或者[./]的相对地址, 因此写了一个解析的方法

This commit is contained in:
Alex 2023-02-05 16:17:01 +08:00
parent a52e9d6ba8
commit 7b59ad874a
4 changed files with 48 additions and 11 deletions

View File

@ -159,7 +159,50 @@ StrCaseMap Parser::parseArgs(const string &str, const char *pair_delim, const ch
}
return ret;
}
std::string Parser::merge_url(const string &base_url, const string &path) {
    // Resolve "path" (as found in an HLS playlist) against "base_url":
    // full urls pass through, absolute paths ("/x") replace everything after
    // scheme://host, relative paths get RFC 3986 style dot-segment removal
    // ("." dropped, ".." pops one directory, clamped at the host).
    if (path.empty()) {
        return base_url;
    }
    if (path.find("://") != string::npos) {
        // path already carries a scheme, it is a complete url
        return path;
    }

    // locate the end of "scheme://host[:port]" inside base_url;
    // "root" is base_url up to (excluding) the first path slash
    auto scheme_pos = base_url.find("://");
    auto host_start = scheme_pos == string::npos ? 0 : scheme_pos + 3;
    auto host_end = base_url.find('/', host_start);
    string root = host_end == string::npos ? base_url : base_url.substr(0, host_end);

    // collect the directory segments the path is resolved from;
    // an absolute path starts from the root, discarding base directories
    vector<string> dirs;
    if (path[0] != '/' && host_end != string::npos) {
        auto dir_end = base_url.rfind('/'); // drop base_url's file name part
        size_t pos = host_end + 1;
        while (pos <= dir_end) {
            auto next = base_url.find('/', pos);
            if (next == string::npos || next > dir_end) {
                break;
            }
            dirs.emplace_back(base_url.substr(pos, next - pos));
            pos = next + 1;
        }
    }

    // apply each segment of path; ".." never climbs above the host
    size_t pos = path[0] == '/' ? 1 : 0;
    while (true) {
        auto next = path.find('/', pos);
        auto seg = next == string::npos ? path.substr(pos) : path.substr(pos, next - pos);
        if (seg == "..") {
            if (!dirs.empty()) {
                dirs.pop_back();
            }
        } else if (seg != ".") {
            dirs.emplace_back(seg);
        }
        if (next == string::npos) {
            break;
        }
        pos = next + 1;
    }

    // join: root followed by "/" + segment for every remaining segment
    string ret = root;
    for (auto &seg : dirs) {
        ret += '/';
        ret += seg;
    }
    return ret;
}
void RtspUrl::parse(const string &strUrl) {
auto schema = FindField(strUrl.data(), nullptr, "://");
bool is_ssl = strcasecmp(schema.data(), "rtsps") == 0;

View File

@ -105,6 +105,8 @@ public:
//解析?后面的参数
static StrCaseMap parseArgs(const std::string &str, const char *pair_delim = "&", const char *key_delim = "=");
static std::string merge_url(const std::string &base_url, const std::string &path);
private:
std::string _strMethod;
std::string _strUrl;

View File

@ -37,15 +37,7 @@ bool HlsParser::parse(const string &http_url, const string &m3u8) {
if ((_is_m3u8_inner || extinf_dur != 0) && line[0] != '#') {
segment.duration = extinf_dur;
if (line.find("http://") == 0 || line.find("https://") == 0) {
segment.url = line;
} else {
if (line.find("/") == 0) {
segment.url = http_url.substr(0, http_url.find("/", 8)) + line;
} else {
segment.url = http_url.substr(0, http_url.rfind("/") + 1) + line;
}
}
segment.url = Parser::merge_url(http_url, line);
if (!_is_m3u8_inner) {
//ts按照先后顺序排序
ts_map.emplace(index++, segment);

View File

@ -182,8 +182,8 @@ void HttpClient::onErr(const SockException &ex) {
ssize_t HttpClient::onRecvHeader(const char *data, size_t len) {
_parser.Parse(data);
if (_parser.Url() == "302" || _parser.Url() == "301") {
auto new_url = _parser["Location"];
if (_parser.Url() == "302" || _parser.Url() == "301" || _parser.Url() == "303") {
auto new_url = Parser::merge_url(_url, _parser["Location"]);
if (new_url.empty()) {
throw invalid_argument("未找到Location字段(跳转url)");
}