c - In boost::spirit::lex, the first parse takes the longest time, while every parse after that is much faster
I feed a series of text messages into a SIP parser. The first one always takes by far the longest, no matter which message happens to come first. I would like to know whether spirit::lex does any initialization work during the first parse?
template <typename Lexer>
struct sip_token : lex::lexer<Lexer>
{
    sip_token()
    {
        this->self.add_pattern
            ("KSIP", "sip:")
            ("KSIPS", "sips:")
            ("USERINFO", "[0-9a-zA-Z-_.!~*'()]+(:[0-9a-zA-Z-_.!~*'()&=+$,]*)?@")
            ("DOMAINLBL", "([0-9a-zA-Z]|([0-9a-zA-Z][0-9a-zA-Z-]*[0-9a-zA-Z]))")
            ("TOPLBL", "[a-zA-Z]|([a-zA-Z][0-9a-zA-Z-]*[0-9a-zA-Z-])")
            ("INVITE", "INVITE")
            ("ACK", "ACK")
            ("OPTIONS", "OPTIONS")
            ("BYE", "BYE")
            ("CANCEL", "CANCEL")
            ("REGISTER", "REGISTER")
            ("METHOD", "({INVITE}|{ACK}|{OPTIONS}|{BYE}|{CANCEL}|{REGISTER})")
            ("SIPVERSION", "SIP\\/[0-9]\\.[0-9]")
            ("PROTOCOAL", "SIP\\/[^/]+\\/UDP")
            ("IPV4ADDR", "(\\d{1,3}\\.){3}\\d{1,3}")
            ("HOSTNAME", "[^ \t\r\n]+")
            ("SIPURL", "{KSIP}{USERINFO}?{HOSTNAME}(:[0-9]+)?")
            ("SIPSURL", "{KSIPS}{USERINFO}?{HOSTNAME}(:[0-9]+)?")
            ("SENTBY", "({HOSTNAME}|{IPV4ADDR})(:[0-9]+)?")
            ("GENPARM", "[^ ;\\n]+=[^ ;\r\\n]+")
            ("TOKEN", "[0-9a-zA-Z-.!%*_+~`']+")
            ("NAMEADDR", "({TOKEN} )?<({SIPURL}|{SIPSURL})>")
            ("STATUSCODE", "\\d{3}")
            ("REASONPHRASE", "[0-9a-zA-Z-_.!~*'()&=+$,]*")
            ("CR", "\\r")
            ("LF", "\\n")
        ;
        this->self.add
            ("{METHOD} {SIPURL} {SIPVERSION}", T_REQ_LINE)
            ("{SIPVERSION} {STATUSCODE} {REASONPHRASE}", T_STAT_LINE)
            ("{CR}?{LF}", T_CRLF)
            ("Via: {PROTOCOAL} {SENTBY}(;{GENPARM})*", T_VIA)
            ("To: {NAMEADDR}(;{GENPARM})*", T_TO)
            ("From: {NAMEADDR}(;{GENPARM})*", T_FROM)
            ("[0-9a-zA-Z -_.!~*'()&=+$,;/?:@]+", T_OTHER)
        ;
    }
};
The grammar:
template <typename Iterator>
struct sip_grammar : qi::grammar<Iterator>
{
    template <typename TokenDef>
    sip_grammar(TokenDef const& tok)
      : sip_grammar::base_type(start)
    {
        using boost::phoenix::ref;
        using boost::phoenix::size;
        using boost::spirit::qi::eol;

        start      = request | response;
        response   = stat_line >> *(msg_header) >> qi::token(T_CRLF);
        request    = req_line >> *(msg_header) >> qi::token(T_CRLF);
        stat_line  = qi::token(T_STAT_LINE) >> qi::token(T_CRLF);
        req_line   = qi::token(T_REQ_LINE) >> qi::token(T_CRLF);
        msg_header = (qi::token(T_VIA) | qi::token(T_TO) | qi::token(T_FROM) | qi::token(T_OTHER))
                     >> qi::token(T_CRLF);
    }

    std::size_t c, w, l;
    qi::rule<Iterator> start, response, request, stat_line, req_line, msg_header;
};
Timing:
gettimeofday(&t1, NULL);
bool r = lex::tokenize_and_parse(first, last, siplexer, g);
gettimeofday(&t2, NULL);
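The declarations of siplexer and g are not shown in the question. A minimal sketch of how they are presumably wired together, modelled on the word_count example from the Boost.Spirit documentation (the token-id enum and the char const* iterator choice are assumptions, not part of the original code):
// Sketch only: typical lexertl wiring for the sip_token / sip_grammar
// definitions shown above.  Token ids and iterator types are assumptions.
#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/qi.hpp>
#include <string>

namespace lex = boost::spirit::lex;
namespace qi  = boost::spirit::qi;

// Token ids referenced by sip_token::add; they must be unique and are
// conventionally taken from lex::min_token_id upwards.  In practice this
// enum has to be visible before the sip_token definition.
enum sip_token_ids
{
    T_REQ_LINE = lex::min_token_id + 1,
    T_STAT_LINE,
    T_CRLF,
    T_VIA,
    T_TO,
    T_FROM,
    T_OTHER
};

// ... sip_token and sip_grammar exactly as defined in the question ...

int main()
{
    typedef lex::lexertl::token<char const*> token_type;
    typedef lex::lexertl::lexer<token_type>  lexer_type;

    sip_token<lexer_type> siplexer;                                  // token definitions
    sip_grammar<sip_token<lexer_type>::iterator_type> g(siplexer);  // the grammar

    std::string input = "...one SIP packet...";
    char const* first = input.c_str();
    char const* last  = first + input.size();

    bool r = lex::tokenize_and_parse(first, last, siplexer, g);
    return r ? 0 : 1;
}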
Results:
pkt1 time=40945(us)
pkt2 time=140(us)
pkt3 time=60(us)
pkt4 time=74(us)
pkt5 time=58(us)
pkt6 time=51(us)
Answer:
Apparently, it does :)
Lex probably generates the DFAs on first use (perhaps one per lexer state). That is most likely what takes most of the time. Use a profiler to be sure :/
Now you can either
> make sure the tables are initialized before the first real use (a warm-up sketch follows below), or
> use the Static Lexer Model to avoid the startup cost altogether
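A minimal sketch of the first option: run the lexer once on a throw-away input at startup. This assumes the expensive table construction happens lazily inside the first tokenize call, which, as noted above, should be confirmed with a profiler.
// Sketch only: warm up the lexer once so the (assumed) lazy DFA
// construction is not billed to the first real packet.
#include <boost/spirit/include/lex_lexertl.hpp>

namespace lex = boost::spirit::lex;

template <typename Lexer, typename Grammar>
void warm_up(Lexer const& siplexer, Grammar const& g)
{
    // A tiny dummy input; we only care about the side effect of building
    // the lexer tables, not about the parse result.
    char const dummy[] = "\r\n";
    char const* first = dummy;
    char const* last  = dummy + sizeof(dummy) - 1;
    lex::tokenize_and_parse(first, last, siplexer, g);
}
Call warm_up(siplexer, g) once before the timed loop; the first measured packet should then land in the same range as the others.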
The second, static-lexer option means you write an "extra" main that generates the DFAs as C++ code:
#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/lex_generate_static_lexertl.hpp>
#include <fstream>

#include "sip_token.hpp"

using namespace boost::spirit;

int main(int argc, char* argv[])
{
    // create the lexer object instance needed to invoke the generator
    sip_token<lex::lexertl::lexer<> > my_lexer; // the token definition

    std::ofstream out(argc < 2 ? "sip_token_static.hpp" : argv[1]);

    // invoke the generator, passing the token definition, the output stream
    // and the name suffix of the tables and functions to be generated
    //
    // The suffix "sip" used below results in a type lexertl::static_::lexer_sip
    // to be generated, which needs to be passed as a template parameter to the
    // lexertl::static_lexer template (see word_count_static.cpp).
    return lex::lexertl::generate_static_dfa(my_lexer, out, "sip") ? 0 : -1;
}
A sample of the generated code (for the word-count example from the tutorial) can be seen here: http://www.boost.org/doc/libs/1_54_0/libs/spirit/example/lex/static_lexer/word_count_static.hpp
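To consume the generated tables, the token definition is then instantiated with the static lexer instead of the dynamic one. A sketch modelled on word_count_static.cpp (the typedef names are assumptions; lexertl::static_::lexer_sip is the type the suffix "sip" above produces, per the comment in the generator):
// Sketch only: plugging the generated static tables into the existing
// sip_token definition, following the word_count_static.cpp pattern.
#include <boost/spirit/include/lex_static_lexertl.hpp>

#include "sip_token.hpp"        // the sip_token definition from above
#include "sip_token_static.hpp" // the header produced by the generator

namespace lex = boost::spirit::lex;

// The static tables generated with suffix "sip" are selected via
// lex::lexertl::static_::lexer_sip.
typedef lex::lexertl::token<char const*> token_type;
typedef lex::lexertl::static_lexer<token_type, lex::lexertl::static_::lexer_sip> lexer_type;

// From here on the lexer is used exactly as before, but without the
// run-time DFA generation:
//   sip_token<lexer_type> siplexer;
//   sip_grammar<sip_token<lexer_type>::iterator_type> g(siplexer);
//   bool r = lex::tokenize_and_parse(first, last, siplexer, g);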
Tags: c, boost-spirit