diff options
author | Steve Block <steveblock@google.com> | 2010-04-27 16:31:00 +0100 |
---|---|---|
committer | Steve Block <steveblock@google.com> | 2010-05-11 14:42:12 +0100 |
commit | dcc8cf2e65d1aa555cce12431a16547e66b469ee (patch) | |
tree | 92a8d65cd5383bca9749f5327fb5e440563926e6 /WebCore/html/HTMLTokenizer.cpp | |
parent | ccac38a6b48843126402088a309597e682f40fe6 (diff) | |
download | external_webkit-dcc8cf2e65d1aa555cce12431a16547e66b469ee.zip external_webkit-dcc8cf2e65d1aa555cce12431a16547e66b469ee.tar.gz external_webkit-dcc8cf2e65d1aa555cce12431a16547e66b469ee.tar.bz2 |
Merge webkit.org at r58033 : Initial merge by git
Change-Id: If006c38561af287c50cd578d251629b51e4d8cd1
Diffstat (limited to 'WebCore/html/HTMLTokenizer.cpp')
-rw-r--r-- | WebCore/html/HTMLTokenizer.cpp | 18 |
1 files changed, 5 insertions, 13 deletions
diff --git a/WebCore/html/HTMLTokenizer.cpp b/WebCore/html/HTMLTokenizer.cpp index 6fa3e20..390d332 100644 --- a/WebCore/html/HTMLTokenizer.cpp +++ b/WebCore/html/HTMLTokenizer.cpp @@ -71,25 +71,17 @@ namespace WebCore { using namespace HTMLNames; -#if MOBILE -// The mobile device needs to be responsive, as such the tokenizer chunk size is reduced. // This value is used to define how many characters the tokenizer will process before // yeilding control. -static const int defaultTokenizerChunkSize = 256; -#else +// To increase responsiveness reduce the tokenizer chunk size. static const int defaultTokenizerChunkSize = 4096; -#endif -#if MOBILE -// As the chunks are smaller (above), the tokenizer should not yield for as long a period, otherwise -// it will take way to long to load a page. -static const double defaultTokenizerTimeDelay = 0.300; -#else // FIXME: We would like this constant to be 200ms. // Yielding more aggressively results in increased responsiveness and better incremental rendering. // It slows down overall page-load on slower machines, though, so for now we set a value of 500. +// For smaller chunks (above) decrease the value of TimerDelay as the tokenizer should not +// yield for as long a period otherwise it will take way too long to load a page. 
static const double defaultTokenizerTimeDelay = 0.500; -#endif static const char commentStart [] = "<!--"; static const char doctypeStart [] = "<!doctype"; @@ -1523,7 +1515,7 @@ HTMLTokenizer::State HTMLTokenizer::parseTag(SegmentedString& src, State state) m_scriptTagSrcAttrValue = String(); m_scriptTagCharsetAttrValue = String(); if (m_currentToken.attrs && !m_fragment) { - if (m_doc->frame() && m_doc->frame()->script()->canExecuteScripts()) { + if (m_doc->frame() && m_doc->frame()->script()->canExecuteScripts(NotAboutToExecuteScript)) { if ((a = m_currentToken.attrs->getAttributeItem(srcAttr))) m_scriptTagSrcAttrValue = m_doc->completeURL(deprecatedParseURL(a->value())).string(); } @@ -1940,7 +1932,7 @@ void HTMLTokenizer::finish() PassRefPtr<Node> HTMLTokenizer::processToken() { ScriptController* scriptController = (!m_fragment && m_doc->frame()) ? m_doc->frame()->script() : 0; - if (scriptController && scriptController->canExecuteScripts()) + if (scriptController && scriptController->canExecuteScripts(NotAboutToExecuteScript)) // FIXME: Why isn't this m_currentScriptTagStartLineNumber? I suspect this is wrong. scriptController->setEventHandlerLineNumber(m_currentTagStartLineNumber + 1); // Script line numbers are 1 based. if (m_dest > m_buffer) { |