Revision: 19613
Author:   [email protected]
Date:     Fri Feb 28 13:29:06 2014 UTC
Log:      Experimental parser: cleanup logging

[email protected]

BUG=

Review URL: https://codereview.chromium.org/180213003
http://code.google.com/p/v8/source/detail?r=19613

Added:
 /branches/experimental/parser/tools/lexer_generator/ply_utilities.py
Modified:
 /branches/experimental/parser/tools/gyp/v8.gyp
 /branches/experimental/parser/tools/lexer_generator/code_generator.py
 /branches/experimental/parser/tools/lexer_generator/dfa_optimizer.py
 /branches/experimental/parser/tools/lexer_generator/generator.py
 /branches/experimental/parser/tools/lexer_generator/regex_parser.py
 /branches/experimental/parser/tools/lexer_generator/rule_parser.py

=======================================
--- /dev/null
+++ /branches/experimental/parser/tools/lexer_generator/ply_utilities.py Fri Feb 28 13:29:06 2014 UTC
@@ -0,0 +1,70 @@
+# Copyright 2014 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+#       notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+#       copyright notice, this list of conditions and the following
+#       disclaimer in the documentation and/or other materials provided
+#       with the distribution.
+#     * Neither the name of Google Inc. nor the names of its
+#       contributors may be used to endorse or promote products derived
+#       from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import logging
+import ply.lex as lex
+import ply.yacc as yacc
+
+# Builds and caches PLY lexer/parser pairs so that repeated parses of the
+# same grammar (identified by |name|) reuse the already-constructed tables.
+class ParserBuilder:
+
+  # Logger handed to yacc as both debuglog and errorlog: debug output is
+  # routed to the logging module, while PLY warnings and errors are
+  # escalated to exceptions so grammar problems cannot go unnoticed.
+  class Logger(object):
+    def debug(self,msg,*args,**kwargs):
+      logging.debug(msg % args)
+
+    def info(self,msg,*args,**kwargs):
+      # NOTE(review): info is deliberately demoted to debug level to keep
+      # normal runs quiet -- confirm this is intended.
+      logging.debug(msg % args)
+
+    def warning(self,msg,*args,**kwargs):
+      raise Exception("warning: "+ (msg % args) + "\n")
+
+    def error(self,msg,*args,**kwargs):
+      raise Exception("error: "+ (msg % args) + "\n")
+
+  # Cache of (lexer_instance, parser_instance) tuples keyed by grammar name.
+  __static_instances = {}
+  @staticmethod
+  def parse(
+ string, name, new_lexer, new_parser, preparse = None, postparse = None):
+    # Parses |string| with the parser registered under |name|, creating the
+    # lexer/parser pair via the new_lexer/new_parser factories on first use.
+    # preparse/postparse, if given, are invoked with the parser instance
+    # before/after the parse.  If parsing raises, the cached pair is dropped
+    # so a parser left in an unknown state is never reused.
+    if not name in ParserBuilder.__static_instances:
+      logger = ParserBuilder.Logger()
+      lexer_instance = new_lexer()
+      lexer_instance.lex = lex.lex(module=lexer_instance)
+      instance = new_parser()
+      instance.yacc = yacc.yacc(
+        module=instance, debug=True, write_tables=0,
+        debuglog=logger, errorlog=logger)
+      ParserBuilder.__static_instances[name] = (lexer_instance, instance)
+    (lexer_instance, instance) = ParserBuilder.__static_instances[name]
+    if preparse:
+      preparse(instance)
+    try:
+      return_value = instance.yacc.parse(string, lexer=lexer_instance.lex)
+    except Exception:
+      del ParserBuilder.__static_instances[name]
+      raise
+    if postparse:
+      postparse(instance)
+    return return_value
=======================================
--- /branches/experimental/parser/tools/gyp/v8.gyp Wed Feb 19 15:52:12 2014 UTC +++ /branches/experimental/parser/tools/gyp/v8.gyp Fri Feb 28 13:29:06 2014 UTC
@@ -252,6 +252,7 @@
           '../../tools/lexer_generator/key_encoding.py',
           '../../tools/lexer_generator/nfa.py',
           '../../tools/lexer_generator/nfa_builder.py',
+          '../../tools/lexer_generator/ply_utilities.py',
           '../../tools/lexer_generator/regex_parser.py',
           '../../tools/lexer_generator/rule_parser.py',
           '../../tools/lexer_generator/term.py',
=======================================
--- /branches/experimental/parser/tools/lexer_generator/code_generator.py Wed Feb 19 15:52:12 2014 UTC +++ /branches/experimental/parser/tools/lexer_generator/code_generator.py Fri Feb 28 13:29:06 2014 UTC
@@ -27,6 +27,7 @@

 import os
 import sys
+import logging
 import jinja2
 from copy import deepcopy
 from dfa import Dfa
@@ -40,8 +41,7 @@
                minimize_default = True,
                inline = True,
                switching = True,
-               debug_print = False,
-               log = False):
+               debug_print = False):
     if minimize_default:
       dfa = rule_processor.default_automata().minimal_dfa()
     else:
@@ -49,7 +49,6 @@
     self.__dfa = dfa
     self.__default_action = rule_processor.default_action()
     self.__debug_print = debug_print
-    self.__log = log
     self.__inline = inline
     self.__switching = switching
     self.__jump_table = []
@@ -380,20 +379,17 @@
     dfa_states = self.__dfa_states
     # split transitions
     switched = reduce(self.__split_transitions, dfa_states, 0)
-    if self.__log:
-      print "%s states use switch (instead of if)" % switched
+    logging.info("%s states use switch (instead of if)" % switched)
     # rewrite deferred transitions
     for state in dfa_states:
       self.__rewrite_deferred_transitions(state)
     # set nodes to inline
     if self.__inline:
       inlined = reduce(self.__set_inline, dfa_states, 0)
-      if self.__log:
-        print "%s states inlined" % inlined
+      logging.info("%s states inlined" % inlined)
     # rewrite transitions to use jumps
+    inlined_nodes = self.__rewrite_transitions_to_jumps(0, len(dfa_states), {})
-    if self.__log:
-      print "%s inlined nodes created" % inlined_nodes
+    logging.info("%s inlined nodes created" % inlined_nodes)
     # mark the entry point in case there are implicit jumps to it
     self.__dfa_states[0]['entry_points']['state_entry'] = True

=======================================
--- /branches/experimental/parser/tools/lexer_generator/dfa_optimizer.py Wed Feb 19 15:52:12 2014 UTC +++ /branches/experimental/parser/tools/lexer_generator/dfa_optimizer.py Fri Feb 28 13:29:06 2014 UTC
@@ -25,6 +25,7 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+import logging
 from transition_key import TransitionKey
 from automaton import Term, Action, Automaton
 from dfa import Dfa
@@ -139,12 +140,11 @@
 class DfaOptimizer(object):

   @staticmethod
-  def optimize(dfa, log):
-    return DfaOptimizer(dfa, log).__replace_tokens_with_gotos()
+  def optimize(dfa):
+    return DfaOptimizer(dfa).__replace_tokens_with_gotos()

-  def __init__(self, dfa, log):
+  def __init__(self, dfa):
     self.__dfa = dfa
-    self.__log = log

   @staticmethod
   def __transistions_match(encoding, incoming_key, incoming_state, state):
@@ -354,12 +354,9 @@
     start_name = name(self.__dfa.start_state())
     states = self.__remove_orphaned_states(states, orphanable, start_name)
     # dump stats
-    if self.__log:
-      print 'goto_start inserted %s' % counters['goto_start']
-      print 'store_token inserted %s' % (
-        counters['store_token'])
-      print 'store_harmony_token %s' % (
-        counters['store_harmony_token'])
-      print 'transitions removed %s' % counters['removals']
-      print 'states split %s' % counters['split_state']
+    logging.info('goto_start inserted %s' % counters['goto_start'])
+    logging.info('store_token inserted %s' % (counters['store_token']))
+    logging.info('store_harmony_token %s' % (counters['store_harmony_token']))
+    logging.info('transitions removed %s' % counters['removals'])
+    logging.info('states split %s' % counters['split_state'])
     return Dfa(self.__dfa.encoding(), start_name, states)
=======================================
--- /branches/experimental/parser/tools/lexer_generator/generator.py Thu Feb 20 08:59:23 2014 UTC +++ /branches/experimental/parser/tools/lexer_generator/generator.py Fri Feb 28 13:29:06 2014 UTC
@@ -25,6 +25,7 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+import logging
 import argparse
 from dot_utilities import *
 from nfa import Nfa
@@ -141,70 +142,57 @@
   parser.add_argument('--debug-code', action='store_true')
   parser.add_argument('--profile', action='store_true')
   parser.add_argument('--rule-html')
-  parser.add_argument('--count-paths', action='store_true')
   args = parser.parse_args()

   minimize_default = not args.no_minimize_default
-  verbose = args.verbose
+  if args.verbose:
+    logging.basicConfig(level=logging.INFO)

   if args.profile:
     profiler = start_profiling()

   re_file = args.re
-  if verbose:
-    print "parsing %s" % re_file
+  logging.info("parsing %s" % re_file)
   with open(re_file, 'r') as f:
     rule_processor = RuleProcessor(f.read(), args.encoding)

   if not args.no_optimize_default:
-    rule_processor.default_automata().optimize_dfa(log = args.verbose)
+    rule_processor.default_automata().optimize_dfa()

   if minimize_default:
     if args.no_verify_default:
       DfaMinimizer.set_verify(False)
     dfa = rule_processor.default_automata().dfa()
     mdfa = rule_processor.default_automata().minimal_dfa()
-    if verbose:
-      print "nodes reduced from %s to %s" % (
-        dfa.node_count(), mdfa.node_count())
+    logging.info("nodes reduced from %s to %s" % (
+        dfa.node_count(), mdfa.node_count()))
     DfaMinimizer.set_verify(True)

-  if args.count_paths:
-    path_count = 0
-    print 'counting'
-    for path in rule_processor.default_automata().minimal_dfa().path_iter():
-      path_count += 1
-    print 'done', path_count
-
   html_file = args.html
   if html_file:
     html = generate_html(
       rule_processor, minimize_default, not args.no_merge_html)
     with open(args.html, 'w') as f:
       f.write(html)
-      if verbose:
-        print "wrote html to %s" % html_file
+      logging.info("wrote html to %s" % html_file)

   rule_html_file = args.rule_html
   if rule_html_file:
     html = generate_rule_tree_html(rule_processor)
     with open(rule_html_file, 'w') as f:
       f.write(html)
-      if verbose:
-        print "wrote html to %s" % rule_html_file
+      logging.info("wrote html to %s" % rule_html_file)

   code_file = args.code
   if code_file:
     code_generator = CodeGenerator(rule_processor,
                                    minimize_default = minimize_default,
-                                   log = verbose,
                                    inline = not args.no_inline,
                                    debug_print = args.debug_code)
     code = code_generator.process()
     with open(code_file, 'w') as f:
       f.write(code)
-      if verbose:
-        print "wrote code to %s" % code_file
+      logging.info("wrote code to %s" % code_file)

   input_file = args.input
   if input_file:
=======================================
--- /branches/experimental/parser/tools/lexer_generator/regex_parser.py Mon Feb 17 11:26:21 2014 UTC +++ /branches/experimental/parser/tools/lexer_generator/regex_parser.py Fri Feb 28 13:29:06 2014 UTC
@@ -25,50 +25,12 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+import logging
 import ply.lex as lex
 import ply.yacc as yacc
 from term import Term
 from nfa_builder import NfaBuilder
-
-class ParserBuilder:
-
-  class Logger(object):
-    def debug(self,msg,*args,**kwargs):
-      pass
-
-    def info(self,msg,*args,**kwargs):
-      pass
-
-    def warning(self,msg,*args,**kwargs):
-      raise Exception("warning: "+ (msg % args) + "\n")
-
-    def error(self,msg,*args,**kwargs):
-      raise Exception("error: "+ (msg % args) + "\n")
-
-  __static_instances = {}
-  @staticmethod
-  def parse(
- string, name, new_lexer, new_parser, preparse = None, postparse = None):
-    if not name in ParserBuilder.__static_instances:
-      logger = ParserBuilder.Logger()
-      lexer_instance = new_lexer()
-      lexer_instance.lex = lex.lex(module=lexer_instance)
-      instance = new_parser()
-      instance.yacc = yacc.yacc(
-        module=instance, debug=True, write_tables=0,
-        debuglog=logger, errorlog=logger)
-      ParserBuilder.__static_instances[name] = (lexer_instance, instance)
-    (lexer_instance, instance) = ParserBuilder.__static_instances[name]
-    if preparse:
-      preparse(instance)
-    try:
-      return_value = instance.yacc.parse(string, lexer=lexer_instance.lex)
-    except Exception:
-      del ParserBuilder.__static_instances[name]
-      raise
-    if postparse:
-      postparse(instance)
-    return return_value
+from ply_utilities import ParserBuilder

 def build_escape_map(chars):
   def add_escape(d, char):
=======================================
--- /branches/experimental/parser/tools/lexer_generator/rule_parser.py Wed Feb 19 15:52:12 2014 UTC +++ /branches/experimental/parser/tools/lexer_generator/rule_parser.py Fri Feb 28 13:29:06 2014 UTC
@@ -390,10 +390,10 @@
         self.__dfa = dfa
       return self.__dfa

-    def optimize_dfa(self, log = False):
+    def optimize_dfa(self):
       assert not self.__dfa
       assert not self.__minimial_dfa
-      self.__dfa = DfaOptimizer.optimize(self.minimal_dfa(), log)
+      self.__dfa = DfaOptimizer.optimize(self.minimal_dfa())
       self.__minimial_dfa = None

     def minimal_dfa(self):

--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
--- You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.

Reply via email to