diff -Nru happy-1.19.5/CHANGES happy-1.19.8/CHANGES --- happy-1.19.5/CHANGES 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/CHANGES 2017-10-12 07:46:11.000000000 +0000 @@ -1,4 +1,25 @@ ----------------------------------------------------------------------------- +1.19.8 + + * Fix issue #94 (some grammars don't compile due to new type + signatures introduced to allow overloading to be used) + +----------------------------------------------------------------------------- +1.19.7 + + * Fix misisng test suite files in the sdist + +----------------------------------------------------------------------------- +1.19.6 + * Manually generate Parser.hs using Makefile before sdist, + to fix bootstrapping problems with cabal sandboxes & new-build + * Documentation fixes + * Fixed GLR support + * new option -p/--pretty prints the grammar rules (only) to a file + * Added generation of additional type signatures to enable use + of typeclasses in monadic parsers. + +----------------------------------------------------------------------------- 1.19.5 * fixes for GHC 7.10 * Code cleanups (thanks Index Int ) diff -Nru happy-1.19.5/debian/changelog happy-1.19.8/debian/changelog --- happy-1.19.5/debian/changelog 2016-10-27 22:32:18.000000000 +0000 +++ happy-1.19.8/debian/changelog 2017-10-23 21:46:34.000000000 +0000 @@ -1,3 +1,21 @@ +happy (1.19.8-1) unstable; urgency=medium + + * New upstream release (Closes: #876605) + + -- Ilias Tsitsimpis Tue, 24 Oct 2017 00:46:34 +0300 + +happy (1.19.7-1) unstable; urgency=medium + + * New upstream release. + + -- Sean Whitton Sat, 16 Sep 2017 13:14:37 -0700 + +happy (1.19.6-1) unstable; urgency=medium + + * New upstream release + + -- Sean Whitton Sun, 03 Sep 2017 19:07:11 -0700 + happy (1.19.5-7) unstable; urgency=medium * Upload to unstable as part of GHC 8 transition. @@ -463,7 +481,3 @@ * Initial Release. 
-- Michael Weber Fri, 2 Jul 1999 18:32:57 +0200 - -Local variables: -mode: debian-changelog -End: diff -Nru happy-1.19.5/debian/control happy-1.19.8/debian/control --- happy-1.19.5/debian/control 2016-10-27 22:32:18.000000000 +0000 +++ happy-1.19.8/debian/control 2017-10-23 17:41:51.000000000 +0000 @@ -1,32 +1,37 @@ Source: happy -Section: haskell -Priority: extra Maintainer: Debian Haskell Group Uploaders: Iain Lane +Priority: extra +Section: haskell Standards-Version: 3.9.8 -Build-Depends: cdbs (>= 0.4.59), - haskell-devscripts (>= 0.13), - debhelper (>= 9.20141010), - dpkg-dev (>= 1.17.14), - autoconf, - docbook-utils, - ghc (>= 8), - docbook-xsl, - docbook-xml, - xsltproc, - libghc-mtl-dev, - happy -Homepage: http://www.haskell.org/happy -Vcs-Git: https://anonscm.debian.org/git/pkg-haskell/DHG_packages.git +Build-Depends: + cdbs (>= 0.4.59), + haskell-devscripts (>= 0.13), + debhelper (>= 9.20141010), + dpkg-dev (>= 1.17.14), + autoconf, + docbook-utils, + ghc (>= 8), + ghc-prof, + docbook-xsl, + docbook-xml, + xsltproc, + libghc-mtl-dev (>= 2.2.1), + happy , +Homepage: https://www.haskell.org/happy/ +Vcs-Git: https://anonscm.debian.org/cgit/pkg-haskell/DHG_packages.git Vcs-Browser: https://anonscm.debian.org/cgit/pkg-haskell/DHG_packages.git/tree/p/happy - Package: happy Architecture: any -Depends: ${shlibs:Depends}, ${misc:Depends}, ${haskell:Depends} -Recommends: ${haskell:Recommends} -Suggests: haskell-doc, info-browser, ${haskell:Suggests} -Replaces: ghc-cvs (<< 20031221) +Depends: ${shlibs:Depends}, + ${misc:Depends}, + ${haskell:Depends}, +Recommends: ${haskell:Recommends}, +Suggests: haskell-doc, + info-browser, + ${haskell:Suggests}, +Replaces: ghc-cvs (<< 20031221), Description: Parser generator for Haskell Happy is a parser generator system for Haskell, similar to the tool `yacc' for C. 
Like `yacc', it takes a file containing an annotated BNF specification of a diff -Nru happy-1.19.5/dist/build/happy/happy-tmp/AttrGrammarParser.hs happy-1.19.8/dist/build/happy/happy-tmp/AttrGrammarParser.hs --- happy-1.19.5/dist/build/happy/happy-tmp/AttrGrammarParser.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/dist/build/happy/happy-tmp/AttrGrammarParser.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,616 +0,0 @@ -{-# OPTIONS_GHC -w #-} -{-# OPTIONS -fglasgow-exts -cpp #-} -{-# OPTIONS_GHC -w #-} -module AttrGrammarParser (agParser) where -import ParseMonad -import AttrGrammar -import qualified Data.Array as Happy_Data_Array -import qualified GHC.Exts as Happy_GHC_Exts -import Control.Applicative(Applicative(..)) - --- parser produced by Happy Version 1.19.4 - -newtype HappyAbsSyn = HappyAbsSyn HappyAny -#if __GLASGOW_HASKELL__ >= 607 -type HappyAny = Happy_GHC_Exts.Any -#else -type HappyAny = forall a . a -#endif -happyIn4 :: ([AgRule]) -> (HappyAbsSyn ) -happyIn4 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn4 #-} -happyOut4 :: (HappyAbsSyn ) -> ([AgRule]) -happyOut4 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut4 #-} -happyIn5 :: ([AgRule]) -> (HappyAbsSyn ) -happyIn5 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn5 #-} -happyOut5 :: (HappyAbsSyn ) -> ([AgRule]) -happyOut5 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut5 #-} -happyIn6 :: (AgRule) -> (HappyAbsSyn ) -happyIn6 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn6 #-} -happyOut6 :: (HappyAbsSyn ) -> (AgRule) -happyOut6 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut6 #-} -happyIn7 :: ([AgToken]) -> (HappyAbsSyn ) -happyIn7 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn7 #-} -happyOut7 :: (HappyAbsSyn ) -> ([AgToken]) -happyOut7 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut7 #-} -happyIn8 :: ([AgToken]) -> (HappyAbsSyn ) -happyIn8 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn8 #-} -happyOut8 :: (HappyAbsSyn ) -> 
([AgToken]) -happyOut8 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut8 #-} -happyInTok :: (AgToken) -> (HappyAbsSyn ) -happyInTok x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyInTok #-} -happyOutTok :: (HappyAbsSyn ) -> (AgToken) -happyOutTok x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOutTok #-} - - -happyActOffsets :: HappyAddr -happyActOffsets = HappyA# "\x0f\x00\x0f\x00\x00\x00\x30\x00\x0a\x00\x2e\x00\x2d\x00\x2b\x00\x14\x00\x0a\x00\x0a\x00\x0a\x00\x00\x00\x01\x00\x0a\x00\x0a\x00\x0a\x00\x0a\x00\x0a\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x2c\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x0a\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x26\x00\x0a\x00\x00\x00\x01\x00\x00\x00\x00\x00"# - -happyGotoOffsets :: HappyAddr -happyGotoOffsets = HappyA# "\x18\x00\x1a\x00\x00\x00\x00\x00\x2a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x29\x00\x28\x00\x27\x00\x00\x00\x25\x00\x24\x00\x23\x00\x22\x00\x21\x00\x20\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x1d\x00\x1c\x00\x1b\x00\x19\x00\x0c\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\xfd\xff\x00\x00\x00\x00"# - -happyDefActions :: HappyAddr -happyDefActions = HappyA# "\xfb\xff\x00\x00\xfe\xff\xfc\xff\xf0\xff\x00\x00\x00\x00\x00\x00\x00\x00\xf0\xff\xf0\xff\xf0\xff\xf7\xff\xe8\xff\xf0\xff\xf0\xff\xf0\xff\xf0\xff\xf0\xff\xfb\xff\xfd\xff\xf1\xff\xf2\xff\xf3\xff\xf4\xff\xf5\xff\x00\x00\xe8\xff\xe8\xff\xe8\xff\xe8\xff\xe8\xff\xf0\xff\xe8\xff\xfa\xff\xf9\xff\xf8\xff\xe9\xff\xea\xff\xeb\xff\xec\xff\xee\xff\xed\xff\x00\x00\xf0\xff\xf6\xff\xe8\xff\xef\xff"# - -happyCheck :: HappyAddr -happyCheck = HappyA# 
"\xff\xff\x04\x00\x01\x00\x04\x00\x03\x00\x04\x00\x03\x00\x06\x00\x07\x00\x08\x00\x09\x00\x01\x00\x01\x00\x02\x00\x04\x00\x03\x00\x06\x00\x07\x00\x08\x00\x09\x00\x05\x00\x06\x00\x07\x00\x08\x00\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x04\x00\x0a\x00\x04\x00\x04\x00\x04\x00\x04\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03\x00\x02\x00\x04\x00\x03\x00\x03\x00\x03\x00\x03\x00\x02\x00\x04\x00\xff\xff\x04\x00\x04\x00\x03\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# - -happyTable :: HappyAddr -happyTable = HappyA# "\x00\x00\x2f\x00\x1c\x00\x25\x00\x1d\x00\x1e\x00\x2d\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x0e\x00\x14\x00\x03\x00\x0f\x00\x26\x00\x10\x00\x11\x00\x12\x00\x13\x00\x05\x00\x06\x00\x07\x00\x08\x00\x08\x00\x02\x00\x03\x00\x02\x00\x03\x00\x27\x00\xff\xff\x28\x00\x29\x00\x2a\x00\x2b\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x2f\x00\x1a\x00\x22\x00\x23\x00\x24\x00\x0c\x00\x2d\x00\x0a\x00\x00\x00\x0b\x00\x0c\x00\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# - -happyReduceArr = Happy_Data_Array.array (1, 23) [ - (1 , happyReduce_1), - (2 , happyReduce_2), - (3 , happyReduce_3), - (4 , happyReduce_4), - (5 , happyReduce_5), - (6 , happyReduce_6), - (7 , happyReduce_7), - (8 , happyReduce_8), - (9 , happyReduce_9), - (10 , happyReduce_10), - (11 , happyReduce_11), - (12 , happyReduce_12), - (13 , happyReduce_13), - (14 , happyReduce_14), - (15 , happyReduce_15), - (16 , happyReduce_16), - (17 , happyReduce_17), - (18 , happyReduce_18), - (19 , happyReduce_19), - (20 , happyReduce_20), - (21 , happyReduce_21), - (22 , happyReduce_22), - (23 , happyReduce_23) - ] - -happy_n_terms = 11 :: Int -happy_n_nonterms = 5 :: Int - -happyReduce_1 = happySpecReduce_1 0# happyReduction_1 -happyReduction_1 happy_x_1 - = case happyOut5 happy_x_1 of { happy_var_1 -> - happyIn4 - (happy_var_1 - )} - -happyReduce_2 = happySpecReduce_3 1# happyReduction_2 -happyReduction_2 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOut6 happy_x_1 of { happy_var_1 -> - 
case happyOut5 happy_x_3 of { happy_var_3 -> - happyIn5 - (happy_var_1 : happy_var_3 - )}} - -happyReduce_3 = happySpecReduce_1 1# happyReduction_3 -happyReduction_3 happy_x_1 - = case happyOut6 happy_x_1 of { happy_var_1 -> - happyIn5 - (happy_var_1 : [] - )} - -happyReduce_4 = happySpecReduce_0 1# happyReduction_4 -happyReduction_4 = happyIn5 - ([] - ) - -happyReduce_5 = happySpecReduce_3 2# happyReduction_5 -happyReduction_5 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_3 of { happy_var_3 -> - happyIn6 - (SelfAssign (selfRefVal happy_var_1) happy_var_3 - )}} - -happyReduce_6 = happySpecReduce_3 2# happyReduction_6 -happyReduction_6 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_3 of { happy_var_3 -> - happyIn6 - (SubAssign (subRefVal happy_var_1) happy_var_3 - )}} - -happyReduce_7 = happySpecReduce_3 2# happyReduction_7 -happyReduction_7 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_3 of { happy_var_3 -> - happyIn6 - (RightmostAssign (rightRefVal happy_var_1) happy_var_3 - )}} - -happyReduce_8 = happySpecReduce_2 2# happyReduction_8 -happyReduction_8 happy_x_2 - happy_x_1 - = case happyOut7 happy_x_2 of { happy_var_2 -> - happyIn6 - (Conditional happy_var_2 - )} - -happyReduce_9 = happyReduce 4# 3# happyReduction_9 -happyReduction_9 (happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { happy_var_2 -> - case happyOutTok happy_x_3 of { happy_var_3 -> - case happyOut7 happy_x_4 of { happy_var_4 -> - happyIn7 - ([happy_var_1] ++ happy_var_2 ++ [happy_var_3] ++ happy_var_4 - ) `HappyStk` happyRest}}}} - -happyReduce_10 = happySpecReduce_2 3# happyReduction_10 -happyReduction_10 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> 
- case happyOut7 happy_x_2 of { happy_var_2 -> - happyIn7 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_11 = happySpecReduce_2 3# happyReduction_11 -happyReduction_11 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { happy_var_2 -> - happyIn7 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_12 = happySpecReduce_2 3# happyReduction_12 -happyReduction_12 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { happy_var_2 -> - happyIn7 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_13 = happySpecReduce_2 3# happyReduction_13 -happyReduction_13 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { happy_var_2 -> - happyIn7 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_14 = happySpecReduce_2 3# happyReduction_14 -happyReduction_14 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { happy_var_2 -> - happyIn7 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_15 = happySpecReduce_0 3# happyReduction_15 -happyReduction_15 = happyIn7 - ([] - ) - -happyReduce_16 = happyReduce 4# 4# happyReduction_16 -happyReduction_16 (happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { happy_var_2 -> - case happyOutTok happy_x_3 of { happy_var_3 -> - case happyOut8 happy_x_4 of { happy_var_4 -> - happyIn8 - ([happy_var_1] ++ happy_var_2 ++ [happy_var_3] ++ happy_var_4 - ) `HappyStk` happyRest}}}} - -happyReduce_17 = happySpecReduce_2 4# happyReduction_17 -happyReduction_17 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { happy_var_2 -> - happyIn8 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_18 = happySpecReduce_2 4# happyReduction_18 -happyReduction_18 happy_x_2 - happy_x_1 - = case 
happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { happy_var_2 -> - happyIn8 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_19 = happySpecReduce_2 4# happyReduction_19 -happyReduction_19 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { happy_var_2 -> - happyIn8 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_20 = happySpecReduce_2 4# happyReduction_20 -happyReduction_20 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { happy_var_2 -> - happyIn8 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_21 = happySpecReduce_2 4# happyReduction_21 -happyReduction_21 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut7 happy_x_2 of { happy_var_2 -> - happyIn8 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_22 = happySpecReduce_2 4# happyReduction_22 -happyReduction_22 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { happy_var_1 -> - case happyOut8 happy_x_2 of { happy_var_2 -> - happyIn8 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_23 = happySpecReduce_0 4# happyReduction_23 -happyReduction_23 = happyIn8 - ([] - ) - -happyNewToken action sts stk - = agLexer(\tk -> - let cont i = happyDoAction i tk action sts stk in - case tk of { - AgTok_EOF -> happyDoAction 10# tk action sts stk; - AgTok_LBrace -> cont 1#; - AgTok_RBrace -> cont 2#; - AgTok_Semicolon -> cont 3#; - AgTok_Eq -> cont 4#; - AgTok_Where -> cont 5#; - AgTok_SelfRef _ -> cont 6#; - AgTok_SubRef _ -> cont 7#; - AgTok_RightmostRef _ -> cont 8#; - AgTok_Unknown _ -> cont 9#; - _ -> happyError' tk - }) - -happyError_ 10# tk = happyError' tk -happyError_ _ tk = happyError' tk - -happyThen :: () => P a -> (a -> P b) -> P b -happyThen = (>>=) -happyReturn :: () => a -> P a -happyReturn = (return) -happyThen1 = happyThen -happyReturn1 :: () => a -> P a -happyReturn1 = happyReturn -happyError' :: () => (AgToken) -> P a -happyError' tk = 
(\token -> happyError) tk - -agParser = happySomeParser where - happySomeParser = happyThen (happyParse 0#) (\x -> happyReturn (happyOut4 x)) - -happySeq = happyDontSeq - - -happyError :: P a -happyError = fail ("Parse error\n") -{-# LINE 1 "templates/GenericTemplate.hs" #-} -{-# LINE 1 "templates/GenericTemplate.hs" #-} -{-# LINE 1 "" #-} -{-# LINE 1 "" #-} -{-# LINE 1 "templates/GenericTemplate.hs" #-} --- Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp - -{-# LINE 13 "templates/GenericTemplate.hs" #-} - - - - - --- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. -#if __GLASGOW_HASKELL__ > 706 -#define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Bool) -#define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Bool) -#define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Bool) -#else -#define LT(n,m) (n Happy_GHC_Exts.<# m) -#define GTE(n,m) (n Happy_GHC_Exts.>=# m) -#define EQ(n,m) (n Happy_GHC_Exts.==# m) -#endif -{-# LINE 46 "templates/GenericTemplate.hs" #-} - - -data Happy_IntList = HappyCons Happy_GHC_Exts.Int# Happy_IntList - - - - - -{-# LINE 67 "templates/GenericTemplate.hs" #-} - -{-# LINE 77 "templates/GenericTemplate.hs" #-} - -{-# LINE 86 "templates/GenericTemplate.hs" #-} - -infixr 9 `HappyStk` -data HappyStk a = HappyStk a (HappyStk a) - ------------------------------------------------------------------------------ --- starting the parse - -happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll - ------------------------------------------------------------------------------ --- Accepting the parse - --- If the current token is 0#, it means we've just accepted a partial --- parse (a %partial parser). We must ignore the saved token on the top of --- the stack in this case. 
-happyAccept 0# tk st sts (_ `HappyStk` ans `HappyStk` _) = - happyReturn1 ans -happyAccept j tk st sts (HappyStk ans _) = - (happyTcHack j (happyTcHack st)) (happyReturn1 ans) - ------------------------------------------------------------------------------ --- Arrays only: do the next action - - - -happyDoAction i tk st - = {- nothing -} - - - case action of - 0# -> {- nothing -} - happyFail i tk st - -1# -> {- nothing -} - happyAccept i tk st - n | LT(n,(0# :: Happy_GHC_Exts.Int#)) -> {- nothing -} - - (happyReduceArr Happy_Data_Array.! rule) i tk st - where rule = (Happy_GHC_Exts.I# ((Happy_GHC_Exts.negateInt# ((n Happy_GHC_Exts.+# (1# :: Happy_GHC_Exts.Int#)))))) - n -> {- nothing -} - - - happyShift new_state i tk st - where new_state = (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) - where off = indexShortOffAddr happyActOffsets st - off_i = (off Happy_GHC_Exts.+# i) - check = if GTE(off_i,(0# :: Happy_GHC_Exts.Int#)) - then EQ(indexShortOffAddr happyCheck off_i, i) - else False - action - | check = indexShortOffAddr happyTable off_i - | otherwise = indexShortOffAddr happyDefActions st - - -indexShortOffAddr (HappyA# arr) off = - Happy_GHC_Exts.narrow16Int# i - where - i = Happy_GHC_Exts.word2Int# (Happy_GHC_Exts.or# (Happy_GHC_Exts.uncheckedShiftL# high 8#) low) - high = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr (off' Happy_GHC_Exts.+# 1#))) - low = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr off')) - off' = off Happy_GHC_Exts.*# 2# - - - - - -data HappyAddr = HappyA# Happy_GHC_Exts.Addr# - - - - ------------------------------------------------------------------------------ --- HappyState data type (not arrays) - -{-# LINE 170 "templates/GenericTemplate.hs" #-} - ------------------------------------------------------------------------------ --- Shifting a token - -happyShift new_state 0# tk st sts stk@(x `HappyStk` _) = - let i = (case Happy_GHC_Exts.unsafeCoerce# x of { 
(Happy_GHC_Exts.I# (i)) -> i }) in --- trace "shifting the error token" $ - happyDoAction i tk new_state (HappyCons (st) (sts)) (stk) - -happyShift new_state i tk st sts stk = - happyNewToken new_state (HappyCons (st) (sts)) ((happyInTok (tk))`HappyStk`stk) - --- happyReduce is specialised for the common cases. - -happySpecReduce_0 i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happySpecReduce_0 nt fn j tk st@((action)) sts stk - = happyGoto nt j tk st (HappyCons (st) (sts)) (fn `HappyStk` stk) - -happySpecReduce_1 i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happySpecReduce_1 nt fn j tk _ sts@((HappyCons (st@(action)) (_))) (v1`HappyStk`stk') - = let r = fn v1 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) - -happySpecReduce_2 i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happySpecReduce_2 nt fn j tk _ (HappyCons (_) (sts@((HappyCons (st@(action)) (_))))) (v1`HappyStk`v2`HappyStk`stk') - = let r = fn v1 v2 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) - -happySpecReduce_3 i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happySpecReduce_3 nt fn j tk _ (HappyCons (_) ((HappyCons (_) (sts@((HappyCons (st@(action)) (_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk') - = let r = fn v1 v2 v3 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) - -happyReduce k i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happyReduce k nt fn j tk st sts stk - = case happyDrop (k Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) sts of - sts1@((HappyCons (st1@(action)) (_))) -> - let r = fn stk in -- it doesn't hurt to always seq here... 
- happyDoSeq r (happyGoto nt j tk st1 sts1 r) - -happyMonadReduce k nt fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happyMonadReduce k nt fn j tk st sts stk = - case happyDrop k (HappyCons (st) (sts)) of - sts1@((HappyCons (st1@(action)) (_))) -> - let drop_stk = happyDropStk k stk in - happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk)) - -happyMonad2Reduce k nt fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happyMonad2Reduce k nt fn j tk st sts stk = - case happyDrop k (HappyCons (st) (sts)) of - sts1@((HappyCons (st1@(action)) (_))) -> - let drop_stk = happyDropStk k stk - - off = indexShortOffAddr happyGotoOffsets st1 - off_i = (off Happy_GHC_Exts.+# nt) - new_state = indexShortOffAddr happyTable off_i - - - - in - happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk)) - -happyDrop 0# l = l -happyDrop n (HappyCons (_) (t)) = happyDrop (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) t - -happyDropStk 0# l = l -happyDropStk n (x `HappyStk` xs) = happyDropStk (n Happy_GHC_Exts.-# (1#::Happy_GHC_Exts.Int#)) xs - ------------------------------------------------------------------------------ --- Moving to a new state after a reduction - - -happyGoto nt j tk st = - {- nothing -} - happyDoAction j tk new_state - where off = indexShortOffAddr happyGotoOffsets st - off_i = (off Happy_GHC_Exts.+# nt) - new_state = indexShortOffAddr happyTable off_i - - - - ------------------------------------------------------------------------------ --- Error recovery (0# is the error token) - --- parse error if we are in recovery and we fail again -happyFail 0# tk old_st _ stk@(x `HappyStk` _) = - let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) -> i }) in --- trace "failing" $ - happyError_ i tk - -{- We don't need state discarding for our restricted implementation of - "error". 
In fact, it can cause some bogus parses, so I've disabled it - for now --SDM - --- discard a state -happyFail 0# tk old_st (HappyCons ((action)) (sts)) - (saved_tok `HappyStk` _ `HappyStk` stk) = --- trace ("discarding state, depth " ++ show (length stk)) $ - happyDoAction 0# tk action sts ((saved_tok`HappyStk`stk)) --} - --- Enter error recovery: generate an error token, --- save the old token and carry on. -happyFail i tk (action) sts stk = --- trace "entering error recovery" $ - happyDoAction 0# tk action sts ( (Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# (i))) `HappyStk` stk) - --- Internal happy errors: - -notHappyAtAll :: a -notHappyAtAll = error "Internal Happy error\n" - ------------------------------------------------------------------------------ --- Hack to get the typechecker to accept our action functions - - -happyTcHack :: Happy_GHC_Exts.Int# -> a -> a -happyTcHack x y = y -{-# INLINE happyTcHack #-} - - ------------------------------------------------------------------------------ --- Seq-ing. If the --strict flag is given, then Happy emits --- happySeq = happyDoSeq --- otherwise it emits --- happySeq = happyDontSeq - -happyDoSeq, happyDontSeq :: a -> b -> b -happyDoSeq a b = a `seq` b -happyDontSeq a b = b - ------------------------------------------------------------------------------ --- Don't inline any functions from the template. GHC has a nasty habit --- of deciding to inline happyGoto everywhere, which increases the size of --- the generated parser quite a bit. 
- - -{-# NOINLINE happyDoAction #-} -{-# NOINLINE happyTable #-} -{-# NOINLINE happyCheck #-} -{-# NOINLINE happyActOffsets #-} -{-# NOINLINE happyGotoOffsets #-} -{-# NOINLINE happyDefActions #-} - -{-# NOINLINE happyShift #-} -{-# NOINLINE happySpecReduce_0 #-} -{-# NOINLINE happySpecReduce_1 #-} -{-# NOINLINE happySpecReduce_2 #-} -{-# NOINLINE happySpecReduce_3 #-} -{-# NOINLINE happyReduce #-} -{-# NOINLINE happyMonadReduce #-} -{-# NOINLINE happyGoto #-} -{-# NOINLINE happyFail #-} - --- end of Happy Template. diff -Nru happy-1.19.5/dist/build/happy/happy-tmp/Parser.hs happy-1.19.8/dist/build/happy/happy-tmp/Parser.hs --- happy-1.19.5/dist/build/happy/happy-tmp/Parser.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/dist/build/happy/happy-tmp/Parser.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1003 +0,0 @@ -{-# OPTIONS_GHC -w #-} -{-# OPTIONS -fglasgow-exts -cpp #-} -{-# OPTIONS_GHC -w #-} -module Parser (ourParser,AbsSyn) where -import ParseMonad -import AbsSyn -import Lexer -import qualified Data.Array as Happy_Data_Array -import qualified GHC.Exts as Happy_GHC_Exts -import Control.Applicative(Applicative(..)) - --- parser produced by Happy Version 1.19.4 - -newtype HappyAbsSyn = HappyAbsSyn HappyAny -#if __GLASGOW_HASKELL__ >= 607 -type HappyAny = Happy_GHC_Exts.Any -#else -type HappyAny = forall a . 
a -#endif -happyIn4 :: (AbsSyn) -> (HappyAbsSyn ) -happyIn4 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn4 #-} -happyOut4 :: (HappyAbsSyn ) -> (AbsSyn) -happyOut4 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut4 #-} -happyIn5 :: ([Rule]) -> (HappyAbsSyn ) -happyIn5 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn5 #-} -happyOut5 :: (HappyAbsSyn ) -> ([Rule]) -happyOut5 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut5 #-} -happyIn6 :: (Rule) -> (HappyAbsSyn ) -happyIn6 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn6 #-} -happyOut6 :: (HappyAbsSyn ) -> (Rule) -happyOut6 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut6 #-} -happyIn7 :: ([String]) -> (HappyAbsSyn ) -happyIn7 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn7 #-} -happyOut7 :: (HappyAbsSyn ) -> ([String]) -happyOut7 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut7 #-} -happyIn8 :: ([String]) -> (HappyAbsSyn ) -happyIn8 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn8 #-} -happyOut8 :: (HappyAbsSyn ) -> ([String]) -happyOut8 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut8 #-} -happyIn9 :: ([Prod]) -> (HappyAbsSyn ) -happyIn9 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn9 #-} -happyOut9 :: (HappyAbsSyn ) -> ([Prod]) -happyOut9 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut9 #-} -happyIn10 :: (Prod) -> (HappyAbsSyn ) -happyIn10 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn10 #-} -happyOut10 :: (HappyAbsSyn ) -> (Prod) -happyOut10 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut10 #-} -happyIn11 :: (Term) -> (HappyAbsSyn ) -happyIn11 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn11 #-} -happyOut11 :: (HappyAbsSyn ) -> (Term) -happyOut11 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut11 #-} -happyIn12 :: ([Term]) -> (HappyAbsSyn ) -happyIn12 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn12 #-} -happyOut12 :: (HappyAbsSyn ) -> ([Term]) -happyOut12 x = 
Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut12 #-} -happyIn13 :: ([Term]) -> (HappyAbsSyn ) -happyIn13 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn13 #-} -happyOut13 :: (HappyAbsSyn ) -> ([Term]) -happyOut13 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut13 #-} -happyIn14 :: ([Term]) -> (HappyAbsSyn ) -happyIn14 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn14 #-} -happyOut14 :: (HappyAbsSyn ) -> ([Term]) -happyOut14 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut14 #-} -happyIn15 :: (Maybe String) -> (HappyAbsSyn ) -happyIn15 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn15 #-} -happyOut15 :: (HappyAbsSyn ) -> (Maybe String) -happyOut15 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut15 #-} -happyIn16 :: ([Directive String]) -> (HappyAbsSyn ) -happyIn16 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn16 #-} -happyOut16 :: (HappyAbsSyn ) -> ([Directive String]) -happyOut16 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut16 #-} -happyIn17 :: (Directive String) -> (HappyAbsSyn ) -happyIn17 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn17 #-} -happyOut17 :: (HappyAbsSyn ) -> (Directive String) -happyOut17 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut17 #-} -happyIn18 :: (Maybe String) -> (HappyAbsSyn ) -happyIn18 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn18 #-} -happyOut18 :: (HappyAbsSyn ) -> (Maybe String) -happyOut18 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut18 #-} -happyIn19 :: ([(String,String)]) -> (HappyAbsSyn ) -happyIn19 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn19 #-} -happyOut19 :: (HappyAbsSyn ) -> ([(String,String)]) -happyOut19 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut19 #-} -happyIn20 :: ((String,String)) -> (HappyAbsSyn ) -happyIn20 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn20 #-} -happyOut20 :: (HappyAbsSyn ) -> ((String,String)) -happyOut20 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut20 #-} 
-happyIn21 :: ([String]) -> (HappyAbsSyn ) -happyIn21 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn21 #-} -happyOut21 :: (HappyAbsSyn ) -> ([String]) -happyOut21 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut21 #-} -happyIn22 :: (Maybe String) -> (HappyAbsSyn ) -happyIn22 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyIn22 #-} -happyOut22 :: (HappyAbsSyn ) -> (Maybe String) -happyOut22 x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOut22 #-} -happyInTok :: (Token) -> (HappyAbsSyn ) -happyInTok x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyInTok #-} -happyOutTok :: (HappyAbsSyn ) -> (Token) -happyOutTok x = Happy_GHC_Exts.unsafeCoerce# x -{-# INLINE happyOutTok #-} - - -happyActOffsets :: HappyAddr -happyActOffsets = HappyA# "\x70\x00\x70\x00\x22\x00\x00\x00\x67\x00\xff\xff\x00\x00\x6d\x00\x78\x00\x76\x00\x75\x00\x6c\x00\x00\x00\x6b\x00\x73\x00\x73\x00\x73\x00\x66\x00\x64\x00\x6f\x00\x62\x00\x00\x00\x61\x00\x00\x00\x00\x00\x00\x00\x6e\x00\x00\x00\x00\x00\x60\x00\x5d\x00\x6a\x00\x6a\x00\x00\x00\x69\x00\x5c\x00\x00\x00\x00\x00\x68\x00\x10\x00\x00\x00\x50\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x5b\x00\x00\x00\x00\x00\x56\x00\x01\x00\x65\x00\x00\x00\x00\x00\x34\x00\x00\x00\x63\x00\x51\x00\x00\x00\x0f\x00\x00\x00\x4e\x00\x00\x00\x55\x00\x5f\x00\x4b\x00\x00\x00\x5e\x00\x00\x00\x5a\x00\x00\x00\x4d\x00\x59\x00\x58\x00\x4a\x00\x57\x00\x00\x00\x57\x00\x00\x00\x00\x00\x48\x00\x00\x00\x31\x00\x00\x00\x52\x00\x00\x00\x00\x00\x00\x00\x00\x00"# - -happyGotoOffsets :: HappyAddr -happyGotoOffsets = HappyA# 
"\x0b\x00\x40\x00\x3c\x00\x00\x00\x00\x00\x47\x00\x00\x00\x00\x00\x37\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x46\x00\x45\x00\x44\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x43\x00\x42\x00\x00\x00\x35\x00\x00\x00\x00\x00\x00\x00\x41\x00\x11\x00\x00\x00\x49\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x2a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x38\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x19\x00\x00\x00\x00\x00\x00\x00\x00\x00\x15\x00\x00\x00\x00\x00\x00\x00\x33\x00\x00\x00\x2e\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x17\x00\x00\x00\x00\x00\x00\x00\x00\x00"# - -happyDefActions :: HappyAddr -happyDefActions = HappyA# "\xcb\xff\x00\x00\x00\x00\xcc\xff\x00\x00\x00\x00\xe5\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\xff\x00\x00\xcd\xff\xcd\xff\xcd\xff\x00\x00\x00\x00\x00\x00\x00\x00\xd5\xff\x00\x00\xd6\xff\xd7\xff\xd9\xff\xcd\xff\xd8\xff\xda\xff\xde\xff\x00\x00\xd2\xff\xd2\xff\xe3\xff\xd0\xff\x00\x00\xe4\xff\xe6\xff\x00\x00\xcb\xff\xfc\xff\xf7\xff\xcf\xff\xd1\xff\xe2\xff\xd3\xff\xe1\xff\xdf\xff\xdd\xff\xce\xff\xd4\xff\xdc\xff\x00\x00\x00\x00\xfd\xff\xfe\xff\x00\x00\xf6\xff\xed\xff\x00\x00\xdb\xff\x00\x00\xf9\xff\xf3\xff\xec\xff\xe7\xff\xee\xff\xf0\xff\xf8\xff\x00\x00\xf5\xff\x00\x00\xeb\xff\x00\x00\x00\x00\xed\xff\x00\x00\xed\xff\xfb\xff\xed\xff\xf4\xff\xe8\xff\xf1\xff\xea\xff\x00\x00\xef\xff\x00\x00\xf2\xff\xfa\xff\xe9\xff"# - -happyCheck :: HappyAddr -happyCheck = HappyA# 
"\xff\xff\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x00\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x01\x00\x01\x00\x0b\x00\x02\x00\x13\x00\x16\x00\x15\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x07\x00\x12\x00\x07\x00\x0a\x00\x07\x00\x11\x00\x13\x00\x12\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x04\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x01\x00\x02\x00\x0f\x00\x10\x00\x0f\x00\x10\x00\x0c\x00\x0d\x00\x19\x00\x1a\x00\x03\x00\x19\x00\x1a\x00\x11\x00\x0e\x00\x0e\x00\x12\x00\x01\x00\x0d\x00\x11\x00\x11\x00\x11\x00\x01\x00\x01\x00\x01\x00\x01\x00\x14\x00\x13\x00\x11\x00\x01\x00\x01\x00\x0c\x00\x11\x00\x18\x00\x01\x00\x17\x00\x01\x00\x11\x00\x18\x00\x01\x00\x01\x00\x01\x00\x11\x00\x11\x00\x11\x00\x01\x00\x01\x00\x11\x00\x11\x00\x11\x00\x01\x00\x11\x00\x01\x00\x01\x00\x12\x00\x01\x00\xff\xff\xff\xff\x11\x00\x11\x00\x11\x00\xff\xff\xff\xff\x11\x00\x1b\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# - -happyTable :: HappyAddr -happyTable = HappyA# 
"\x00\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x04\x00\x12\x00\x13\x00\x14\x00\x15\x00\x4d\x00\x2a\x00\x49\x00\x36\x00\x3b\x00\x27\x00\x3c\x00\x58\x00\x3f\x00\x40\x00\x41\x00\x42\x00\x53\x00\x02\x00\x59\x00\x54\x00\x48\x00\x04\x00\x4e\x00\x37\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x38\x00\x12\x00\x13\x00\x14\x00\x15\x00\x4e\x00\x3f\x00\x40\x00\x41\x00\x42\x00\x50\x00\x3f\x00\x40\x00\x41\x00\x42\x00\x3e\x00\x3f\x00\x40\x00\x41\x00\x42\x00\x27\x00\x28\x00\x2b\x00\x22\x00\x21\x00\x22\x00\x05\x00\x06\x00\x56\x00\x57\x00\x34\x00\x45\x00\x46\x00\x31\x00\x2c\x00\x2e\x00\x02\x00\x44\x00\x25\x00\x19\x00\x1b\x00\x1c\x00\x44\x00\x44\x00\x52\x00\x44\x00\x58\x00\x50\x00\x53\x00\x47\x00\x44\x00\x4b\x00\x3e\x00\x48\x00\x44\x00\x4c\x00\x3a\x00\x3d\x00\x36\x00\x2a\x00\x24\x00\x2e\x00\x34\x00\x2b\x00\x30\x00\x1b\x00\x17\x00\x31\x00\x33\x00\x16\x00\x1b\x00\x18\x00\x20\x00\x21\x00\x19\x00\x24\x00\x00\x00\x00\x00\x1e\x00\x1f\x00\x25\x00\x00\x00\x00\x00\x04\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# - -happyReduceArr = Happy_Data_Array.array (1, 52) [ - (1 , happyReduce_1), - (2 , happyReduce_2), - (3 , happyReduce_3), - (4 , happyReduce_4), - (5 , happyReduce_5), - (6 , happyReduce_6), - (7 , happyReduce_7), - (8 , happyReduce_8), - (9 , happyReduce_9), - (10 , happyReduce_10), - (11 , happyReduce_11), - (12 , happyReduce_12), - (13 , happyReduce_13), - (14 , happyReduce_14), - (15 , happyReduce_15), - (16 , happyReduce_16), - (17 , happyReduce_17), - (18 , happyReduce_18), - (19 , happyReduce_19), - (20 , happyReduce_20), - (21 , happyReduce_21), - (22 , happyReduce_22), - (23 , happyReduce_23), - (24 , happyReduce_24), - (25 , happyReduce_25), - (26 , happyReduce_26), - (27 , happyReduce_27), - (28 , happyReduce_28), - (29 , happyReduce_29), - (30 , happyReduce_30), - (31 , happyReduce_31), - 
(32 , happyReduce_32), - (33 , happyReduce_33), - (34 , happyReduce_34), - (35 , happyReduce_35), - (36 , happyReduce_36), - (37 , happyReduce_37), - (38 , happyReduce_38), - (39 , happyReduce_39), - (40 , happyReduce_40), - (41 , happyReduce_41), - (42 , happyReduce_42), - (43 , happyReduce_43), - (44 , happyReduce_44), - (45 , happyReduce_45), - (46 , happyReduce_46), - (47 , happyReduce_47), - (48 , happyReduce_48), - (49 , happyReduce_49), - (50 , happyReduce_50), - (51 , happyReduce_51), - (52 , happyReduce_52) - ] - -happy_n_terms = 28 :: Int -happy_n_nonterms = 19 :: Int - -happyReduce_1 = happyReduce 5# 0# happyReduction_1 -happyReduction_1 (happy_x_5 `HappyStk` - happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) - = case happyOut22 happy_x_1 of { happy_var_1 -> - case happyOut16 happy_x_2 of { happy_var_2 -> - case happyOut5 happy_x_4 of { happy_var_4 -> - case happyOut22 happy_x_5 of { happy_var_5 -> - happyIn4 - (AbsSyn happy_var_1 (reverse happy_var_2) (reverse happy_var_4) happy_var_5 - ) `HappyStk` happyRest}}}} - -happyReduce_2 = happySpecReduce_2 1# happyReduction_2 -happyReduction_2 happy_x_2 - happy_x_1 - = case happyOut5 happy_x_1 of { happy_var_1 -> - case happyOut6 happy_x_2 of { happy_var_2 -> - happyIn5 - (happy_var_2 : happy_var_1 - )}} - -happyReduce_3 = happySpecReduce_1 1# happyReduction_3 -happyReduction_3 happy_x_1 - = case happyOut6 happy_x_1 of { happy_var_1 -> - happyIn5 - ([happy_var_1] - )} - -happyReduce_4 = happyReduce 6# 2# happyReduction_4 -happyReduction_4 (happy_x_6 `HappyStk` - happy_x_5 `HappyStk` - happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut7 happy_x_2 of { happy_var_2 -> - case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> - case happyOut9 happy_x_6 of { happy_var_6 -> - happyIn6 - 
((happy_var_1,happy_var_2,happy_var_6,Just happy_var_4) - ) `HappyStk` happyRest}}}} - -happyReduce_5 = happyReduce 7# 2# happyReduction_5 -happyReduction_5 (happy_x_7 `HappyStk` - happy_x_6 `HappyStk` - happy_x_5 `HappyStk` - happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut7 happy_x_2 of { happy_var_2 -> - case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> - case happyOut9 happy_x_7 of { happy_var_7 -> - happyIn6 - ((happy_var_1,happy_var_2,happy_var_7,Just happy_var_4) - ) `HappyStk` happyRest}}}} - -happyReduce_6 = happyReduce 4# 2# happyReduction_6 -happyReduction_6 (happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut7 happy_x_2 of { happy_var_2 -> - case happyOut9 happy_x_4 of { happy_var_4 -> - happyIn6 - ((happy_var_1,happy_var_2,happy_var_4,Nothing) - ) `HappyStk` happyRest}}} - -happyReduce_7 = happySpecReduce_3 3# happyReduction_7 -happyReduction_7 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOut8 happy_x_2 of { happy_var_2 -> - happyIn7 - (reverse happy_var_2 - )} - -happyReduce_8 = happySpecReduce_0 3# happyReduction_8 -happyReduction_8 = happyIn7 - ([] - ) - -happyReduce_9 = happySpecReduce_1 4# happyReduction_9 -happyReduction_9 happy_x_1 - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - happyIn8 - ([happy_var_1] - )} - -happyReduce_10 = happySpecReduce_3 4# happyReduction_10 -happyReduction_10 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOut8 happy_x_1 of { happy_var_1 -> - case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokId) -> - happyIn8 - (happy_var_3 : happy_var_1 - )}} - -happyReduce_11 = happySpecReduce_3 5# happyReduction_11 -happyReduction_11 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOut10 happy_x_1 of { 
happy_var_1 -> - case happyOut9 happy_x_3 of { happy_var_3 -> - happyIn9 - (happy_var_1 : happy_var_3 - )}} - -happyReduce_12 = happySpecReduce_1 5# happyReduction_12 -happyReduction_12 happy_x_1 - = case happyOut10 happy_x_1 of { happy_var_1 -> - happyIn9 - ([happy_var_1] - )} - -happyReduce_13 = happyMonadReduce 4# 6# happyReduction_13 -happyReduction_13 (happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) tk - = happyThen (case happyOut12 happy_x_1 of { happy_var_1 -> - case happyOut15 happy_x_2 of { happy_var_2 -> - case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> - ( lineP >>= \l -> return (happy_var_1,happy_var_3,l,happy_var_2))}}} - ) (\r -> happyReturn (happyIn10 r)) - -happyReduce_14 = happyMonadReduce 3# 6# happyReduction_14 -happyReduction_14 (happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) tk - = happyThen (case happyOut12 happy_x_1 of { happy_var_1 -> - case happyOut15 happy_x_2 of { happy_var_2 -> - case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> - ( lineP >>= \l -> return (happy_var_1,happy_var_3,l,happy_var_2))}}} - ) (\r -> happyReturn (happyIn10 r)) - -happyReduce_15 = happySpecReduce_1 7# happyReduction_15 -happyReduction_15 happy_x_1 - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - happyIn11 - (App happy_var_1 [] - )} - -happyReduce_16 = happyReduce 4# 7# happyReduction_16 -happyReduction_16 (happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut14 happy_x_3 of { happy_var_3 -> - happyIn11 - (App happy_var_1 (reverse happy_var_3) - ) `HappyStk` happyRest}} - -happyReduce_17 = happySpecReduce_1 8# happyReduction_17 -happyReduction_17 happy_x_1 - = case happyOut13 happy_x_1 of { happy_var_1 -> - happyIn12 - (reverse happy_var_1 - )} - -happyReduce_18 = 
happySpecReduce_0 8# happyReduction_18 -happyReduction_18 = happyIn12 - ([] - ) - -happyReduce_19 = happySpecReduce_1 9# happyReduction_19 -happyReduction_19 happy_x_1 - = case happyOut11 happy_x_1 of { happy_var_1 -> - happyIn13 - ([happy_var_1] - )} - -happyReduce_20 = happySpecReduce_2 9# happyReduction_20 -happyReduction_20 happy_x_2 - happy_x_1 - = case happyOut13 happy_x_1 of { happy_var_1 -> - case happyOut11 happy_x_2 of { happy_var_2 -> - happyIn13 - (happy_var_2 : happy_var_1 - )}} - -happyReduce_21 = happySpecReduce_1 10# happyReduction_21 -happyReduction_21 happy_x_1 - = case happyOut11 happy_x_1 of { happy_var_1 -> - happyIn14 - ([happy_var_1] - )} - -happyReduce_22 = happySpecReduce_3 10# happyReduction_22 -happyReduction_22 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOut14 happy_x_1 of { happy_var_1 -> - case happyOut11 happy_x_3 of { happy_var_3 -> - happyIn14 - (happy_var_3 : happy_var_1 - )}} - -happyReduce_23 = happySpecReduce_2 11# happyReduction_23 -happyReduction_23 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> - happyIn15 - (Just happy_var_2 - )} - -happyReduce_24 = happySpecReduce_0 11# happyReduction_24 -happyReduction_24 = happyIn15 - (Nothing - ) - -happyReduce_25 = happySpecReduce_2 12# happyReduction_25 -happyReduction_25 happy_x_2 - happy_x_1 - = case happyOut16 happy_x_1 of { happy_var_1 -> - case happyOut17 happy_x_2 of { happy_var_2 -> - happyIn16 - (happy_var_2 : happy_var_1 - )}} - -happyReduce_26 = happySpecReduce_1 12# happyReduction_26 -happyReduction_26 happy_x_1 - = case happyOut17 happy_x_1 of { happy_var_1 -> - happyIn16 - ([happy_var_1] - )} - -happyReduce_27 = happySpecReduce_2 13# happyReduction_27 -happyReduction_27 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn17 - (TokenType happy_var_2 - )} - -happyReduce_28 = happySpecReduce_2 13# happyReduction_28 -happyReduction_28 happy_x_2 - happy_x_1 - = case happyOut19 
happy_x_2 of { happy_var_2 -> - happyIn17 - (TokenSpec happy_var_2 - )} - -happyReduce_29 = happySpecReduce_3 13# happyReduction_29 -happyReduction_29 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> - case happyOut18 happy_x_3 of { happy_var_3 -> - happyIn17 - (TokenName happy_var_2 happy_var_3 False - )}} - -happyReduce_30 = happySpecReduce_3 13# happyReduction_30 -happyReduction_30 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> - case happyOut18 happy_x_3 of { happy_var_3 -> - happyIn17 - (TokenName happy_var_2 happy_var_3 True - )}} - -happyReduce_31 = happySpecReduce_1 13# happyReduction_31 -happyReduction_31 happy_x_1 - = happyIn17 - (TokenImportedIdentity - ) - -happyReduce_32 = happySpecReduce_3 13# happyReduction_32 -happyReduction_32 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> - happyIn17 - (TokenLexer happy_var_2 happy_var_3 - )}} - -happyReduce_33 = happySpecReduce_2 13# happyReduction_33 -happyReduction_33 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn17 - (TokenMonad "()" happy_var_2 ">>=" "return" - )} - -happyReduce_34 = happySpecReduce_3 13# happyReduction_34 -happyReduction_34 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> - happyIn17 - (TokenMonad happy_var_2 happy_var_3 ">>=" "return" - )}} - -happyReduce_35 = happyReduce 4# 13# happyReduction_35 -happyReduction_35 (happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 
TokCodeQuote) -> - case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> - happyIn17 - (TokenMonad "()" happy_var_2 happy_var_3 happy_var_4 - ) `HappyStk` happyRest}}} - -happyReduce_36 = happyReduce 5# 13# happyReduction_36 -happyReduction_36 (happy_x_5 `HappyStk` - happy_x_4 `HappyStk` - happy_x_3 `HappyStk` - happy_x_2 `HappyStk` - happy_x_1 `HappyStk` - happyRest) - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> - case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> - case happyOutTok happy_x_5 of { (TokenInfo happy_var_5 TokCodeQuote) -> - happyIn17 - (TokenMonad happy_var_2 happy_var_3 happy_var_4 happy_var_5 - ) `HappyStk` happyRest}}}} - -happyReduce_37 = happySpecReduce_2 13# happyReduction_37 -happyReduction_37 happy_x_2 - happy_x_1 - = case happyOut21 happy_x_2 of { happy_var_2 -> - happyIn17 - (TokenNonassoc happy_var_2 - )} - -happyReduce_38 = happySpecReduce_2 13# happyReduction_38 -happyReduction_38 happy_x_2 - happy_x_1 - = case happyOut21 happy_x_2 of { happy_var_2 -> - happyIn17 - (TokenRight happy_var_2 - )} - -happyReduce_39 = happySpecReduce_2 13# happyReduction_39 -happyReduction_39 happy_x_2 - happy_x_1 - = case happyOut21 happy_x_2 of { happy_var_2 -> - happyIn17 - (TokenLeft happy_var_2 - )} - -happyReduce_40 = happySpecReduce_2 13# happyReduction_40 -happyReduction_40 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenNum happy_var_2 TokNum) -> - happyIn17 - (TokenExpect happy_var_2 - )} - -happyReduce_41 = happySpecReduce_2 13# happyReduction_41 -happyReduction_41 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn17 - (TokenError happy_var_2 - )} - -happyReduce_42 = happySpecReduce_2 13# happyReduction_42 -happyReduction_42 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn17 - 
(TokenAttributetype happy_var_2 - )} - -happyReduce_43 = happySpecReduce_3 13# happyReduction_43 -happyReduction_43 happy_x_3 - happy_x_2 - happy_x_1 - = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> - case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> - happyIn17 - (TokenAttribute happy_var_2 happy_var_3 - )}} - -happyReduce_44 = happySpecReduce_1 14# happyReduction_44 -happyReduction_44 happy_x_1 - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - happyIn18 - (Just happy_var_1 - )} - -happyReduce_45 = happySpecReduce_0 14# happyReduction_45 -happyReduction_45 = happyIn18 - (Nothing - ) - -happyReduce_46 = happySpecReduce_2 15# happyReduction_46 -happyReduction_46 happy_x_2 - happy_x_1 - = case happyOut20 happy_x_1 of { happy_var_1 -> - case happyOut19 happy_x_2 of { happy_var_2 -> - happyIn19 - (happy_var_1:happy_var_2 - )}} - -happyReduce_47 = happySpecReduce_1 15# happyReduction_47 -happyReduction_47 happy_x_1 - = case happyOut20 happy_x_1 of { happy_var_1 -> - happyIn19 - ([happy_var_1] - )} - -happyReduce_48 = happySpecReduce_2 16# happyReduction_48 -happyReduction_48 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> - happyIn20 - ((happy_var_1,happy_var_2) - )}} - -happyReduce_49 = happySpecReduce_2 17# happyReduction_49 -happyReduction_49 happy_x_2 - happy_x_1 - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> - case happyOut21 happy_x_2 of { happy_var_2 -> - happyIn21 - (happy_var_1 : happy_var_2 - )}} - -happyReduce_50 = happySpecReduce_0 17# happyReduction_50 -happyReduction_50 = happyIn21 - ([] - ) - -happyReduce_51 = happySpecReduce_1 18# happyReduction_51 -happyReduction_51 happy_x_1 - = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokCodeQuote) -> - happyIn22 - (Just happy_var_1 - )} - -happyReduce_52 = happySpecReduce_0 18# happyReduction_52 
-happyReduction_52 = happyIn22 - (Nothing - ) - -happyNewToken action sts stk - = lexer(\tk -> - let cont i = happyDoAction i tk action sts stk in - case tk of { - TokenEOF -> happyDoAction 27# tk action sts stk; - TokenInfo happy_dollar_dollar TokId -> cont 1#; - TokenKW TokSpecId_TokenType -> cont 2#; - TokenKW TokSpecId_Token -> cont 3#; - TokenKW TokSpecId_Name -> cont 4#; - TokenKW TokSpecId_Partial -> cont 5#; - TokenKW TokSpecId_Lexer -> cont 6#; - TokenKW TokSpecId_ImportedIdentity -> cont 7#; - TokenKW TokSpecId_Monad -> cont 8#; - TokenKW TokSpecId_Nonassoc -> cont 9#; - TokenKW TokSpecId_Left -> cont 10#; - TokenKW TokSpecId_Right -> cont 11#; - TokenKW TokSpecId_Prec -> cont 12#; - TokenKW TokSpecId_Expect -> cont 13#; - TokenKW TokSpecId_Error -> cont 14#; - TokenKW TokSpecId_Attribute -> cont 15#; - TokenKW TokSpecId_Attributetype -> cont 16#; - TokenInfo happy_dollar_dollar TokCodeQuote -> cont 17#; - TokenNum happy_dollar_dollar TokNum -> cont 18#; - TokenKW TokColon -> cont 19#; - TokenKW TokSemiColon -> cont 20#; - TokenKW TokDoubleColon -> cont 21#; - TokenKW TokDoublePercent -> cont 22#; - TokenKW TokBar -> cont 23#; - TokenKW TokParenL -> cont 24#; - TokenKW TokParenR -> cont 25#; - TokenKW TokComma -> cont 26#; - _ -> happyError' tk - }) - -happyError_ 27# tk = happyError' tk -happyError_ _ tk = happyError' tk - -happyThen :: () => P a -> (a -> P b) -> P b -happyThen = (>>=) -happyReturn :: () => a -> P a -happyReturn = (return) -happyThen1 = happyThen -happyReturn1 :: () => a -> P a -happyReturn1 = happyReturn -happyError' :: () => (Token) -> P a -happyError' tk = (\token -> happyError) tk - -ourParser = happySomeParser where - happySomeParser = happyThen (happyParse 0#) (\x -> happyReturn (happyOut4 x)) - -happySeq = happyDontSeq - - -happyError :: P a -happyError = lineP >>= \l -> fail (show l ++ ": Parse error\n") -{-# LINE 1 "templates/GenericTemplate.hs" #-} -{-# LINE 1 "templates/GenericTemplate.hs" #-} -{-# LINE 1 "" #-} -{-# LINE 1 "" 
#-} -{-# LINE 1 "templates/GenericTemplate.hs" #-} --- Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp - -{-# LINE 13 "templates/GenericTemplate.hs" #-} - - - - - --- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. -#if __GLASGOW_HASKELL__ > 706 -#define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Bool) -#define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Bool) -#define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Bool) -#else -#define LT(n,m) (n Happy_GHC_Exts.<# m) -#define GTE(n,m) (n Happy_GHC_Exts.>=# m) -#define EQ(n,m) (n Happy_GHC_Exts.==# m) -#endif -{-# LINE 46 "templates/GenericTemplate.hs" #-} - - -data Happy_IntList = HappyCons Happy_GHC_Exts.Int# Happy_IntList - - - - - -{-# LINE 67 "templates/GenericTemplate.hs" #-} - -{-# LINE 77 "templates/GenericTemplate.hs" #-} - -{-# LINE 86 "templates/GenericTemplate.hs" #-} - -infixr 9 `HappyStk` -data HappyStk a = HappyStk a (HappyStk a) - ------------------------------------------------------------------------------ --- starting the parse - -happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll - ------------------------------------------------------------------------------ --- Accepting the parse - --- If the current token is 0#, it means we've just accepted a partial --- parse (a %partial parser). We must ignore the saved token on the top of --- the stack in this case. 
-happyAccept 0# tk st sts (_ `HappyStk` ans `HappyStk` _) = - happyReturn1 ans -happyAccept j tk st sts (HappyStk ans _) = - (happyTcHack j (happyTcHack st)) (happyReturn1 ans) - ------------------------------------------------------------------------------ --- Arrays only: do the next action - - - -happyDoAction i tk st - = {- nothing -} - - - case action of - 0# -> {- nothing -} - happyFail i tk st - -1# -> {- nothing -} - happyAccept i tk st - n | LT(n,(0# :: Happy_GHC_Exts.Int#)) -> {- nothing -} - - (happyReduceArr Happy_Data_Array.! rule) i tk st - where rule = (Happy_GHC_Exts.I# ((Happy_GHC_Exts.negateInt# ((n Happy_GHC_Exts.+# (1# :: Happy_GHC_Exts.Int#)))))) - n -> {- nothing -} - - - happyShift new_state i tk st - where new_state = (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) - where off = indexShortOffAddr happyActOffsets st - off_i = (off Happy_GHC_Exts.+# i) - check = if GTE(off_i,(0# :: Happy_GHC_Exts.Int#)) - then EQ(indexShortOffAddr happyCheck off_i, i) - else False - action - | check = indexShortOffAddr happyTable off_i - | otherwise = indexShortOffAddr happyDefActions st - - -indexShortOffAddr (HappyA# arr) off = - Happy_GHC_Exts.narrow16Int# i - where - i = Happy_GHC_Exts.word2Int# (Happy_GHC_Exts.or# (Happy_GHC_Exts.uncheckedShiftL# high 8#) low) - high = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr (off' Happy_GHC_Exts.+# 1#))) - low = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr off')) - off' = off Happy_GHC_Exts.*# 2# - - - - - -data HappyAddr = HappyA# Happy_GHC_Exts.Addr# - - - - ------------------------------------------------------------------------------ --- HappyState data type (not arrays) - -{-# LINE 170 "templates/GenericTemplate.hs" #-} - ------------------------------------------------------------------------------ --- Shifting a token - -happyShift new_state 0# tk st sts stk@(x `HappyStk` _) = - let i = (case Happy_GHC_Exts.unsafeCoerce# x of { 
(Happy_GHC_Exts.I# (i)) -> i }) in --- trace "shifting the error token" $ - happyDoAction i tk new_state (HappyCons (st) (sts)) (stk) - -happyShift new_state i tk st sts stk = - happyNewToken new_state (HappyCons (st) (sts)) ((happyInTok (tk))`HappyStk`stk) - --- happyReduce is specialised for the common cases. - -happySpecReduce_0 i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happySpecReduce_0 nt fn j tk st@((action)) sts stk - = happyGoto nt j tk st (HappyCons (st) (sts)) (fn `HappyStk` stk) - -happySpecReduce_1 i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happySpecReduce_1 nt fn j tk _ sts@((HappyCons (st@(action)) (_))) (v1`HappyStk`stk') - = let r = fn v1 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) - -happySpecReduce_2 i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happySpecReduce_2 nt fn j tk _ (HappyCons (_) (sts@((HappyCons (st@(action)) (_))))) (v1`HappyStk`v2`HappyStk`stk') - = let r = fn v1 v2 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) - -happySpecReduce_3 i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happySpecReduce_3 nt fn j tk _ (HappyCons (_) ((HappyCons (_) (sts@((HappyCons (st@(action)) (_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk') - = let r = fn v1 v2 v3 in - happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) - -happyReduce k i fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happyReduce k nt fn j tk st sts stk - = case happyDrop (k Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) sts of - sts1@((HappyCons (st1@(action)) (_))) -> - let r = fn stk in -- it doesn't hurt to always seq here... 
- happyDoSeq r (happyGoto nt j tk st1 sts1 r) - -happyMonadReduce k nt fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happyMonadReduce k nt fn j tk st sts stk = - case happyDrop k (HappyCons (st) (sts)) of - sts1@((HappyCons (st1@(action)) (_))) -> - let drop_stk = happyDropStk k stk in - happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk)) - -happyMonad2Reduce k nt fn 0# tk st sts stk - = happyFail 0# tk st sts stk -happyMonad2Reduce k nt fn j tk st sts stk = - case happyDrop k (HappyCons (st) (sts)) of - sts1@((HappyCons (st1@(action)) (_))) -> - let drop_stk = happyDropStk k stk - - off = indexShortOffAddr happyGotoOffsets st1 - off_i = (off Happy_GHC_Exts.+# nt) - new_state = indexShortOffAddr happyTable off_i - - - - in - happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk)) - -happyDrop 0# l = l -happyDrop n (HappyCons (_) (t)) = happyDrop (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) t - -happyDropStk 0# l = l -happyDropStk n (x `HappyStk` xs) = happyDropStk (n Happy_GHC_Exts.-# (1#::Happy_GHC_Exts.Int#)) xs - ------------------------------------------------------------------------------ --- Moving to a new state after a reduction - - -happyGoto nt j tk st = - {- nothing -} - happyDoAction j tk new_state - where off = indexShortOffAddr happyGotoOffsets st - off_i = (off Happy_GHC_Exts.+# nt) - new_state = indexShortOffAddr happyTable off_i - - - - ------------------------------------------------------------------------------ --- Error recovery (0# is the error token) - --- parse error if we are in recovery and we fail again -happyFail 0# tk old_st _ stk@(x `HappyStk` _) = - let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) -> i }) in --- trace "failing" $ - happyError_ i tk - -{- We don't need state discarding for our restricted implementation of - "error". 
In fact, it can cause some bogus parses, so I've disabled it - for now --SDM - --- discard a state -happyFail 0# tk old_st (HappyCons ((action)) (sts)) - (saved_tok `HappyStk` _ `HappyStk` stk) = --- trace ("discarding state, depth " ++ show (length stk)) $ - happyDoAction 0# tk action sts ((saved_tok`HappyStk`stk)) --} - --- Enter error recovery: generate an error token, --- save the old token and carry on. -happyFail i tk (action) sts stk = --- trace "entering error recovery" $ - happyDoAction 0# tk action sts ( (Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# (i))) `HappyStk` stk) - --- Internal happy errors: - -notHappyAtAll :: a -notHappyAtAll = error "Internal Happy error\n" - ------------------------------------------------------------------------------ --- Hack to get the typechecker to accept our action functions - - -happyTcHack :: Happy_GHC_Exts.Int# -> a -> a -happyTcHack x y = y -{-# INLINE happyTcHack #-} - - ------------------------------------------------------------------------------ --- Seq-ing. If the --strict flag is given, then Happy emits --- happySeq = happyDoSeq --- otherwise it emits --- happySeq = happyDontSeq - -happyDoSeq, happyDontSeq :: a -> b -> b -happyDoSeq a b = a `seq` b -happyDontSeq a b = b - ------------------------------------------------------------------------------ --- Don't inline any functions from the template. GHC has a nasty habit --- of deciding to inline happyGoto everywhere, which increases the size of --- the generated parser quite a bit. 
- - -{-# NOINLINE happyDoAction #-} -{-# NOINLINE happyTable #-} -{-# NOINLINE happyCheck #-} -{-# NOINLINE happyActOffsets #-} -{-# NOINLINE happyGotoOffsets #-} -{-# NOINLINE happyDefActions #-} - -{-# NOINLINE happyShift #-} -{-# NOINLINE happySpecReduce_0 #-} -{-# NOINLINE happySpecReduce_1 #-} -{-# NOINLINE happySpecReduce_2 #-} -{-# NOINLINE happySpecReduce_3 #-} -{-# NOINLINE happyReduce #-} -{-# NOINLINE happyMonadReduce #-} -{-# NOINLINE happyGoto #-} -{-# NOINLINE happyFail #-} - --- end of Happy Template. diff -Nru happy-1.19.5/doc/configure.ac happy-1.19.8/doc/configure.ac --- happy-1.19.5/doc/configure.ac 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/doc/configure.ac 2017-10-12 07:46:11.000000000 +0000 @@ -5,7 +5,7 @@ dnl ** check for DocBook toolchain FP_CHECK_DOCBOOK_DTD -FP_DIR_DOCBOOK_XSL([/usr/share/xml/docbook/stylesheet/nwalsh/current /usr/share/xml/docbook/stylesheet/nwalsh /usr/share/sgml/docbook/docbook-xsl-stylesheets* /usr/share/sgml/docbook/xsl-stylesheets* /opt/kde?/share/apps/ksgmltools2/docbook/xsl /usr/share/docbook-xsl /usr/share/sgml/docbkxsl /usr/local/share/xsl/docbook /sw/share/xml/xsl/docbook-xsl]) +FP_DIR_DOCBOOK_XSL([/usr/share/xml/docbook/stylesheet/nwalsh/current /usr/share/xml/docbook/stylesheet/nwalsh /usr/share/sgml/docbook/docbook-xsl-stylesheets* /usr/share/sgml/docbook/xsl-stylesheets* /opt/kde?/share/apps/ksgmltools2/docbook/xsl /usr/share/docbook-xsl /usr/share/sgml/docbkxsl /usr/local/share/xsl/docbook /sw/share/xml/xsl/docbook-xsl /usr/share/xml/docbook/xsl-stylesheets*]) AC_PATH_PROG(DbLatexCmd,dblatex) diff -Nru happy-1.19.5/doc/happy.1.in happy-1.19.8/doc/happy.1.in --- happy-1.19.5/doc/happy.1.in 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/doc/happy.1.in 2017-10-12 07:46:11.000000000 +0000 @@ -85,7 +85,7 @@ resulting parser, but remember that parsers generated this way can only be compiled by GHC\ 3.02 and above. 
-This option may only be used in conjuction with +This option may only be used in conjunction with .BR \-g . .TP @@ -95,7 +95,7 @@ at run-time, including all the shifts, reductions, state transitions and token inputs performed by the parser. -This option may only be used in conjuction with +This option may only be used in conjunction with .BR \-a . .TP diff -Nru happy-1.19.5/doc/happy.xml happy-1.19.8/doc/happy.xml --- happy-1.19.5/doc/happy.xml 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/doc/happy.xml 2017-10-12 07:46:11.000000000 +0000 @@ -80,22 +80,24 @@ - `standard' Haskell 98 (should work with any compiler + standard Haskell 98 (should work with any compiler that compiles Haskell 98). + standard Haskell using arrays arrays back-endsarrays - standard Haskell using arrays (this is not the default + (this is not the default because we have found this generates slower parsers than ). + Haskell with GHC GHC back-endsGHC - Haskell with GHC (Glasgow Haskell) extensions. This is a + (Glasgow Haskell) extensions. This is a slightly faster option than for Glasgow Haskell users. @@ -124,7 +126,7 @@ Remember: parsers produced using Happy should compile without - difficulty under any Haskell 98 compiler or interpreterWith one + difficulty under any Haskell 98 compiler or interpreter.With one exception: if you have a production with a polymorphic type signature, then a compiler that supports local universal quantification is required. See . @@ -239,7 +241,7 @@ Use this module as part of your Haskell program, usually in conjunction with a lexical analyser (a function that splits - the input into ``tokens'', the basic unit of parsing). + the input into tokens, the basic unit of parsing). @@ -249,7 +251,7 @@ /, and the form let var = exp in exp. The grammar file starts off like this: - + { module Main where } @@ -378,7 +380,7 @@ usual. 
The way to think about a parser is with each symbol having a - `value': we defined the values of the tokens above, and the + value: we defined the values of the tokens above, and the grammar defines the values of non-terminal symbols in terms of sequences of other symbols (either tokens or non-terminals). In a production like this: @@ -387,7 +389,7 @@ n : t_1 ... t_n { E } - whenever the parser finds the symbols t_1..t_n in + whenever the parser finds the symbols t_1...t_n in the token stream, it constructs the symbol n and gives it the value E, which may refer to the values of t_1...t_n using the symbols @@ -675,7 +677,7 @@ statements, to be a bit more liberal in what we allow as legal syntax. We probably just want the parser to ignore these extra semicolons, and not generate a ``null statement'' value - or something. The following rule parses a sequence or zero or + or something. The following rule parses a sequence of zero or more statements separated by semicolons, in which the statements may be empty: @@ -794,7 +796,7 @@ %right and %nonassoc, assign precedence levels to the tokens in the declaration. A rule in the grammar may also have a precedence: if the last - terminal in the left hand side of the rule has a precedence, + terminal in the right hand side of the rule has a precedence, then this is the precedence of the whole rule. The precedences are used to resolve ambiguities in the @@ -958,6 +960,7 @@ + Handling parse errors parse errors handling @@ -968,15 +971,16 @@ parse errors --> - Handling parse errors by using an exception monad + by using an exception monad (see ). + Keeping track of line numbers line numbers - Keeping track of line numbers in the input file, for + in the input file, for example for use in error messages (see ). @@ -1480,7 +1484,7 @@ lexer :: (Token -> T a) -> T a where the type constructor T is whatever you want (usually T -a = String -> a. I'm not sure if this is useful, or even if it works +a = String -> a). 
I'm not sure if this is useful, or even if it works properly. @@ -1643,10 +1647,10 @@ the first alternative above. - - E -> E + E - E -> i -- any integer - + +E -> E + E +E -> i -- any integer + GLR parsing will accept this grammar without complaint, and produce @@ -1660,23 +1664,23 @@ Below is the simplified output of the GLR parser for this example. - - Root (0,7,G_E) - (0,1,G_E) => [[(0,1,Tok '1'))]] - (0,3,G_E) => [[(0,1,G_E),(1,2,Tok '+'),(2,3,G_E)]] - (0,5,G_E) => [[(0,1,G_E),(1,2,Tok '+'),(2,5,G_E)] - ,[(0,3,G_E),(3,4,Tok '+'),(4,5,G_E)]] - (0,7,G_E) => [[(0,3,G_E),(3,4,Tok '+'),(4,7,G_E)] - ,[(0,1,G_E),(1,2,Tok '+'),(2,7,G_E)] - ,[(0,5,G_E),(5,6,Tok '+'),(6,7,G_E)]}] - (2,3,G_E) => [[(2,3,Tok '2'))]}] - (2,5,G_E) => [[(2,3,G_E),(3,4,Tok '+'),(4,5,G_E)]}] - (2,7,G_E) => [[(2,3,G_E),(3,4,Tok '+'),(4,7,G_E)]} - ,[(2,5,G_E),(5,6,Tok '+'),(6,7,G_E)]}] - (4,5,G_E) => [[(4,5,Tok '3'))]}] - (4,7,G_E) => [[(4,5,G_E),(5,6,Tok '+'),(6,7,G_E)]}] - (6,7,G_E) => [[(6,7,Tok '4'))]}] - + +Root (0,7,G_E) +(0,1,G_E) => [[(0,1,Tok '1'))]] +(0,3,G_E) => [[(0,1,G_E),(1,2,Tok '+'),(2,3,G_E)]] +(0,5,G_E) => [[(0,1,G_E),(1,2,Tok '+'),(2,5,G_E)] + ,[(0,3,G_E),(3,4,Tok '+'),(4,5,G_E)]] +(0,7,G_E) => [[(0,3,G_E),(3,4,Tok '+'),(4,7,G_E)] + ,[(0,1,G_E),(1,2,Tok '+'),(2,7,G_E)] + ,[(0,5,G_E),(5,6,Tok '+'),(6,7,G_E)]}] +(2,3,G_E) => [[(2,3,Tok '2'))]}] +(2,5,G_E) => [[(2,3,G_E),(3,4,Tok '+'),(4,5,G_E)]}] +(2,7,G_E) => [[(2,3,G_E),(3,4,Tok '+'),(4,7,G_E)]} + ,[(2,5,G_E),(5,6,Tok '+'),(6,7,G_E)]}] +(4,5,G_E) => [[(4,5,Tok '3'))]}] +(4,7,G_E) => [[(4,5,G_E),(5,6,Tok '+'),(6,7,G_E)]}] +(6,7,G_E) => [[(6,7,Tok '4'))]}] + This is a directed, acyclic and-or graph. @@ -1798,15 +1802,26 @@ too slow. - Given a file Foo.y, file - FooData.hs is generated with basic type - information, the parser tables, and the header and tail code - that was included in the parser specification. - Note that Happy generates the - module declaration line, so you should NOT give it in the - grammar file. 
- The driver is placed in file Foo.hs, and - does not contain any user-supplied text. + Given a file Foo.y, the file + FooData.hs, containing the data + module, is generated with basic type information, the + parser tables, and the header and tail code that was + included in the parser specification. Note that + Happy can automatically + generate the necessary module declaration statements, + if you do not choose to provide one in the grammar + file. But, if you do choose to provide the module + declaration statement, then the name of the module will + be parsed and used as the name of the driver + module. The parsed name will also be used to form the + name of the data module, but with the string + Data appended to it. The driver + module, which is to be found in the file + Foo.hs, will not contain any other + user-supplied text besides the module name. Do not + bother to supply any export declarations in your module + declaration statement: they will be ignored and + dropped, in favor of the standard export declaration. @@ -1964,20 +1979,20 @@ returned, since it may contain useful information. Unconsumed tokens are returned when there is a global parse error. - - type ForestId = (Int,Int,GSymbol) - data GSymbol = <... automatically generated ...> - type Forest = FiniteMap ForestId [Branch] - type RootNode = ForestId - type Tokens = [[(Int, GSymbol)]] - data Branch = Branch {b_sem :: GSem, b_nodes :: [ForestId]} - data GSem = <... automatically generated ...> - - data GLRResult - = ParseOK RootNode Forest -- forest with root - | ParseError Tokens Forest -- partial forest with bad input - | ParseEOF Forest -- partial forest (missing input) - + +type ForestId = (Int,Int,GSymbol) +data GSymbol = <... automatically generated ...> +type Forest = FiniteMap ForestId [Branch] +type RootNode = ForestId +type Tokens = [[(Int, GSymbol)]] +data Branch = Branch {b_sem :: GSem, b_nodes :: [ForestId]} +data GSem = <... 
automatically generated ...> + +data GLRResult + = ParseOK RootNode Forest -- forest with root + | ParseError Tokens Forest -- partial forest with bad input + | ParseEOF Forest -- partial forest (missing input) + Conceptually, the parse forest is a directed, acyclic and-or graph. It is represented by a mapping of ForestIds @@ -2069,12 +2084,12 @@ Note that the type signature is required, else the types in use can't be determined by the parser generator. - - E :: {Int} -- type signature needed - : E '+' E { $1 + $3 } - | E '*' E { $1 * $3 } - | i { $1 } - + +E :: {Int} -- type signature needed + : E '+' E { $1 + $3 } + | E '*' E { $1 * $3 } + | i { $1 } + This mode works by converting each of the semantic rules into functions (abstracted over the dollar variables mentioned), @@ -2105,11 +2120,11 @@ See the full expr-eval example for more information. - - class TreeDecode a where - decode_b :: (ForestId -> [Branch]) -> Branch -> [Decode_Result a] - decode :: TreeDecode a => (ForestId -> [Branch]) -> ForestId -> [Decode_Result a] - + +class TreeDecode a where + decode_b :: (ForestId -> [Branch]) -> Branch -> [Decode_Result a] +decode :: TreeDecode a => (ForestId -> [Branch]) -> ForestId -> [Decode_Result a] + The GLR parser generator identifies the types involved in each @@ -2153,12 +2168,12 @@ The following grammar is from the expr-tree example. - - E :: {Tree ForestId Int} - : E '+' E { Plus $1 $3 } - | E '*' E { Times $1 $3 } - | i { Const $1 } - + +E :: {Tree ForestId Int} + : E '+' E { Plus $1 $3 } + | E '*' E { Times $1 $3 } + | i { Const $1 } + Here, the semantic values provide more meaningful labels than @@ -2198,10 +2213,10 @@ problems with resolution of class instances. - - class LabelDecode a where - unpack :: GSem -> a - + +class LabelDecode a where + unpack :: GSem -> a + Internally, the semantic values are packed in a union type as @@ -2437,10 +2452,10 @@ items are required, to match the number of i tokens in the input. 
- - S -> A Q i | + - A -> - + +S -> A Q i | + +A -> + The solution to this is not surprising. Problematic recursions are detected as zero-span reductions in a state which has a @@ -2752,7 +2767,7 @@ grammars. - On practical way to overcome this limitation is to ensure that each attribute + One practical way to overcome this limitation is to ensure that each attribute is always used in either a top-down (inherited) fashion or in a bottom-up (synthesized) fashion. If the calculations are sufficiently lazy, one can "tie the knot" by synthesizing a value in one attribute, and then assigning @@ -2953,6 +2968,25 @@ + file + =file + + + pretty print + + Directs Happy to produce a file + containing a pretty-printed form of the grammar, containing only + the productions, without any semantic actions or type signatures. + If no file name is provided, then the file name will be computed + by replacing the extension of the input file with + .grammar. + + + + + + + dir =dir @@ -3029,7 +3063,7 @@ generate smaller faster parsers. Type-safety isn't compromised. - This option may only be used in conjuction with + This option may only be used in conjunction with . @@ -3076,6 +3110,43 @@ + + + + + glr + + + back-ends + glr + + Generate a GLR parser for ambiguous grammars. + + + + + + + + + decode + + Generate simple decoding code for GLR result. + + + + + + + + + filter + + Filter the GLR parse forest with respect to semantic usage. + + + + @@ -3143,16 +3214,16 @@ the BNF syntax from the Haskell Report): - id ::= alpha { idchar } - | ' { any{^'} | \' } ' - | " { any{^"} | \" } " - - alpha ::= A | B | ... | Z - | a | b | ... | z - - idchar ::= alpha - | 0 | 1 | ... | 9 - | _ +id ::= alpha { idchar } + | ' { any{^'} | \' } ' + | " { any{^"} | \" } " + +alpha ::= A | B | ... | Z + | a | b | ... | z + +idchar ::= alpha + | 0 | 1 | ... | 9 + | _ @@ -3175,7 +3246,7 @@ The Haskell module header contains the module name, exports, and imports.
No other code is allowed in the - header---this is because Happy may need to include + header—this is because Happy may need to include its own import statements directly after the user defined header. @@ -3439,9 +3510,31 @@ Specifies the function to be called in the event of a - parse error. The type of <f> varies + parse error. The type of <identifier> varies depending on the presence of %lexer (see - ). + ) and %errorhandlertype + (see the following). + + + + Additional error information + + +%errorhandlertype (explist | default) + + + + %errorhandlertype + + + (optional) The expected type of the user-supplied error handling can be + applied with additional information. By default, no information is added, for + compatibility with previous versions. However, if explist + is provided with this directive, then the first application will be of + type [String], providing a description of possible tokens + that would not have failed the parser in place of the token that has caused + the error. + @@ -3546,12 +3639,14 @@ Parameterized Productions + Starting from version 1.17.1, Happy supports parameterized productions which provide a convenient notation for capturing recurring patterns in context free grammars. This gives the benefits of something similar to parsing combinators in the context of Happy grammars. + This functionality is best illustrated with an example: opt(p) : p { Just $1 } @@ -3590,7 +3685,7 @@ The second one, list is like p*. - Parameterized productions are implemented as a prepossessing + Parameterized productions are implemented as a preprocessing pass in Happy: each instantiation of a production turns into a separate non-terminal, but are careful to avoid generating the same rule multiple times, as this would lead to an ambiguous grammar. @@ -3664,7 +3759,216 @@ info files - (section under construction) + + Happy info files, generated using the -i flag, + are your most important tool for debugging errors in your grammar. 
+ Although they can be quite verbose, the general concept behind + them is quite simple. + + + + An info file contains the following information: + + + + + A summary of all shift/reduce and reduce/reduce + conflicts in the grammar. + + + Under section Grammar, a summary of all the rules in the grammar. These rules correspond directly to your input file, absent the actual Haskell code that is to be run for each rule. A rule is written in the form <non-terminal> -> <id> ... + + + Under section Terminals, a summary of all the terminal tokens you may run against, as well as the Haskell pattern which matches against them. This corresponds directly to the contents of your %token directive (). + + + Under section Non-terminals, a summary of which rules apply to which productions. This is generally redundant with the Grammar section. + + + The primary section States, which describes the state-machine Happy built for your grammar, and all of the transitions for each state. + + + Finally, some statistics Grammar Totals at the end of the file. + + + In general, you will be most interested in the States section, as it will give you information, in particular, about any conflicts your grammar may have. + + + States + Although Happy does its best to insulate you from the + vagaries of parser generation, it's important to know a little + about how shift-reduce parsers work in order to be able to + interpret the entries in the States + section. + + In general, a shift-reduce parser operates by maintaining + a parse stack, which tokens and productions are shifted onto or + reduced off of. The parser maintains a state machine, which + accepts a token, performs some shift or reduce, and transitions + to a new state for the next token. Importantly, these states + represent multiple possible productions, + because in general the parser does not know what the actual + production for the tokens it's parsing is going to be.
+ There's no direct correspondence between the state-machine + and the input grammar; this is something you have to + reverse engineer. + + With this knowledge in mind, we can look at two example states + from the example grammar from : + + + +State 5 + + Exp1 -> Term . (rule 5) + Term -> Term . '*' Factor (rule 6) + Term -> Term . '/' Factor (rule 7) + + in reduce using rule 5 + '+' reduce using rule 5 + '-' reduce using rule 5 + '*' shift, and enter state 11 + '/' shift, and enter state 12 + ')' reduce using rule 5 + %eof reduce using rule 5 + +State 9 + + Factor -> '(' . Exp ')' (rule 11) + + let shift, and enter state 2 + int shift, and enter state 7 + var shift, and enter state 8 + '(' shift, and enter state 9 + + Exp goto state 10 + Exp1 goto state 4 + Term goto state 5 + Factor goto state 6 + + + For each state, the first set of lines describes the + rules which correspond to this state. A + period . is inserted in the production to + indicate where, if this is indeed the correct production, we + would have parsed up to. In state 5, there are multiple rules, + so we don't know if we are parsing an Exp1, a + multiplication or a division (however, we do know there is a + Term on the parse stack); in state 9, there + is only one rule, so we know we are definitely parsing a + Factor. + + The next set of lines specifies the action and state + transition that should occur given a token. For example, if in + state 5 we process the '*' token, this token + is shifted onto the parse stack and we transition to the state + corresponding to the rule Term -> Term '*' . + Factor (matching the token disambiguated which state + we are in.) + + Finally, for states which shift on non-terminals, + there will be a last set of lines saying what should be done + after the non-terminal has been fully parsed; this information + is effectively the stack for the parser. When a reduce occurs, + these goto entries are used to determine what the next + state should be. 
+ + + + + + Interpreting conflicts + + When you have a conflict, you will see an entry like this + in your info file: + + +State 432 + + atype -> SIMPLEQUOTE '[' . comma_types0 ']' (rule 318) + sysdcon -> '[' . ']' (rule 613) + + '_' shift, and enter state 60 + 'as' shift, and enter state 16 + +... + + ']' shift, and enter state 381 + (reduce using rule 328) + +... + + + On large, complex grammars, determining what the conflict is + can be a bit of an art, since the state with the conflict may + not have enough information to determine why a conflict is + occurring. + + In some cases, the rules associated with the state with + the conflict will immediately give you enough guidance to + determine what the ambiguous syntax is. + For example, in the miniature shift/reduce conflict + described in , + the conflict looks like this: + + +State 13 + + exp -> exp . '+' exp0 (rule 1) + exp0 -> if exp then exp else exp . (rule 3) + + then reduce using rule 3 + else reduce using rule 3 + '+' shift, and enter state 7 + (reduce using rule 3) + + %eof reduce using rule 3 + + +Here, rule 3 makes it easy to imagine that we had been parsing a + statement like if 1 then 2 else 3 + 4; the conflict + arises from whether or not we should shift (thus parsing as + if 1 then 2 else (3 + 4)) or reduce (thus parsing + as (if 1 then 2 else 3) + 4). + +Sometimes, there's not as much helpful context in the error message; +take this abridged example from GHC's parser: + + +State 49 + + type -> btype . (rule 281) + type -> btype . '->' ctype (rule 284) + + '->' shift, and enter state 472 + (reduce using rule 281) + + +A pair of rules like this doesn't always result in a shift/reduce + conflict: to reduce with rule 281 implies that, in some context when + parsing the non-terminal type, it is possible for + an '->' to occur immediately afterwards (indeed + these source rules are factored such that there is no rule of the form + ... -> type '->' ...).
+ +The best way this author knows how to sleuth this out is to + look for instances of the token and check if any of the preceding + non-terminals could terminate in a type: + + + texp -> exp '->' texp (500) + exp -> infixexp '::' sigtype (414) + sigtype -> ctype (260) + ctype -> type (274) + + +As it turns out, this shift/reduce conflict results from + ambiguity for view patterns, as in + the code sample case v of { x :: T -> T ... }. + + @@ -3682,16 +3986,17 @@ - + If you are using GHC + GHC - If you are using GHC, generate parsers using the + , generate parsers using the -a -g -c options, and compile them using GHC with the -fglasgow-exts option. This is worth a lot, in terms of compile-time, - execution speed and binary size omitting the + execution speed and binary size.omitting the -a may generate slightly faster parsers, - but they will be much bigger.. + but they will be much bigger. + @@ -3710,10 +4015,10 @@ - + Use left recursion rather than right recursion + recursion, left vs. right - Use left recursion rather than right recursion wherever possible. While not strictly a performance issue, this affects the size of the parser stack, which is kept on the heap and thus needs to be garbage collected. @@ -3741,11 +4046,12 @@ + Give type signatures type signatures in grammar - Give type signatures for everything (see . This is reported to improve things by about 50%. If there is a type signature for every single non-terminal in the grammar, then Happy @@ -3917,7 +4223,7 @@ LALR(1) (look-ahead LR(1)), the method used by Happy and - yacc, is tradeoff between the two. + yacc, is a tradeoff between the two.
An LALR(1) parser has the same number of states as an SLR(1) parser, but it uses a more complex method to calculate the lookahead tokens that are valid at each point, and resolves diff -Nru happy-1.19.5/examples/Calc.ly happy-1.19.8/examples/Calc.ly --- happy-1.19.5/examples/Calc.ly 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/Calc.ly 2017-10-12 07:46:11.000000000 +0000 @@ -56,7 +56,7 @@ > { -All parsers must declair this function, +All parsers must declare this function, which is called when an error is detected. Note that currently we do no error recovery. diff -Nru happy-1.19.5/examples/glr/bio-eg/Bio.y happy-1.19.8/examples/glr/bio-eg/Bio.y --- happy-1.19.5/examples/glr/bio-eg/Bio.y 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/bio-eg/Bio.y 2017-10-12 07:46:11.000000000 +0000 @@ -1,409 +1,409 @@ -{ - --- (c) 2004 University of Durham, Julia Fischer --- Portions of the grammar are derived from work by Leung/Mellish/Robertson - -import Data.Char -} - -%tokentype { Token } - -%token - a { Base_A } - c { Base_C } - g { Base_G } - t { Base_T } - -%lexer { lexer } { TokenEOF } - -%% - -M - : Intergenic_noise Match Intergenic_noise {} - -- replace NSkip by Intergenic_noise? - -Intergenic_noise - : {} - | Intergenic_noise N {} -- Left-assoc, less stack? 
- -Match - : Promoter Translation {} - -Promoter :: {Int} - : Promoter_consensus {1} - | Promoter_hcv_large {2} - | Promoter_cart {3} - | Promoter_hcv_small {4} - - --------------------- --- HCV SMALL --------------------- --- regions [data from Leung (hvc_small.gr)] -Promoter_hcv_small - : N V N7_skip K B K N20_skip R N12_skip {} --mod 3 = 0 - | K N B N N D N18_skip H N9_skip V N {} --mod 3 = 0 - | t N20_skip N6_skip t N4_skip t N6_skip {} --mod 3 = 0 - - - --------------------- --- CONSENSUS --------------------- --- regions [data from Leung (consensus.gr)] - -Promoter_consensus - : Minus_35 N15_skip Minus_10 {} - | Minus_35 N15_skip N1_skip Minus_10 N5_skip {} - | Minus_35 N15_skip N2_skip Minus_10 N5_skip {} - | Minus_35 N15_skip N3_skip Minus_10 N5_skip {} - | Minus_35 N15_skip N4_skip Minus_10 N5_skip {} - -Minus_35 - : t t g a c a {} - -Minus_10 - : t a t a a t {} - --------------------- --- HVC LARGE --------------------- --- regions [data from Leung (hvc_large.gr)] - -Promoter_hcv_large - : H N11_skip D Y B N3_skip H N12_skip B N5_skip Y N2_skip W N4_skip {} - | N D N3_skip V N1_skip B N12_skip H N2_skip B D N2_skip H N2_skip H B N4_skip W N6_skip H H {} - | N H N B N D N6_skip H N4_skip K B N6_skip D B N3_skip B N4_skip V N4_skip H N2_skip D N7_skip {} - | N N D N12_skip B D N2_skip V N2_skip H D N2_skip D H B N7_skip B D N5_skip H H N6_skip {} - | D N D N12_skip B N5_skip H N13_skip B N H H W N6_skip H Y {} - | N N D N B N D N H N3_skip D N4_skip V N2_skip H N D H N6_skip H N3_skip D N6_skip H N2_skip B N3_skip {} - | D N8_skip H N1_skip H N1_skip D N4_skip H N3_skip V H N11_skip H N2_skip H N5_skip D N1_skip V N1_skip H {} - | H N3_skip B N9_skip H N12_skip H D N4_skip W B N2_skip D D H N1_skip D N5_skip D H {} - | V N7_skip V N2_skip D N2_skip D N6_skip B H N11_skip D D N1_skip H N1_skip H H N1_skip B N2_skip {} - | D N8_skip B D D N2_skip B N6_skip H N4_skip D N5_skip D N1_skip H D N2_skip D N3_skip D D N6_skip {} - | B N13_skip H N1_skip D H V 
N14_skip B N1_skip V N2_skip D N1_skip D V D N1_skip D N3_skip H {} - | H V N4_skip B N1_skip D N6_skip D N4_skip D N4_skip H H N3_skip B N6_skip B N1_skip D N3_skip D N1_skip D N4_skip {} - | W N3_skip V N9_skip D N11_skip B N1_skip D H N5_skip D H N1_skip D N1_skip H D N6_skip {} - | K N2_skip D N3_skip H N1_skip H N6_skip H N2_skip B N5_skip D D N7_skip V N2_skip D N1_skip H H N7_skip {} - | D N11_skip H D D N2_skip D N6_skip D N3_skip H N6_skip V N1_skip D D N2_skip H B N1_skip B N1_skip {} - | H N3_skip B N1_skip H N6_skip V N1_skip B N2_skip V N2_skip D N7_skip B N8_skip H N3_skip H D N1_skip H N1_skip H N1_skip {} - | B N4_skip B N12_skip H N4_skip V N2_skip H D N2_skip V H N1_skip H N2_skip H N3_skip B N1_skip K N4_skip {} - | W D N7_skip B N1_skip D N2_skip D N2_skip W N1_skip D H N2_skip D N12_skip D N5_skip H {} - | a N2_skip t N4_skip g N18_skip {} - --------------------- --- CART --------------------- --- regions [data from Leung (cart.gr)] - -Promoter_cart - : N N t a N N N N N N N N N N N {} - | N N V a N N N t N N N N N N N {} - | t N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N t B N N N t N N N N N N N t N N N N N N N {} - - --------------------------------------------------------------------------------------------------------------- --------------------------------------------------------------------------------------------------------------- - -Translation - : Start Mincodon Stop {} - | Start Mincodon Codon Stop {} - | Start Mincodon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon 
Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Stop {} - | Start Mincodon Mincodon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Stop {} - | Start Mincodon Mincodon Mincodon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon 
Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Stop 
{} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} - | Start Mincodon Mincodon Mincodon Mincodon Mincodon Mincodon Stop {} --252 Basen - - -Mincodon : Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon {} --42 Basen - - - - -N0_skip - : {} -N1_skip - : N {} -- match starts one place on -N2_skip - : N N {} -- match starts two places on -N3_skip - : N N N {} -- missing an entire codon -N4_skip - : N N N N {} -- missing 4 bases -N5_skip - : N N N N N {} -- missing 5 bases -N6_skip - : N N N N N N {} -- missing 6 bases -N7_skip - : N N N N N N N {} -- missing 8 bases -N8_skip - : N N N N N N N N {} -- missing 7 bases -N9_skip - : N N N N N N N N N {} -- missing 9 bases -N10_skip - : N N N N N N N N N N {} -- 
missing 10 bases -N11_skip - : N10_skip N1_skip {} -- missing 11 bases -N12_skip - : N10_skip N2_skip {} -- missing 12 bases -N13_skip - : N10_skip N3_skip {} -- missing 13 bases -N14_skip - : N10_skip N4_skip {} -- missing 14 bases -N15_skip - : N10_skip N5_skip {} -- missing 15 bases -N16_skip - : N10_skip N6_skip {} -- missing 16 bases -N17_skip - : N10_skip N7_skip {} -- missing 17 bases -N18_skip - : N10_skip N8_skip {} -- missing 18 bases -N19_skip - : N10_skip N9_skip {} -- missing 19 bases -N20_skip - : N10_skip N10_skip {} -- missing 20 bases -N30_skip - : N10_skip N10_skip N10_skip {} -- missing 30 bases -N40_skip - : N10_skip N10_skip N10_skip N10_skip {} -- missing 40 bases -N50_skip - : N10_skip N10_skip N10_skip N10_skip N10_skip {} -- missing 50 bases -N60_skip - : N10_skip N50_skip {} -- missing 40 bases -N70_skip - : N10_skip N10_skip N50_skip {} -- missing 50 bases -N80_skip - : N10_skip N10_skip N10_skip N50_skip {} -- missing 40 bases -N90_skip - : N10_skip N10_skip N10_skip N10_skip N50_skip{} -- missing 50 bases -N100_skip - : N50_skip N50_skip {} - - - --- Definitions of base categories according to the --- International Union of Biochemistry (IUB) --- Standard Nucleotide Codes. 
[Leung_data] - -N -- any base - : a {} - | c {} - | g {} - | t {} - -Y -- pyrimidin - : c {} - | t {} - -R -- purine - : a {} - | g {} - -S -- strong bonding bases - : g {} - | c {} - -W -- weak bonding bases - : a {} - | t {} - -K -- keto bases - : g {} - | t {} - -AM -- aMino bases - : a {} - | c {} - -B -- not base a - : g {} - | c {} - | t {} - -D -- not base c - : a {} - | g {} - | t {} - -H -- not base g - : a {} - | c {} - | t {} - -V -- not base t - : a {} - | c {} - | g {} - - - -Base - : a {} - | c {} - | g {} - | t {} - --------------------- --- codons - -Start : a t g {} -- start codon - -Stop -- stop codons - : t a a {} - | t a g {} - | t g a {} - -Codon -- any other codon - : a a a {} - | a a c {} - | a a g {} - | a a t {} - | a c a {} - | a c c {} - | a c g {} - | a c t {} - | a g a {} - | a g c {} - | a g g {} - | a g t {} - | a t a {} - | a t c {} - | a t g {} - | a t t {} - | c a a {} - | c a c {} - | c a g {} - | c a t {} - | c c a {} - | c c c {} - | c c g {} - | c c t {} - | c g a {} - | c g c {} - | c g g {} - | c g t {} - | c t a {} - | c t c {} - | c t g {} - | c t t {} - | g a a {} - | g a c {} - | g a g {} - | g a t {} - | g c a {} - | g c c {} - | g c g {} - | g c t {} - | g g a {} - | g g c {} - | g g g {} - | g g t {} - | g t a {} - | g t c {} - | g t g {} - | g t t {} - | t a c {} - | t a t {} - | t c a {} - | t c c {} - | t c g {} - | t c t {} - | t g c {} - | t g g {} - | t g t {} - | t t a {} - | t t c {} - | t t g {} - | t t t {} - --------------------- - - - ---%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% ---some aux code -{ - -data Token - = TokenEOF - | Base_A - | Base_C - | Base_G - | Base_T - deriving (Show,Eq, Ord) - - -lexer :: String -> [Token] -lexer [] = [] -lexer (' ':cs) = lexer cs -lexer ('\n':cs) = lexer cs -lexer ('a':cs) = Base_A : lexer cs -lexer ('c':cs) = Base_C : lexer cs -lexer ('g':cs) = Base_G : lexer cs -lexer ('t':cs) = Base_T : lexer cs - -} +{ + +-- (c) 2004 University of 
Durham, Julia Fischer +-- Portions of the grammar are derived from work by Leung/Mellish/Robertson + +import Data.Char +} + +%tokentype { Token } + +%token + a { Base_A } + c { Base_C } + g { Base_G } + t { Base_T } + +%lexer { lexer } { TokenEOF } + +%% + +M + : Intergenic_noise Match Intergenic_noise {} + -- replace NSkip by Intergenic_noise? + +Intergenic_noise + : {} + | Intergenic_noise N {} -- Left-assoc, less stack? + +Match + : Promoter Translation {} + +Promoter :: {Int} + : Promoter_consensus {1} + | Promoter_hcv_large {2} + | Promoter_cart {3} + | Promoter_hcv_small {4} + + +-------------------- +-- HCV SMALL +-------------------- +-- regions [data from Leung (hvc_small.gr)] +Promoter_hcv_small + : N V N7_skip K B K N20_skip R N12_skip {} --mod 3 = 0 + | K N B N N D N18_skip H N9_skip V N {} --mod 3 = 0 + | t N20_skip N6_skip t N4_skip t N6_skip {} --mod 3 = 0 + + + +-------------------- +-- CONSENSUS +-------------------- +-- regions [data from Leung (consensus.gr)] + +Promoter_consensus + : Minus_35 N15_skip Minus_10 {} + | Minus_35 N15_skip N1_skip Minus_10 N5_skip {} + | Minus_35 N15_skip N2_skip Minus_10 N5_skip {} + | Minus_35 N15_skip N3_skip Minus_10 N5_skip {} + | Minus_35 N15_skip N4_skip Minus_10 N5_skip {} + +Minus_35 + : t t g a c a {} + +Minus_10 + : t a t a a t {} + +-------------------- +-- HVC LARGE +-------------------- +-- regions [data from Leung (hvc_large.gr)] + +Promoter_hcv_large + : H N11_skip D Y B N3_skip H N12_skip B N5_skip Y N2_skip W N4_skip {} + | N D N3_skip V N1_skip B N12_skip H N2_skip B D N2_skip H N2_skip H B N4_skip W N6_skip H H {} + | N H N B N D N6_skip H N4_skip K B N6_skip D B N3_skip B N4_skip V N4_skip H N2_skip D N7_skip {} + | N N D N12_skip B D N2_skip V N2_skip H D N2_skip D H B N7_skip B D N5_skip H H N6_skip {} + | D N D N12_skip B N5_skip H N13_skip B N H H W N6_skip H Y {} + | N N D N B N D N H N3_skip D N4_skip V N2_skip H N D H N6_skip H N3_skip D N6_skip H N2_skip B N3_skip {} + | D N8_skip H 
N1_skip H N1_skip D N4_skip H N3_skip V H N11_skip H N2_skip H N5_skip D N1_skip V N1_skip H {} + | H N3_skip B N9_skip H N12_skip H D N4_skip W B N2_skip D D H N1_skip D N5_skip D H {} + | V N7_skip V N2_skip D N2_skip D N6_skip B H N11_skip D D N1_skip H N1_skip H H N1_skip B N2_skip {} + | D N8_skip B D D N2_skip B N6_skip H N4_skip D N5_skip D N1_skip H D N2_skip D N3_skip D D N6_skip {} + | B N13_skip H N1_skip D H V N14_skip B N1_skip V N2_skip D N1_skip D V D N1_skip D N3_skip H {} + | H V N4_skip B N1_skip D N6_skip D N4_skip D N4_skip H H N3_skip B N6_skip B N1_skip D N3_skip D N1_skip D N4_skip {} + | W N3_skip V N9_skip D N11_skip B N1_skip D H N5_skip D H N1_skip D N1_skip H D N6_skip {} + | K N2_skip D N3_skip H N1_skip H N6_skip H N2_skip B N5_skip D D N7_skip V N2_skip D N1_skip H H N7_skip {} + | D N11_skip H D D N2_skip D N6_skip D N3_skip H N6_skip V N1_skip D D N2_skip H B N1_skip B N1_skip {} + | H N3_skip B N1_skip H N6_skip V N1_skip B N2_skip V N2_skip D N7_skip B N8_skip H N3_skip H D N1_skip H N1_skip H N1_skip {} + | B N4_skip B N12_skip H N4_skip V N2_skip H D N2_skip V H N1_skip H N2_skip H N3_skip B N1_skip K N4_skip {} + | W D N7_skip B N1_skip D N2_skip D N2_skip W N1_skip D H N2_skip D N12_skip D N5_skip H {} + | a N2_skip t N4_skip g N18_skip {} + +-------------------- +-- CART +-------------------- +-- regions [data from Leung (cart.gr)] + +Promoter_cart + : N N t a N N N N N N N N N N N {} + | N N V a N N N t N N N N N N N {} + | t N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N t B N N N t N N N N N N N t N N N N N N N {} + + +-------------------------------------------------------------------------------------------------------------- +-------------------------------------------------------------------------------------------------------------- + +Translation + : Start Mincodon Stop {} + | Start Mincodon Codon Stop {} + | Start Mincodon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Stop {} + | Start 
Mincodon Codon Codon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Stop {} + | Start Mincodon Mincodon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Stop {} + | Start Mincodon Mincodon Mincodon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Stop {} + | Start Mincodon 
Mincodon Mincodon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon 
Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Stop {} + | Start Mincodon Mincodon Mincodon Mincodon Mincodon Mincodon Stop {} --252 Basen + + +Mincodon : Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon Codon {} --42 Basen + + + + +N0_skip + : {} +N1_skip + : N {} -- match starts one 
place on +N2_skip + : N N {} -- match starts two places on +N3_skip + : N N N {} -- missing an entire codon +N4_skip + : N N N N {} -- missing 4 bases +N5_skip + : N N N N N {} -- missing 5 bases +N6_skip + : N N N N N N {} -- missing 6 bases +N7_skip + : N N N N N N N {} -- missing 8 bases +N8_skip + : N N N N N N N N {} -- missing 7 bases +N9_skip + : N N N N N N N N N {} -- missing 9 bases +N10_skip + : N N N N N N N N N N {} -- missing 10 bases +N11_skip + : N10_skip N1_skip {} -- missing 11 bases +N12_skip + : N10_skip N2_skip {} -- missing 12 bases +N13_skip + : N10_skip N3_skip {} -- missing 13 bases +N14_skip + : N10_skip N4_skip {} -- missing 14 bases +N15_skip + : N10_skip N5_skip {} -- missing 15 bases +N16_skip + : N10_skip N6_skip {} -- missing 16 bases +N17_skip + : N10_skip N7_skip {} -- missing 17 bases +N18_skip + : N10_skip N8_skip {} -- missing 18 bases +N19_skip + : N10_skip N9_skip {} -- missing 19 bases +N20_skip + : N10_skip N10_skip {} -- missing 20 bases +N30_skip + : N10_skip N10_skip N10_skip {} -- missing 30 bases +N40_skip + : N10_skip N10_skip N10_skip N10_skip {} -- missing 40 bases +N50_skip + : N10_skip N10_skip N10_skip N10_skip N10_skip {} -- missing 50 bases +N60_skip + : N10_skip N50_skip {} -- missing 40 bases +N70_skip + : N10_skip N10_skip N50_skip {} -- missing 50 bases +N80_skip + : N10_skip N10_skip N10_skip N50_skip {} -- missing 40 bases +N90_skip + : N10_skip N10_skip N10_skip N10_skip N50_skip{} -- missing 50 bases +N100_skip + : N50_skip N50_skip {} + + + +-- Definitions of base categories according to the +-- International Union of Biochemistry (IUB) +-- Standard Nucleotide Codes. 
[Leung_data] + +N -- any base + : a {} + | c {} + | g {} + | t {} + +Y -- pyrimidin + : c {} + | t {} + +R -- purine + : a {} + | g {} + +S -- strong bonding bases + : g {} + | c {} + +W -- weak bonding bases + : a {} + | t {} + +K -- keto bases + : g {} + | t {} + +AM -- aMino bases + : a {} + | c {} + +B -- not base a + : g {} + | c {} + | t {} + +D -- not base c + : a {} + | g {} + | t {} + +H -- not base g + : a {} + | c {} + | t {} + +V -- not base t + : a {} + | c {} + | g {} + + + +Base + : a {} + | c {} + | g {} + | t {} + +-------------------- +-- codons + +Start : a t g {} -- start codon + +Stop -- stop codons + : t a a {} + | t a g {} + | t g a {} + +Codon -- any other codon + : a a a {} + | a a c {} + | a a g {} + | a a t {} + | a c a {} + | a c c {} + | a c g {} + | a c t {} + | a g a {} + | a g c {} + | a g g {} + | a g t {} + | a t a {} + | a t c {} + | a t g {} + | a t t {} + | c a a {} + | c a c {} + | c a g {} + | c a t {} + | c c a {} + | c c c {} + | c c g {} + | c c t {} + | c g a {} + | c g c {} + | c g g {} + | c g t {} + | c t a {} + | c t c {} + | c t g {} + | c t t {} + | g a a {} + | g a c {} + | g a g {} + | g a t {} + | g c a {} + | g c c {} + | g c g {} + | g c t {} + | g g a {} + | g g c {} + | g g g {} + | g g t {} + | g t a {} + | g t c {} + | g t g {} + | g t t {} + | t a c {} + | t a t {} + | t c a {} + | t c c {} + | t c g {} + | t c t {} + | t g c {} + | t g g {} + | t g t {} + | t t a {} + | t t c {} + | t t g {} + | t t t {} + +-------------------- + + + +--%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +--some aux code +{ + +data Token + = TokenEOF + | Base_A + | Base_C + | Base_G + | Base_T + deriving (Show,Eq, Ord) + + +lexer :: String -> [Token] +lexer [] = [] +lexer (' ':cs) = lexer cs +lexer ('\n':cs) = lexer cs +lexer ('a':cs) = Base_A : lexer cs +lexer ('c':cs) = Base_C : lexer cs +lexer ('g':cs) = Base_G : lexer cs +lexer ('t':cs) = Base_T : lexer cs + +} diff -Nru 
happy-1.19.5/examples/glr/bio-eg/Makefile happy-1.19.8/examples/glr/bio-eg/Makefile --- happy-1.19.5/examples/glr/bio-eg/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/bio-eg/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,39 +1,39 @@ -TOP=.. -include ${TOP}/Makefile.defs - -PROG=bio-eg - -# filtering causes this example to fail... -FILTER = --filter -FILTER = - -.hi.o : - @ dummy - -${PROG} : Bio.o Main.lhs - ${GHC} -cpp -fglasgow-exts -o ${PROG} --make Main.lhs - -BioData.hs Bio.hs : Bio.y - ${HAPPY} --info --glr --ghc ${FILTER} $< - -Bio.o : Bio.hs BioData.hi - ${GHC} -cpp -fglasgow-exts -O2 -c Bio.hs - -DATA_FLAGS = -funfolding-use-threshold0 -fno-strictness -BioData.hi BioData.o : BioData.hs - @echo "Making BioData.hs WITHOUT optimisation (for speed)" - ${GHC} -cpp -fglasgow-exts ${DATA_FLAGS} -c $< - -run : run12 - -run6 : ${PROG} - ./${PROG} +RTS -s -K5M -RTS `cat 1-600.dna` - -run12 : ${PROG} - rm -f out.1200 - ./${PROG} +RTS -s -K15M -RTS `cat 1-1200.dna` >& out.1200 - echo Expect NINE matches, got `grep '^[(,0-9]*G_Match' out.1200 | wc -l` - -clean : - rm -rf ${PROG} Bio.info Bio.hs BioData.hs *.o *.hi out.daVinci \ - out.1200 out.600 +TOP=.. +include ${TOP}/Makefile.defs + +PROG=bio-eg + +# filtering causes this example to fail... 
+FILTER = --filter +FILTER = + +.hi.o : + @ dummy + +${PROG} : Bio.o Main.lhs + ${GHC} -cpp -fglasgow-exts -o ${PROG} --make Main.lhs + +BioData.hs Bio.hs : Bio.y + ${HAPPY} --info --glr --ghc ${FILTER} $< + +Bio.o : Bio.hs BioData.hi + ${GHC} -cpp -fglasgow-exts -O2 -c Bio.hs + +DATA_FLAGS = -funfolding-use-threshold0 -fno-strictness +BioData.hi BioData.o : BioData.hs + @echo "Making BioData.hs WITHOUT optimisation (for speed)" + ${GHC} -cpp -fglasgow-exts ${DATA_FLAGS} -c $< + +run : run12 + +run6 : ${PROG} + ./${PROG} +RTS -s -K5M -RTS `cat 1-600.dna` + +run12 : ${PROG} + rm -f out.1200 + ./${PROG} +RTS -s -K15M -RTS `cat 1-1200.dna` > out.1200 2>&1 + echo Expect NINE matches, got `grep '^[(,0-9]*G_Match' out.1200 | wc -l` + +clean : + rm -rf ${PROG} Bio.info Bio.hs BioData.hs *.o *.hi out.daVinci \ + out.1200 out.600 diff -Nru happy-1.19.5/examples/glr/expr-eval/Makefile happy-1.19.8/examples/glr/expr-eval/Makefile --- happy-1.19.5/examples/glr/expr-eval/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/expr-eval/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,29 +1,29 @@ -TOP=.. -include ${TOP}/Makefile.defs - -OPT = -O -DECODE = --decode - -expr : Expr.hs Main.lhs - # might want to run happy with --ghc - ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs - -run : expr - ./expr "1+2*4-3" - -runn : expr - ./expr +RTS -s -RTS `perl -e 'print join ("+", (1 .. ${NUM}));'` | tee out-${NUM} - cat expr.stat >> out-${NUM} - -eof : expr - echo testing premature eof - ./expr "1+2*" - -err : expr - echo testing syntax error - ./expr "1+2*2++3" - -test : run eof err - -clean : - rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci +TOP=.. +include ${TOP}/Makefile.defs + +OPT = -O +DECODE = --decode + +expr : Expr.hs Main.lhs + # might want to run happy with --ghc + ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs + +run : expr + ./expr "1+2*4-3" + +runn : expr + ./expr +RTS -s -RTS `perl -e 'print join ("+", (1 .. 
${NUM}));'` | tee out-${NUM} + cat expr.stat >> out-${NUM} + +eof : expr + echo testing premature eof + ./expr "1+2*" + +err : expr + echo testing syntax error + ./expr "1+2*2++3" + +test : run eof err + +clean : + rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci diff -Nru happy-1.19.5/examples/glr/expr-monad/Makefile happy-1.19.8/examples/glr/expr-monad/Makefile --- happy-1.19.5/examples/glr/expr-monad/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/expr-monad/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,24 +1,24 @@ -TOP=.. -include ${TOP}/Makefile.defs - -DECODE = --decode - - -expr : Expr.hs Main.lhs - ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs - -run : expr - ./expr "1+2*4-3" - -eof : expr - echo testing premature eof - ./expr "1+2*" - -err : expr - echo testing syntax error - ./expr "1+2*2++3" - -test : run eof err - -clean : - rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci +TOP=.. +include ${TOP}/Makefile.defs + +DECODE = --decode + + +expr : Expr.hs Main.lhs + ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs + +run : expr + ./expr "1+2*4-3" + +eof : expr + echo testing premature eof + ./expr "1+2*" + +err : expr + echo testing syntax error + ./expr "1+2*2++3" + +test : run eof err + +clean : + rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci diff -Nru happy-1.19.5/examples/glr/expr-tree/Makefile happy-1.19.8/examples/glr/expr-tree/Makefile --- happy-1.19.5/examples/glr/expr-tree/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/expr-tree/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,29 +1,29 @@ -TOP=.. -include ${TOP}/Makefile.defs - -OPT = -O2 - -expr : Expr.hs Main.lhs - # might want to run happy with --ghc - ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs - -run : expr - ./expr "1+2*4-3" - -runn : expr - ./expr +RTS -s -RTS `perl -e 'print join ("+", (1 .. 
${NUM}));'` | tee out-${NUM} - cat expr.stat >> out-${NUM} - - -eof : expr - echo testing premature eof - ./expr "1+2*" - -err : expr - echo testing syntax error - ./expr "1+2*2++3" - -test : run eof err - -clean : - rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci +TOP=.. +include ${TOP}/Makefile.defs + +OPT = -O2 + +expr : Expr.hs Main.lhs + # might want to run happy with --ghc + ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs + +run : expr + ./expr "1+2*4-3" + +runn : expr + ./expr +RTS -s -RTS `perl -e 'print join ("+", (1 .. ${NUM}));'` | tee out-${NUM} + cat expr.stat >> out-${NUM} + + +eof : expr + echo testing premature eof + ./expr "1+2*" + +err : expr + echo testing syntax error + ./expr "1+2*2++3" + +test : run eof err + +clean : + rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci diff -Nru happy-1.19.5/examples/glr/hidden-leftrec/Makefile happy-1.19.8/examples/glr/hidden-leftrec/Makefile --- happy-1.19.5/examples/glr/hidden-leftrec/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/hidden-leftrec/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,21 +1,21 @@ -TOP=.. -include ${TOP}/Makefile.defs - -expr : Expr.hs Main.lhs - ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs - -run : expr - ./expr "+ 1 1 1 1 1 1 " - -eof : expr - echo testing premature eof - ./expr "" - -err : expr - echo testing syntax error - ./expr "+ 1 +" - -test : run eof err - -clean : - rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci +TOP=.. 
+include ${TOP}/Makefile.defs + +expr : Expr.hs Main.lhs + ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs + +run : expr + ./expr "+ 1 1 1 1 1 1 " + +eof : expr + echo testing premature eof + ./expr "" + +err : expr + echo testing syntax error + ./expr "+ 1 +" + +test : run eof err + +clean : + rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci diff -Nru happy-1.19.5/examples/glr/highly-ambiguous/Makefile happy-1.19.8/examples/glr/highly-ambiguous/Makefile --- happy-1.19.5/examples/glr/highly-ambiguous/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/highly-ambiguous/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,21 +1,21 @@ -TOP=.. -include ${TOP}/Makefile.defs - -expr : Expr.hs Main.lhs - ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs - -NUM=20 -run : expr - ./expr +RTS -s -RTS ${NUM} | grep ^Ok - -run30 : - make run NUM=30 - -test : run eof err - -clean : - rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci - -tar : - tar chzf aj2.tgz Expr*hs Expr*y Main*hs D*hs - +TOP=.. 
+include ${TOP}/Makefile.defs + +expr : Expr.hs Main.lhs + ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs + +NUM=20 +run : expr + ./expr +RTS -s -RTS ${NUM} | grep ^Ok + +run30 : + make run NUM=30 + +test : run eof err + +clean : + rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci + +tar : + tar chzf aj2.tgz Expr*hs Expr*y Main*hs D*hs + diff -Nru happy-1.19.5/examples/glr/Makefile happy-1.19.8/examples/glr/Makefile --- happy-1.19.5/examples/glr/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -10,4 +10,4 @@ nlp bio-eg loop : - for d in ${DIRS}; do (cd $$d && make ${CMD}); done + for d in ${DIRS}; do (cd $$d && make ${CMD}) || exit 1; done diff -Nru happy-1.19.5/examples/glr/Makefile.defs happy-1.19.8/examples/glr/Makefile.defs --- happy-1.19.5/examples/glr/Makefile.defs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/Makefile.defs 2017-10-12 07:46:11.000000000 +0000 @@ -1,7 +1,7 @@ .SUFFIXES: .y .hs .exe OPT= -GHC=ghc -rtsopts -I../common -i../common ${OPT} +GHC=ghc -rtsopts -I../common -i../common -fno-warn-tabs ${OPT} # -dshow-passes HAPPY=happy diff -Nru happy-1.19.5/examples/glr/nlp/Makefile happy-1.19.8/examples/glr/nlp/Makefile --- happy-1.19.5/examples/glr/nlp/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/nlp/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,21 +1,21 @@ -TOP=.. -include ${TOP}/Makefile.defs - -english : English.hs Main.lhs - ${GHC} -cpp -fglasgow-exts -o english --make Main.lhs - -run : english - ./english "the man saw the race with a telescope" - -eof : english - echo testing premature eof - ./english "the man saw a" - -err : english - echo testing syntax error - ./english "the the man saw race" - -test : run eof err - -clean : - rm -rf english English.info English.hs EnglishData.hs *.o *.hi out.daVinci +TOP=.. 
+include ${TOP}/Makefile.defs + +english : English.hs Main.lhs + ${GHC} -cpp -fglasgow-exts -o english --make Main.lhs + +run : english + ./english "the man saw the race with a telescope" + +eof : english + echo testing premature eof + ./english "the man saw a" + +err : english + echo testing syntax error + ./english "the the man saw race" + +test : run eof err + +clean : + rm -rf english English.info English.hs EnglishData.hs *.o *.hi out.daVinci diff -Nru happy-1.19.5/examples/glr/packing/Makefile happy-1.19.8/examples/glr/packing/Makefile --- happy-1.19.5/examples/glr/packing/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/glr/packing/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,27 +1,27 @@ -TOP = .. -include $(TOP)/Makefile.defs - -FILTER = --filter -FILTER = - -.y.hs : - ${HAPPY} -i -l $*.y ${FILTER} - -expr : Expr.hs Main.lhs - ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs - -run : expr - ./expr "+ 1 1 1 1 1 1 " - -eof : expr - echo testing premature eof - ./expr "" - -err : expr - echo testing syntax error - ./expr "+ 1 +" - -test : run eof err - -clean : - rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci +TOP = .. 
+include $(TOP)/Makefile.defs + +FILTER = --filter +FILTER = + +.y.hs : + ${HAPPY} -i -l $*.y ${FILTER} + +expr : Expr.hs Main.lhs + ${GHC} -cpp -fglasgow-exts -o expr --make Main.lhs + +run : expr + ./expr "+ 1 1 1 1 1 1 " + +eof : expr + echo testing premature eof + ./expr "" + +err : expr + echo testing syntax error + ./expr "+ 1 +" + +test : run eof err + +clean : + rm -rf expr Expr.info Expr.hs ExprData.hs *.o *.hi out.daVinci diff -Nru happy-1.19.5/examples/igloo/Lexer.hs happy-1.19.8/examples/igloo/Lexer.hs --- happy-1.19.5/examples/igloo/Lexer.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/igloo/Lexer.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,208 +0,0 @@ -{-# OPTIONS -cpp #-} -{-# LINE 2 "Lexer.x" #-} -module Lexer (lex_tok) where - -import Control.Monad.State (StateT, get) -import ParserM (ParserM (..), mkT, Token(..), St, start_code, - StartCode, Action, set_start_code, - show_pos, position, input, - AlexInput, alexGetChar, alexInputPrevChar) - -import Data.Array -import Data.Char (ord) -import Data.Array.Base (unsafeAt) -alex_base :: Array Int Int -alex_base = listArray (0,10) [-8,-3,2,0,-91,-97,-93,0,-83,-77,-80] - -alex_table :: Array Int Int -alex_table = listArray (0,257) [0,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,3,4,5,7,8,9,0,0,2,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0] - -alex_check :: Array Int Int -alex_check = listArray (0,257) 
[-1,9,10,11,12,13,9,10,11,12,13,9,10,11,12,13,107,114,111,102,97,101,-1,-1,32,-1,-1,-1,-1,32,-1,-1,-1,-1,32,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,102,-1,-1,-1,-1,-1,108,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1] - -alex_deflt :: Array Int Int -alex_deflt = listArray (0,10) [-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1] - -alex_accept = listArray (0::Int,10) [[],[],[(AlexAccSkip)],[(AlexAcc (alex_action_1))],[],[],[],[(AlexAcc (alex_action_2))],[],[],[]] -{-# LINE 18 "Lexer.x" #-} -get_tok :: AlexInput -> StateT St (Either String) (Token, AlexInput) -get_tok = \i -> - do st <- get - case alexScan i (start_code st) of - AlexEOF -> return (TEOF, i) - AlexError _ -> fail $ "Lexical error at " ++ show_pos (position i) - AlexSkip i' _ -> get_tok i' - AlexToken i' l a -> a (i', take l (input i)) - -begin :: StartCode -> Action -begin sc (i, _) = do set_start_code sc - get_tok i - -lex_tok :: (Token -> ParserM a) -> ParserM a -lex_tok cont = ParserM $ \i -> - do (tok, iz) <- get_tok i - case cont tok of - ParserM x -> x iz - -alex_action_1 = mkT TFork -alex_action_2 = mkT TLeaf -{-# LINE 1 "GenericTemplate.hs" #-} --- ----------------------------------------------------------------------------- --- ALEX TEMPLATE --- --- This code is in the PUBLIC DOMAIN; you may copy it freely and use --- it for any purpose whatsoever. 
- --- ----------------------------------------------------------------------------- --- INTERNALS and main scanner engine - -{-# LINE 22 "GenericTemplate.hs" #-} - - - - - - - - - - - - - - - - - - - - - - - -{-# LINE 66 "GenericTemplate.hs" #-} - -alexIndexShortOffAddr arr off = arr ! off - - --- ----------------------------------------------------------------------------- --- Main lexing routines - -data AlexReturn a - = AlexEOF - | AlexError !AlexInput - | AlexSkip !AlexInput !Int - | AlexToken !AlexInput !Int a - --- alexScan :: AlexInput -> StartCode -> Maybe (AlexInput,Int,act) -alexScan input (sc) - = alexScanUser undefined input (sc) - -alexScanUser user input (sc) - = case alex_scan_tkn user input (0) input sc AlexNone of - (AlexNone, input') -> - case alexGetChar input of - Nothing -> - - - - AlexEOF - Just _ -> - - - - AlexError input - - (AlexLastSkip input len, _) -> - - - - AlexSkip input len - - (AlexLastAcc k input len, _) -> - - - - AlexToken input len k - - --- Push the input through the DFA, remembering the most recent accepting --- state it encountered. 
- -alex_scan_tkn user orig_input len input s last_acc = - input `seq` -- strict in the input - case s of - (-1) -> (last_acc, input) - _ -> alex_scan_tkn' user orig_input len input s last_acc - -alex_scan_tkn' user orig_input len input s last_acc = - let - new_acc = check_accs (alex_accept `unsafeAt` (s)) - in - new_acc `seq` - case alexGetChar input of - Nothing -> (new_acc, input) - Just (c, new_input) -> - - - - let - base = alexIndexShortOffAddr alex_base s - (ord_c) = ord c - offset = (base + ord_c) - check = alexIndexShortOffAddr alex_check offset - - new_s = if (offset >= (0)) && (check == ord_c) - then alexIndexShortOffAddr alex_table offset - else alexIndexShortOffAddr alex_deflt s - in - alex_scan_tkn user orig_input (len + (1)) new_input new_s new_acc - - where - check_accs [] = last_acc - check_accs (AlexAcc a : _) = AlexLastAcc a input (len) - check_accs (AlexAccSkip : _) = AlexLastSkip input (len) - check_accs (AlexAccPred a pred : rest) - | pred user orig_input (len) input - = AlexLastAcc a input (len) - check_accs (AlexAccSkipPred pred : rest) - | pred user orig_input (len) input - = AlexLastSkip input (len) - check_accs (_ : rest) = check_accs rest - -data AlexLastAcc a - = AlexNone - | AlexLastAcc a !AlexInput !Int - | AlexLastSkip !AlexInput !Int - -data AlexAcc a user - = AlexAcc a - | AlexAccSkip - | AlexAccPred a (AlexAccPred user) - | AlexAccSkipPred (AlexAccPred user) - -type AlexAccPred user = user -> AlexInput -> Int -> AlexInput -> Bool - --- ----------------------------------------------------------------------------- --- Predicates on a rule - -alexAndPred p1 p2 user in1 len in2 - = p1 user in1 len in2 && p2 user in1 len in2 - ---alexPrevCharIsPred :: Char -> AlexAccPred _ -alexPrevCharIs c _ input _ _ = c == alexInputPrevChar input - ---alexPrevCharIsOneOfPred :: Array Char Bool -> AlexAccPred _ -alexPrevCharIsOneOf arr _ input _ _ = arr ! 
alexInputPrevChar input - ---alexRightContext :: Int -> AlexAccPred _ -alexRightContext (sc) user _ _ input = - case alex_scan_tkn user input (0) input sc AlexNone of - (AlexNone, _) -> False - _ -> True - -- TODO: there's no need to find the longest - -- match when checking the right context, just - -- the first match will do. - --- used by wrappers -iUnbox (i) = i diff -Nru happy-1.19.5/examples/igloo/Lexer.x happy-1.19.8/examples/igloo/Lexer.x --- happy-1.19.5/examples/igloo/Lexer.x 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/igloo/Lexer.x 2017-10-12 07:46:11.000000000 +0000 @@ -6,7 +6,7 @@ import ParserM (ParserM (..), mkT, Token(..), St, start_code, StartCode, Action, set_start_code, show_pos, position, input, - AlexInput, alexGetChar, alexInputPrevChar) + AlexInput, alexGetByte, alexInputPrevChar) } words :- diff -Nru happy-1.19.5/examples/igloo/Makefile happy-1.19.8/examples/igloo/Makefile --- happy-1.19.5/examples/igloo/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/igloo/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -6,8 +6,9 @@ test: echo fork leaf leaf | ./foo - echo fork leaf leafqleaf | ./foo - echo leaf leaf leaf leaf leaf | ./foo + -echo fork leaf leafqleaf | ./foo + -echo leaf leaf leaf leaf leaf | ./foo + @echo ok clean: rm -f *.o *.hi Parser.hs Lexer.hs foo diff -Nru happy-1.19.5/examples/igloo/Parser.hs happy-1.19.8/examples/igloo/Parser.hs --- happy-1.19.5/examples/igloo/Parser.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/igloo/Parser.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,333 +0,0 @@ --- parser produced by Happy Version 1.14 - -module Parser (parse) where - -import Lexer (lex_tok) -import ParserM (Token(..), Tree(..), ParserM, run_parser, get_pos, show_pos, - happyError) - -data HappyAbsSyn - = HappyTerminal Token - | HappyErrorToken Int - | HappyAbsSyn4 (Tree) - -type HappyReduction = - Int - -> (Token) - -> HappyState (Token) (HappyStk HappyAbsSyn -> ParserM(HappyAbsSyn)) - 
-> [HappyState (Token) (HappyStk HappyAbsSyn -> ParserM(HappyAbsSyn))] - -> HappyStk HappyAbsSyn - -> ParserM(HappyAbsSyn) - -action_0, - action_1, - action_2, - action_3, - action_4, - action_5, - action_6 :: Int -> HappyReduction - -happyReduce_1, - happyReduce_2 :: HappyReduction - -action_0 (5) = happyShift action_4 -action_0 (6) = happyShift action_2 -action_0 (4) = happyGoto action_3 -action_0 _ = happyFail - -action_1 (6) = happyShift action_2 -action_1 _ = happyFail - -action_2 _ = happyReduce_1 - -action_3 (7) = happyAccept -action_3 _ = happyFail - -action_4 (5) = happyShift action_4 -action_4 (6) = happyShift action_2 -action_4 (4) = happyGoto action_5 -action_4 _ = happyFail - -action_5 (5) = happyShift action_4 -action_5 (6) = happyShift action_2 -action_5 (4) = happyGoto action_6 -action_5 _ = happyFail - -action_6 _ = happyReduce_2 - -happyReduce_1 = happySpecReduce_1 4 happyReduction_1 -happyReduction_1 _ - = HappyAbsSyn4 - (Leaf - ) - -happyReduce_2 = happySpecReduce_3 4 happyReduction_2 -happyReduction_2 (HappyAbsSyn4 happy_var_3) - (HappyAbsSyn4 happy_var_2) - _ - = HappyAbsSyn4 - (Fork happy_var_2 happy_var_3 - ) -happyReduction_2 _ _ _ = notHappyAtAll - -happyNewToken action sts stk - = lex_tok(\tk -> - let cont i = action i i tk (HappyState action) sts stk in - case tk of { - TEOF -> action 7 7 (error "reading EOF!") (HappyState action) sts stk; - TFork -> cont 5; - TLeaf -> cont 6; - _ -> happyError - }) - -happyThen :: ParserM a -> (a -> ParserM b) -> ParserM b -happyThen = (>>=) -happyReturn :: a -> ParserM a -happyReturn = (return) -happyThen1 = happyThen -happyReturn1 = happyReturn - -parsex = happyThen (happyParse action_0) (\x -> case x of {HappyAbsSyn4 z -> happyReturn z; _other -> notHappyAtAll }) - -happySeq = happyDontSeq - -parse :: String -> Either String Tree -parse = run_parser parsex -{-# LINE 1 "GenericTemplate.hs" #-} --- $Id: Parser.hs,v 1.1 2004/02/20 11:38:05 simonmar Exp $ - -{-# LINE 15 "GenericTemplate.hs" #-} - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -infixr 9 `HappyStk` -data HappyStk a = HappyStk a (HappyStk a) - ------------------------------------------------------------------------------ --- starting the parse - -happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll - ------------------------------------------------------------------------------ --- Accepting the parse - -happyAccept j tk st sts (HappyStk ans _) = - - (happyReturn1 ans) - ------------------------------------------------------------------------------ --- Arrays only: do the next action - -{-# LINE 150 "GenericTemplate.hs" #-} - - ------------------------------------------------------------------------------ --- HappyState data type (not arrays) - - - -newtype HappyState b c = HappyState - (Int -> -- token number - Int -> -- token number (yes, again) - b -> -- token semantic value - HappyState b c -> -- current state - [HappyState b c] -> -- state stack - c) - - - ------------------------------------------------------------------------------ --- Shifting a token - -happyShift new_state (1) tk st sts stk@(x `HappyStk` _) = - let i = (case x of { HappyErrorToken (i) -> i }) in --- trace "shifting the error token" $ - new_state i i tk (HappyState (new_state)) ((st):(sts)) (stk) - -happyShift new_state i tk st sts stk = - happyNewToken new_state ((st):(sts)) ((HappyTerminal (tk))`HappyStk`stk) - --- happyReduce is specialised for the common cases. 
- -happySpecReduce_0 i fn (1) tk st sts stk - = happyFail (1) tk st sts stk -happySpecReduce_0 nt fn j tk st@((HappyState (action))) sts stk - = action nt j tk st ((st):(sts)) (fn `HappyStk` stk) - -happySpecReduce_1 i fn (1) tk st sts stk - = happyFail (1) tk st sts stk -happySpecReduce_1 nt fn j tk _ sts@(((st@(HappyState (action))):(_))) (v1`HappyStk`stk') - = let r = fn v1 in - happySeq r (action nt j tk st sts (r `HappyStk` stk')) - -happySpecReduce_2 i fn (1) tk st sts stk - = happyFail (1) tk st sts stk -happySpecReduce_2 nt fn j tk _ ((_):(sts@(((st@(HappyState (action))):(_))))) (v1`HappyStk`v2`HappyStk`stk') - = let r = fn v1 v2 in - happySeq r (action nt j tk st sts (r `HappyStk` stk')) - -happySpecReduce_3 i fn (1) tk st sts stk - = happyFail (1) tk st sts stk -happySpecReduce_3 nt fn j tk _ ((_):(((_):(sts@(((st@(HappyState (action))):(_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk') - = let r = fn v1 v2 v3 in - happySeq r (action nt j tk st sts (r `HappyStk` stk')) - -happyReduce k i fn (1) tk st sts stk - = happyFail (1) tk st sts stk -happyReduce k nt fn j tk st sts stk - = case happyDrop (k - ((1) :: Int)) sts of - sts1@(((st1@(HappyState (action))):(_))) -> - let r = fn stk in -- it doesn't hurt to always seq here... 
- happyDoSeq r (action nt j tk st1 sts1 r) - -happyMonadReduce k nt fn (1) tk st sts stk - = happyFail (1) tk st sts stk -happyMonadReduce k nt fn j tk st sts stk = - happyThen1 (fn stk) (\r -> action nt j tk st1 sts1 (r `HappyStk` drop_stk)) - where sts1@(((st1@(HappyState (action))):(_))) = happyDrop k ((st):(sts)) - drop_stk = happyDropStk k stk - -happyDrop (0) l = l -happyDrop n ((_):(t)) = happyDrop (n - ((1) :: Int)) t - -happyDropStk (0) l = l -happyDropStk n (x `HappyStk` xs) = happyDropStk (n - ((1)::Int)) xs - ------------------------------------------------------------------------------ --- Moving to a new state after a reduction - - - - - - - - - -happyGoto action j tk st = action j j tk (HappyState action) - - ------------------------------------------------------------------------------ --- Error recovery ((1) is the error token) - --- parse error if we are in recovery and we fail again -happyFail (1) tk old_st _ stk = --- trace "failing" $ - happyError - - -{- We don't need state discarding for our restricted implementation of - "error". In fact, it can cause some bogus parses, so I've disabled it - for now --SDM - --- discard a state -happyFail (1) tk old_st (((HappyState (action))):(sts)) - (saved_tok `HappyStk` _ `HappyStk` stk) = --- trace ("discarding state, depth " ++ show (length stk)) $ - action (1) (1) tk (HappyState (action)) sts ((saved_tok`HappyStk`stk)) --} - --- Enter error recovery: generate an error token, --- save the old token and carry on. -happyFail i tk (HappyState (action)) sts stk = --- trace "entering error recovery" $ - action (1) (1) tk (HappyState (action)) sts ( (HappyErrorToken (i)) `HappyStk` stk) - --- Internal happy errors: - -notHappyAtAll = error "Internal Happy error\n" - ------------------------------------------------------------------------------ --- Hack to get the typechecker to accept our action functions - - - - - - - ------------------------------------------------------------------------------ --- Seq-ing. 
If the --strict flag is given, then Happy emits --- happySeq = happyDoSeq --- otherwise it emits --- happySeq = happyDontSeq - -happyDoSeq, happyDontSeq :: a -> b -> b -happyDoSeq a b = a `seq` b -happyDontSeq a b = b - ------------------------------------------------------------------------------ --- Don't inline any functions from the template. GHC has a nasty habit --- of deciding to inline happyGoto everywhere, which increases the size of --- the generated parser quite a bit. - - - - - - - - - -{-# NOINLINE happyShift #-} -{-# NOINLINE happySpecReduce_0 #-} -{-# NOINLINE happySpecReduce_1 #-} -{-# NOINLINE happySpecReduce_2 #-} -{-# NOINLINE happySpecReduce_3 #-} -{-# NOINLINE happyReduce #-} -{-# NOINLINE happyMonadReduce #-} -{-# NOINLINE happyGoto #-} -{-# NOINLINE happyFail #-} - --- end of Happy Template. diff -Nru happy-1.19.5/examples/igloo/ParserM.hs happy-1.19.8/examples/igloo/ParserM.hs --- happy-1.19.5/examples/igloo/ParserM.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/examples/igloo/ParserM.hs 2017-10-12 07:46:11.000000000 +0000 @@ -13,23 +13,34 @@ -- Positions get_pos, show_pos, -- Input - alexGetChar, alexInputPrevChar, input, position, + alexGetByte, alexInputPrevChar, input, position, -- Other happyError ) where -import Control.Monad.Error (throwError) +import Control.Applicative (Applicative(..)) +import Control.Monad (ap, liftM) +import Control.Monad.Except (throwError) import Control.Monad.State (StateT, evalStateT, get, put) import Control.Monad.Trans (lift) +import Data.Char (ord) +import Data.Word (Word8) -- Parser Monad newtype ParserM a = ParserM (AlexInput -> StateT St (Either String) (AlexInput, a)) +instance Functor ParserM where + fmap = liftM + +instance Applicative ParserM where + pure a = ParserM $ \i -> return (i, a) + (<*>) = ap + instance Monad ParserM where + return = pure ParserM m >>= k = ParserM $ \i -> do (i', x) <- m i case k x of ParserM y -> y i' - return a = ParserM $ \i -> return (i, a) fail err = ParserM $ 
\_ -> fail err run_parser :: ParserM a -> (String -> Either String a) @@ -95,9 +106,10 @@ data AlexInput = AlexInput {position :: !Pos, input :: String} -alexGetChar :: AlexInput -> Maybe (Char,AlexInput) -alexGetChar (AlexInput p (x:xs)) = Just (x, AlexInput (alexMove p x) xs) -alexGetChar (AlexInput _ []) = Nothing +alexGetByte :: AlexInput -> Maybe (Word8,AlexInput) +alexGetByte (AlexInput p (x:xs)) = Just (fromIntegral (ord x), + AlexInput (alexMove p x) xs) +alexGetByte (AlexInput _ []) = Nothing alexInputPrevChar :: AlexInput -> Char alexInputPrevChar _ = error "Lexer doesn't implement alexInputPrevChar" diff -Nru happy-1.19.5/happy.cabal happy-1.19.8/happy.cabal --- happy-1.19.5/happy.cabal 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/happy.cabal 2017-10-12 07:46:11.000000000 +0000 @@ -1,13 +1,13 @@ name: happy -version: 1.19.5 -license: BSD3 +version: 1.19.8 +license: BSD2 license-file: LICENSE copyright: (c) Andy Gill, Simon Marlow author: Andy Gill and Simon Marlow maintainer: Simon Marlow bug-reports: https://github.com/simonmar/happy/issues stability: stable -homepage: http://www.haskell.org/happy/ +homepage: https://www.haskell.org/happy/ synopsis: Happy is a parser generator for Haskell category: Development cabal-version: >= 1.8 @@ -18,11 +18,13 @@ specification in BNF, Happy generates Haskell code to parse the grammar. Happy works in a similar way to the @yacc@ tool for C. 
+tested-with: GHC==8.0.1, GHC==7.10.3, GHC==7.8.4, GHC==7.6.3 + extra-source-files: ANNOUNCE CHANGES Makefile - README + README.md TODO doc/Makefile doc/aclocal.m4 @@ -85,9 +87,7 @@ examples/igloo/Parser.y examples/igloo/Foo.hs examples/igloo/README - examples/igloo/Lexer.hs examples/igloo/Lexer.x - examples/igloo/Parser.hs examples/README examples/Calc.ly examples/DavesExample.ly @@ -102,27 +102,41 @@ templates/GLR_Lib.hs tests/AttrGrammar001.y tests/AttrGrammar002.y - tests/error001.y - tests/error001.stdout - tests/error001.stderr - tests/monad001.y - tests/monaderror.y tests/Makefile - tests/TestMulti.ly tests/Partial.ly - tests/precedence001.ly + tests/Test.ly + tests/TestMulti.ly tests/TestPrecedence.ly tests/bogus-token.y - tests/monad002.ly tests/bug001.ly tests/bug002.y - tests/Test.ly + tests/error001.stderr + tests/error001.stdout + tests/error001.y + tests/monad001.y + tests/monad002.ly + tests/monaderror.y + tests/precedence001.ly tests/precedence002.y tests/test_rules.y + tests/issue91.y + tests/issue93.y + tests/issue94.y + tests/issue95.y + tests/monaderror-explist.y + tests/typeclass_monad001.y + tests/typeclass_monad002.ly + tests/typeclass_monad_lexer.y + +custom-setup + setup-depends: Cabal <2.1, + base <5, + directory <1.4, + filepath <1.5 source-repository head type: git - location: http://github.com/simonmar/happy.git + location: https://github.com/simonmar/happy.git flag small_base description: Deprecated. Does nothing. 
@@ -133,11 +147,12 @@ build-depends: base < 5, array, - containers, - mtl >= 1.0 + containers >= 0.4.2, + mtl >= 2.2.1 + -- mtl-2.2.1 added Control.Monad.Except extensions: CPP, MagicHash, FlexibleContexts - ghc-options: -Wall -fno-warn-type-defaults + ghc-options: -Wall other-modules: AbsSyn First @@ -156,8 +171,10 @@ AttrGrammar AttrGrammarParser ParamRules + PrettyGrammar test-suite tests type: exitcode-stdio-1.0 main-is: test.hs build-depends: base, process + diff -Nru happy-1.19.5/Makefile happy-1.19.8/Makefile --- happy-1.19.5/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,10 +1,18 @@ -TOP = .. -include $(TOP)/mk/boilerplate.mk - -SUBDIRS = src templates doc - -include $(TOP)/mk/target.mk - - +HAPPY = happy +HAPPY_OPTS = -agc +ALEX = alex +ALEX_OPTS = -g +sdist :: + @if [ "`git status -s`" != '' ]; then \ + echo Tree is not clean; \ + exit 1; \ + fi + $(HAPPY) $(HAPPY_OPTS) src/Parser.ly -o src/Parser.hs + $(HAPPY) $(HAPPY_OPTS) src/AttrGrammarParser.ly -o src/AttrGrammer.hs + mv src/Parser.ly src/Parser.ly.boot + mv src/AttrGrammarParser.ly src/AttrGrammarParser.ly.boot + cabal sdist + git checkout . + git clean -f diff -Nru happy-1.19.5/README happy-1.19.8/README --- happy-1.19.5/README 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/README 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -This is Happy version 1.18, a parser generator for Haskell 98. - - http://www.haskell.org/happy/ - http://hackage.haskell.org/cgi-bin/hackage-scripts/package/happy - -Happy is built using Cabal. First install GHC, then: - - $ runhaskell Setup.lhs configure - $ runhaskell Setup.lhs build - $ runhaskell Setup.lhs install - -Complete documentation can be found in the directory 'doc', in -DocBook XML format. To format the documentation, the DocBook-Tools -suite (see http://sourceware.cygnus.com/docbook-tools/) -provides all the bits & pieces you need. 
Alternatively, pre-formatted -documentation is available from Happy's homepage (URL above). - -The directory 'examples' contains some example parsers that use Happy. - -For information on copying and distributing this program, see the file -LICENSE in this directory. - -Bug reports should be sent to marlowsd@gmail.com - -Happy Parsing! - -Simon. diff -Nru happy-1.19.5/README.md happy-1.19.8/README.md --- happy-1.19.5/README.md 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/README.md 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,31 @@ +# Happy + +Happy is a parser generator for Haskell 98 (and later). + +[![Build Status](https://secure.travis-ci.org/simonmar/happy.svg?branch=master)](http://travis-ci.org/simonmar/happy) + +* https://www.haskell.org/happy/ +* http://hackage.haskell.org/package/happy + +Happy is built using Cabal. First install GHC, then: +``` + $ cabal configure + $ cabal build + $ cabal install +``` +Complete documentation can be found in the directory 'doc', in +DocBook XML format. To format the documentation, the DocBook-Tools +suite (see http://wiki.docbook.org/DocBookTools) +provides all the bits & pieces you need. Alternatively, pre-formatted +documentation is available from Happy's homepage (URL above). + +The directory 'examples' contains some example parsers that use Happy. + +For information on copying and distributing this program, see the file +LICENSE in this directory. + +Bugs should be reported at: https://github.com/simonmar/happy/issues + +Happy Parsing! + +Simon. 
diff -Nru happy-1.19.5/Setup.lhs happy-1.19.8/Setup.lhs --- happy-1.19.5/Setup.lhs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/Setup.lhs 2017-10-12 07:46:11.000000000 +0000 @@ -18,11 +18,11 @@ main :: IO () main = defaultMainWithHooks simpleUserHooks { postBuild = myPostBuild, - postClean = myPostClean, - copyHook = myCopy, - instHook = myInstall } + postClean = myPostClean, + copyHook = myCopy, + instHook = myInstall } --- hack to turn cpp-style '# 27 "GenericTemplate.hs"' into +-- hack to turn cpp-style '# 27 "GenericTemplate.hs"' into -- '{-# LINE 27 "GenericTemplate.hs" #-}'. mungeLinePragma line = case symbols line of syms | Just prag <- getLinePrag syms -> prag @@ -66,40 +66,40 @@ instHook simpleUserHooks pkg_descr' lbi hooks flags where pkg_descr' = pkg_descr { dataFiles = dataFiles pkg_descr ++ all_template_files - } + } myCopy pkg_descr lbi hooks copy_flags = copyHook simpleUserHooks pkg_descr' lbi hooks copy_flags where pkg_descr' = pkg_descr { dataFiles = dataFiles pkg_descr ++ all_template_files - } + } all_template_files :: [FilePath] all_template_files = map fst (templates ++ glr_base_templates ++ glr_templates) templates :: [(FilePath,[String])] templates = [ - ("HappyTemplate" , []), - ("HappyTemplate-ghc" , ["-DHAPPY_GHC"]), - ("HappyTemplate-coerce" , ["-DHAPPY_GHC","-DHAPPY_COERCE"]), - ("HappyTemplate-arrays" , ["-DHAPPY_ARRAY"]), - ("HappyTemplate-arrays-ghc" , ["-DHAPPY_ARRAY","-DHAPPY_GHC"]), - ("HappyTemplate-arrays-coerce" , ["-DHAPPY_ARRAY","-DHAPPY_GHC","-DHAPPY_COERCE"]), - ("HappyTemplate-arrays-debug" , ["-DHAPPY_ARRAY","-DHAPPY_DEBUG"]), - ("HappyTemplate-arrays-ghc-debug" , ["-DHAPPY_ARRAY","-DHAPPY_GHC","-DHAPPY_DEBUG"]), - ("HappyTemplate-arrays-coerce-debug" , ["-DHAPPY_ARRAY","-DHAPPY_GHC","-DHAPPY_COERCE","-DHAPPY_DEBUG"]) + ("HappyTemplate" , []), + ("HappyTemplate-ghc" , ["-DHAPPY_GHC"]), + ("HappyTemplate-coerce" , ["-DHAPPY_GHC","-DHAPPY_COERCE"]), + ("HappyTemplate-arrays" , ["-DHAPPY_ARRAY"]), + 
("HappyTemplate-arrays-ghc" , ["-DHAPPY_ARRAY","-DHAPPY_GHC"]), + ("HappyTemplate-arrays-coerce" , ["-DHAPPY_ARRAY","-DHAPPY_GHC","-DHAPPY_COERCE"]), + ("HappyTemplate-arrays-debug" , ["-DHAPPY_ARRAY","-DHAPPY_DEBUG"]), + ("HappyTemplate-arrays-ghc-debug" , ["-DHAPPY_ARRAY","-DHAPPY_GHC","-DHAPPY_DEBUG"]), + ("HappyTemplate-arrays-coerce-debug" , ["-DHAPPY_ARRAY","-DHAPPY_GHC","-DHAPPY_COERCE","-DHAPPY_DEBUG"]) ] glr_base_templates :: [(FilePath,[String])] glr_base_templates = [ - ("GLR_Base" , []) + ("GLR_Base" , []) ] glr_templates :: [(FilePath,[String])] glr_templates = [ - ("GLR_Lib" , []), - ("GLR_Lib-ghc" , ["-DHAPPY_GHC"]), - ("GLR_Lib-ghc-debug" , ["-DHAPPY_GHC", "-DHAPPY_DEBUG"]) + ("GLR_Lib" , []), + ("GLR_Lib-ghc" , ["-DHAPPY_GHC"]), + ("GLR_Lib-ghc-debug" , ["-DHAPPY_GHC", "-DHAPPY_DEBUG"]) ] \end{code} diff -Nru happy-1.19.5/src/AbsSyn.lhs happy-1.19.8/src/AbsSyn.lhs --- happy-1.19.5/src/AbsSyn.lhs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/AbsSyn.lhs 2017-10-12 07:46:11.000000000 +0000 @@ -7,10 +7,10 @@ Here is the abstract syntax of the language we parse. > module AbsSyn ( -> AbsSyn(..), Directive(..), +> AbsSyn(..), Directive(..), ErrorHandlerType(..), > getTokenType, getTokenSpec, getParserNames, getLexer, > getImportedIdentity, getMonad, getError, -> getPrios, getPrioNames, getExpect, +> getPrios, getPrioNames, getExpect, getErrorHandlerType, > getAttributes, getAttributetype, > Rule,Prod,Term(..) > ) where @@ -40,11 +40,16 @@ ToDo: find a consistent way to analyse all the directives together and generate some error messages. 
+> data ErrorHandlerType +> = ErrorHandlerTypeDefault +> | ErrorHandlerTypeExpList +> > data Directive a > = TokenType String -- %tokentype > | TokenSpec [(a,String)] -- %token > | TokenName String (Maybe String) Bool -- %name/%partial (True <=> %partial) > | TokenLexer String String -- %lexer +> | TokenErrorHandlerType String -- %errorhandlertype > | TokenImportedIdentity -- %importedidentity > | TokenMonad String String String String -- %monad > | TokenNonassoc [String] -- %nonassoc @@ -125,6 +130,16 @@ > [] -> Nothing > _ -> error "multiple error directives" +> getErrorHandlerType :: [Directive t] -> ErrorHandlerType +> getErrorHandlerType ds +> = case [ a | (TokenErrorHandlerType a) <- ds ] of +> [t] -> case t of +> "explist" -> ErrorHandlerTypeExpList +> "default" -> ErrorHandlerTypeDefault +> _ -> error "unsupported %errorhandlertype value" +> [] -> ErrorHandlerTypeDefault +> _ -> error "multiple errorhandlertype directives" + > getAttributes :: [Directive t] -> [(String, String)] > getAttributes ds > = [ (ident,typ) | (TokenAttribute ident typ) <- ds ] diff -Nru happy-1.19.5/src/AttrGrammarParser.hs happy-1.19.8/src/AttrGrammarParser.hs --- happy-1.19.5/src/AttrGrammarParser.hs 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/src/AttrGrammarParser.hs 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,616 @@ +{-# OPTIONS_GHC -w #-} +{-# OPTIONS -fglasgow-exts -cpp #-} +{-# OPTIONS_GHC -w #-} +module AttrGrammarParser (agParser) where +import ParseMonad +import AttrGrammar +import qualified Data.Array as Happy_Data_Array +import qualified GHC.Exts as Happy_GHC_Exts +import Control.Applicative(Applicative(..)) + +-- parser produced by Happy Version 1.19.4 + +newtype HappyAbsSyn = HappyAbsSyn HappyAny +#if __GLASGOW_HASKELL__ >= 607 +type HappyAny = Happy_GHC_Exts.Any +#else +type HappyAny = forall a . 
a +#endif +happyIn4 :: ([AgRule]) -> (HappyAbsSyn ) +happyIn4 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn4 #-} +happyOut4 :: (HappyAbsSyn ) -> ([AgRule]) +happyOut4 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut4 #-} +happyIn5 :: ([AgRule]) -> (HappyAbsSyn ) +happyIn5 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn5 #-} +happyOut5 :: (HappyAbsSyn ) -> ([AgRule]) +happyOut5 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut5 #-} +happyIn6 :: (AgRule) -> (HappyAbsSyn ) +happyIn6 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn6 #-} +happyOut6 :: (HappyAbsSyn ) -> (AgRule) +happyOut6 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut6 #-} +happyIn7 :: ([AgToken]) -> (HappyAbsSyn ) +happyIn7 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn7 #-} +happyOut7 :: (HappyAbsSyn ) -> ([AgToken]) +happyOut7 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut7 #-} +happyIn8 :: ([AgToken]) -> (HappyAbsSyn ) +happyIn8 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn8 #-} +happyOut8 :: (HappyAbsSyn ) -> ([AgToken]) +happyOut8 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut8 #-} +happyInTok :: (AgToken) -> (HappyAbsSyn ) +happyInTok x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyInTok #-} +happyOutTok :: (HappyAbsSyn ) -> (AgToken) +happyOutTok x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOutTok #-} + + +happyActOffsets :: HappyAddr +happyActOffsets = HappyA# "\x0f\x00\x0f\x00\x00\x00\x30\x00\x0a\x00\x2e\x00\x2d\x00\x2b\x00\x14\x00\x0a\x00\x0a\x00\x0a\x00\x00\x00\x01\x00\x0a\x00\x0a\x00\x0a\x00\x0a\x00\x0a\x00\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x2c\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x0a\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x26\x00\x0a\x00\x00\x00\x01\x00\x00\x00\x00\x00"# + +happyGotoOffsets :: HappyAddr +happyGotoOffsets = HappyA# 
"\x18\x00\x1a\x00\x00\x00\x00\x00\x2a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x29\x00\x28\x00\x27\x00\x00\x00\x25\x00\x24\x00\x23\x00\x22\x00\x21\x00\x20\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x1d\x00\x1c\x00\x1b\x00\x19\x00\x0c\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\xfd\xff\x00\x00\x00\x00"# + +happyDefActions :: HappyAddr +happyDefActions = HappyA# "\xfb\xff\x00\x00\xfe\xff\xfc\xff\xf0\xff\x00\x00\x00\x00\x00\x00\x00\x00\xf0\xff\xf0\xff\xf0\xff\xf7\xff\xe8\xff\xf0\xff\xf0\xff\xf0\xff\xf0\xff\xf0\xff\xfb\xff\xfd\xff\xf1\xff\xf2\xff\xf3\xff\xf4\xff\xf5\xff\x00\x00\xe8\xff\xe8\xff\xe8\xff\xe8\xff\xe8\xff\xf0\xff\xe8\xff\xfa\xff\xf9\xff\xf8\xff\xe9\xff\xea\xff\xeb\xff\xec\xff\xee\xff\xed\xff\x00\x00\xf0\xff\xf6\xff\xe8\xff\xef\xff"# + +happyCheck :: HappyAddr +happyCheck = HappyA# "\xff\xff\x04\x00\x01\x00\x04\x00\x03\x00\x04\x00\x03\x00\x06\x00\x07\x00\x08\x00\x09\x00\x01\x00\x01\x00\x02\x00\x04\x00\x03\x00\x06\x00\x07\x00\x08\x00\x09\x00\x05\x00\x06\x00\x07\x00\x08\x00\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x04\x00\x0a\x00\x04\x00\x04\x00\x04\x00\x04\x00\x03\x00\x03\x00\x03\x00\x03\x00\x03\x00\x02\x00\x04\x00\x03\x00\x03\x00\x03\x00\x03\x00\x02\x00\x04\x00\xff\xff\x04\x00\x04\x00\x03\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# + +happyTable :: HappyAddr +happyTable = HappyA# "\x00\x00\x2f\x00\x1c\x00\x25\x00\x1d\x00\x1e\x00\x2d\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x0e\x00\x14\x00\x03\x00\x0f\x00\x26\x00\x10\x00\x11\x00\x12\x00\x13\x00\x05\x00\x06\x00\x07\x00\x08\x00\x08\x00\x02\x00\x03\x00\x02\x00\x03\x00\x27\x00\xff\xff\x28\x00\x29\x00\x2a\x00\x2b\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x2f\x00\x1a\x00\x22\x00\x23\x00\x24\x00\x0c\x00\x2d\x00\x0a\x00\x00\x00\x0b\x00\x0c\x00\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# + +happyReduceArr = Happy_Data_Array.array (1, 23) [ + (1 , happyReduce_1), + (2 , happyReduce_2), + (3 , 
happyReduce_3), + (4 , happyReduce_4), + (5 , happyReduce_5), + (6 , happyReduce_6), + (7 , happyReduce_7), + (8 , happyReduce_8), + (9 , happyReduce_9), + (10 , happyReduce_10), + (11 , happyReduce_11), + (12 , happyReduce_12), + (13 , happyReduce_13), + (14 , happyReduce_14), + (15 , happyReduce_15), + (16 , happyReduce_16), + (17 , happyReduce_17), + (18 , happyReduce_18), + (19 , happyReduce_19), + (20 , happyReduce_20), + (21 , happyReduce_21), + (22 , happyReduce_22), + (23 , happyReduce_23) + ] + +happy_n_terms = 11 :: Int +happy_n_nonterms = 5 :: Int + +happyReduce_1 = happySpecReduce_1 0# happyReduction_1 +happyReduction_1 happy_x_1 + = case happyOut5 happy_x_1 of { happy_var_1 -> + happyIn4 + (happy_var_1 + )} + +happyReduce_2 = happySpecReduce_3 1# happyReduction_2 +happyReduction_2 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOut6 happy_x_1 of { happy_var_1 -> + case happyOut5 happy_x_3 of { happy_var_3 -> + happyIn5 + (happy_var_1 : happy_var_3 + )}} + +happyReduce_3 = happySpecReduce_1 1# happyReduction_3 +happyReduction_3 happy_x_1 + = case happyOut6 happy_x_1 of { happy_var_1 -> + happyIn5 + (happy_var_1 : [] + )} + +happyReduce_4 = happySpecReduce_0 1# happyReduction_4 +happyReduction_4 = happyIn5 + ([] + ) + +happyReduce_5 = happySpecReduce_3 2# happyReduction_5 +happyReduction_5 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut7 happy_x_3 of { happy_var_3 -> + happyIn6 + (SelfAssign (selfRefVal happy_var_1) happy_var_3 + )}} + +happyReduce_6 = happySpecReduce_3 2# happyReduction_6 +happyReduction_6 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut7 happy_x_3 of { happy_var_3 -> + happyIn6 + (SubAssign (subRefVal happy_var_1) happy_var_3 + )}} + +happyReduce_7 = happySpecReduce_3 2# happyReduction_7 +happyReduction_7 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut7 happy_x_3 of { happy_var_3 
-> + happyIn6 + (RightmostAssign (rightRefVal happy_var_1) happy_var_3 + )}} + +happyReduce_8 = happySpecReduce_2 2# happyReduction_8 +happyReduction_8 happy_x_2 + happy_x_1 + = case happyOut7 happy_x_2 of { happy_var_2 -> + happyIn6 + (Conditional happy_var_2 + )} + +happyReduce_9 = happyReduce 4# 3# happyReduction_9 +happyReduction_9 (happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut8 happy_x_2 of { happy_var_2 -> + case happyOutTok happy_x_3 of { happy_var_3 -> + case happyOut7 happy_x_4 of { happy_var_4 -> + happyIn7 + ([happy_var_1] ++ happy_var_2 ++ [happy_var_3] ++ happy_var_4 + ) `HappyStk` happyRest}}}} + +happyReduce_10 = happySpecReduce_2 3# happyReduction_10 +happyReduction_10 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut7 happy_x_2 of { happy_var_2 -> + happyIn7 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_11 = happySpecReduce_2 3# happyReduction_11 +happyReduction_11 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut7 happy_x_2 of { happy_var_2 -> + happyIn7 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_12 = happySpecReduce_2 3# happyReduction_12 +happyReduction_12 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut7 happy_x_2 of { happy_var_2 -> + happyIn7 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_13 = happySpecReduce_2 3# happyReduction_13 +happyReduction_13 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut7 happy_x_2 of { happy_var_2 -> + happyIn7 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_14 = happySpecReduce_2 3# happyReduction_14 +happyReduction_14 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut7 happy_x_2 of { happy_var_2 -> + happyIn7 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_15 = happySpecReduce_0 
3# happyReduction_15 +happyReduction_15 = happyIn7 + ([] + ) + +happyReduce_16 = happyReduce 4# 4# happyReduction_16 +happyReduction_16 (happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut8 happy_x_2 of { happy_var_2 -> + case happyOutTok happy_x_3 of { happy_var_3 -> + case happyOut8 happy_x_4 of { happy_var_4 -> + happyIn8 + ([happy_var_1] ++ happy_var_2 ++ [happy_var_3] ++ happy_var_4 + ) `HappyStk` happyRest}}}} + +happyReduce_17 = happySpecReduce_2 4# happyReduction_17 +happyReduction_17 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut8 happy_x_2 of { happy_var_2 -> + happyIn8 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_18 = happySpecReduce_2 4# happyReduction_18 +happyReduction_18 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut8 happy_x_2 of { happy_var_2 -> + happyIn8 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_19 = happySpecReduce_2 4# happyReduction_19 +happyReduction_19 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut8 happy_x_2 of { happy_var_2 -> + happyIn8 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_20 = happySpecReduce_2 4# happyReduction_20 +happyReduction_20 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut8 happy_x_2 of { happy_var_2 -> + happyIn8 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_21 = happySpecReduce_2 4# happyReduction_21 +happyReduction_21 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut7 happy_x_2 of { happy_var_2 -> + happyIn8 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_22 = happySpecReduce_2 4# happyReduction_22 +happyReduction_22 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { happy_var_1 -> + case happyOut8 happy_x_2 of { happy_var_2 -> + happyIn8 + (happy_var_1 : happy_var_2 + 
)}} + +happyReduce_23 = happySpecReduce_0 4# happyReduction_23 +happyReduction_23 = happyIn8 + ([] + ) + +happyNewToken action sts stk + = agLexer(\tk -> + let cont i = happyDoAction i tk action sts stk in + case tk of { + AgTok_EOF -> happyDoAction 10# tk action sts stk; + AgTok_LBrace -> cont 1#; + AgTok_RBrace -> cont 2#; + AgTok_Semicolon -> cont 3#; + AgTok_Eq -> cont 4#; + AgTok_Where -> cont 5#; + AgTok_SelfRef _ -> cont 6#; + AgTok_SubRef _ -> cont 7#; + AgTok_RightmostRef _ -> cont 8#; + AgTok_Unknown _ -> cont 9#; + _ -> happyError' tk + }) + +happyError_ 10# tk = happyError' tk +happyError_ _ tk = happyError' tk + +happyThen :: () => P a -> (a -> P b) -> P b +happyThen = (>>=) +happyReturn :: () => a -> P a +happyReturn = (return) +happyThen1 = happyThen +happyReturn1 :: () => a -> P a +happyReturn1 = happyReturn +happyError' :: () => (AgToken) -> P a +happyError' tk = (\token -> happyError) tk + +agParser = happySomeParser where + happySomeParser = happyThen (happyParse 0#) (\x -> happyReturn (happyOut4 x)) + +happySeq = happyDontSeq + + +happyError :: P a +happyError = fail ("Parse error\n") +{-# LINE 1 "templates/GenericTemplate.hs" #-} +{-# LINE 1 "templates/GenericTemplate.hs" #-} +{-# LINE 1 "" #-} +{-# LINE 1 "" #-} +{-# LINE 1 "templates/GenericTemplate.hs" #-} +-- Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp + +{-# LINE 13 "templates/GenericTemplate.hs" #-} + + + + + +-- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. 
+#if __GLASGOW_HASKELL__ > 706 +#define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Bool) +#define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Bool) +#define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Bool) +#else +#define LT(n,m) (n Happy_GHC_Exts.<# m) +#define GTE(n,m) (n Happy_GHC_Exts.>=# m) +#define EQ(n,m) (n Happy_GHC_Exts.==# m) +#endif +{-# LINE 46 "templates/GenericTemplate.hs" #-} + + +data Happy_IntList = HappyCons Happy_GHC_Exts.Int# Happy_IntList + + + + + +{-# LINE 67 "templates/GenericTemplate.hs" #-} + +{-# LINE 77 "templates/GenericTemplate.hs" #-} + +{-# LINE 86 "templates/GenericTemplate.hs" #-} + +infixr 9 `HappyStk` +data HappyStk a = HappyStk a (HappyStk a) + +----------------------------------------------------------------------------- +-- starting the parse + +happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll + +----------------------------------------------------------------------------- +-- Accepting the parse + +-- If the current token is 0#, it means we've just accepted a partial +-- parse (a %partial parser). We must ignore the saved token on the top of +-- the stack in this case. +happyAccept 0# tk st sts (_ `HappyStk` ans `HappyStk` _) = + happyReturn1 ans +happyAccept j tk st sts (HappyStk ans _) = + (happyTcHack j (happyTcHack st)) (happyReturn1 ans) + +----------------------------------------------------------------------------- +-- Arrays only: do the next action + + + +happyDoAction i tk st + = {- nothing -} + + + case action of + 0# -> {- nothing -} + happyFail i tk st + -1# -> {- nothing -} + happyAccept i tk st + n | LT(n,(0# :: Happy_GHC_Exts.Int#)) -> {- nothing -} + + (happyReduceArr Happy_Data_Array.! 
rule) i tk st + where rule = (Happy_GHC_Exts.I# ((Happy_GHC_Exts.negateInt# ((n Happy_GHC_Exts.+# (1# :: Happy_GHC_Exts.Int#)))))) + n -> {- nothing -} + + + happyShift new_state i tk st + where new_state = (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) + where off = indexShortOffAddr happyActOffsets st + off_i = (off Happy_GHC_Exts.+# i) + check = if GTE(off_i,(0# :: Happy_GHC_Exts.Int#)) + then EQ(indexShortOffAddr happyCheck off_i, i) + else False + action + | check = indexShortOffAddr happyTable off_i + | otherwise = indexShortOffAddr happyDefActions st + + +indexShortOffAddr (HappyA# arr) off = + Happy_GHC_Exts.narrow16Int# i + where + i = Happy_GHC_Exts.word2Int# (Happy_GHC_Exts.or# (Happy_GHC_Exts.uncheckedShiftL# high 8#) low) + high = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr (off' Happy_GHC_Exts.+# 1#))) + low = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr off')) + off' = off Happy_GHC_Exts.*# 2# + + + + + +data HappyAddr = HappyA# Happy_GHC_Exts.Addr# + + + + +----------------------------------------------------------------------------- +-- HappyState data type (not arrays) + +{-# LINE 170 "templates/GenericTemplate.hs" #-} + +----------------------------------------------------------------------------- +-- Shifting a token + +happyShift new_state 0# tk st sts stk@(x `HappyStk` _) = + let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) -> i }) in +-- trace "shifting the error token" $ + happyDoAction i tk new_state (HappyCons (st) (sts)) (stk) + +happyShift new_state i tk st sts stk = + happyNewToken new_state (HappyCons (st) (sts)) ((happyInTok (tk))`HappyStk`stk) + +-- happyReduce is specialised for the common cases. 
+ +happySpecReduce_0 i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happySpecReduce_0 nt fn j tk st@((action)) sts stk + = happyGoto nt j tk st (HappyCons (st) (sts)) (fn `HappyStk` stk) + +happySpecReduce_1 i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happySpecReduce_1 nt fn j tk _ sts@((HappyCons (st@(action)) (_))) (v1`HappyStk`stk') + = let r = fn v1 in + happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + +happySpecReduce_2 i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happySpecReduce_2 nt fn j tk _ (HappyCons (_) (sts@((HappyCons (st@(action)) (_))))) (v1`HappyStk`v2`HappyStk`stk') + = let r = fn v1 v2 in + happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + +happySpecReduce_3 i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happySpecReduce_3 nt fn j tk _ (HappyCons (_) ((HappyCons (_) (sts@((HappyCons (st@(action)) (_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk') + = let r = fn v1 v2 v3 in + happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + +happyReduce k i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happyReduce k nt fn j tk st sts stk + = case happyDrop (k Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) sts of + sts1@((HappyCons (st1@(action)) (_))) -> + let r = fn stk in -- it doesn't hurt to always seq here... 
+ happyDoSeq r (happyGoto nt j tk st1 sts1 r) + +happyMonadReduce k nt fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happyMonadReduce k nt fn j tk st sts stk = + case happyDrop k (HappyCons (st) (sts)) of + sts1@((HappyCons (st1@(action)) (_))) -> + let drop_stk = happyDropStk k stk in + happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk)) + +happyMonad2Reduce k nt fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happyMonad2Reduce k nt fn j tk st sts stk = + case happyDrop k (HappyCons (st) (sts)) of + sts1@((HappyCons (st1@(action)) (_))) -> + let drop_stk = happyDropStk k stk + + off = indexShortOffAddr happyGotoOffsets st1 + off_i = (off Happy_GHC_Exts.+# nt) + new_state = indexShortOffAddr happyTable off_i + + + + in + happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk)) + +happyDrop 0# l = l +happyDrop n (HappyCons (_) (t)) = happyDrop (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) t + +happyDropStk 0# l = l +happyDropStk n (x `HappyStk` xs) = happyDropStk (n Happy_GHC_Exts.-# (1#::Happy_GHC_Exts.Int#)) xs + +----------------------------------------------------------------------------- +-- Moving to a new state after a reduction + + +happyGoto nt j tk st = + {- nothing -} + happyDoAction j tk new_state + where off = indexShortOffAddr happyGotoOffsets st + off_i = (off Happy_GHC_Exts.+# nt) + new_state = indexShortOffAddr happyTable off_i + + + + +----------------------------------------------------------------------------- +-- Error recovery (0# is the error token) + +-- parse error if we are in recovery and we fail again +happyFail 0# tk old_st _ stk@(x `HappyStk` _) = + let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) -> i }) in +-- trace "failing" $ + happyError_ i tk + +{- We don't need state discarding for our restricted implementation of + "error". 
In fact, it can cause some bogus parses, so I've disabled it + for now --SDM + +-- discard a state +happyFail 0# tk old_st (HappyCons ((action)) (sts)) + (saved_tok `HappyStk` _ `HappyStk` stk) = +-- trace ("discarding state, depth " ++ show (length stk)) $ + happyDoAction 0# tk action sts ((saved_tok`HappyStk`stk)) +-} + +-- Enter error recovery: generate an error token, +-- save the old token and carry on. +happyFail i tk (action) sts stk = +-- trace "entering error recovery" $ + happyDoAction 0# tk action sts ( (Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# (i))) `HappyStk` stk) + +-- Internal happy errors: + +notHappyAtAll :: a +notHappyAtAll = error "Internal Happy error\n" + +----------------------------------------------------------------------------- +-- Hack to get the typechecker to accept our action functions + + +happyTcHack :: Happy_GHC_Exts.Int# -> a -> a +happyTcHack x y = y +{-# INLINE happyTcHack #-} + + +----------------------------------------------------------------------------- +-- Seq-ing. If the --strict flag is given, then Happy emits +-- happySeq = happyDoSeq +-- otherwise it emits +-- happySeq = happyDontSeq + +happyDoSeq, happyDontSeq :: a -> b -> b +happyDoSeq a b = a `seq` b +happyDontSeq a b = b + +----------------------------------------------------------------------------- +-- Don't inline any functions from the template. GHC has a nasty habit +-- of deciding to inline happyGoto everywhere, which increases the size of +-- the generated parser quite a bit. 
+ + +{-# NOINLINE happyDoAction #-} +{-# NOINLINE happyTable #-} +{-# NOINLINE happyCheck #-} +{-# NOINLINE happyActOffsets #-} +{-# NOINLINE happyGotoOffsets #-} +{-# NOINLINE happyDefActions #-} + +{-# NOINLINE happyShift #-} +{-# NOINLINE happySpecReduce_0 #-} +{-# NOINLINE happySpecReduce_1 #-} +{-# NOINLINE happySpecReduce_2 #-} +{-# NOINLINE happySpecReduce_3 #-} +{-# NOINLINE happyReduce #-} +{-# NOINLINE happyMonadReduce #-} +{-# NOINLINE happyGoto #-} +{-# NOINLINE happyFail #-} + +-- end of Happy Template. diff -Nru happy-1.19.5/src/AttrGrammarParser.ly happy-1.19.8/src/AttrGrammarParser.ly --- happy-1.19.5/src/AttrGrammarParser.ly 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/AttrGrammarParser.ly 1970-01-01 00:00:00.000000000 +0000 @@ -1,68 +0,0 @@ -This parser parses the contents of the attribute grammar -into a list of rules. A rule can either be an assignment -to an attribute of the LHS (synthesized attribute), and -assignment to an attribute of the RHS (an inherited attribute), -or a conditional statement. 
- -> { -> {-# OPTIONS_GHC -w #-} -> module AttrGrammarParser (agParser) where -> import ParseMonad -> import AttrGrammar -> } - -> %name agParser -> %tokentype { AgToken } -> %token -> "{" { AgTok_LBrace } -> "}" { AgTok_RBrace } -> ";" { AgTok_Semicolon } -> "=" { AgTok_Eq } -> where { AgTok_Where } -> selfRef { AgTok_SelfRef _ } -> subRef { AgTok_SubRef _ } -> rightRef { AgTok_RightmostRef _ } -> unknown { AgTok_Unknown _ } -> -> %monad { P } -> %lexer { agLexer } { AgTok_EOF } - -> %% - -> agParser :: { [AgRule] } -> : rules { $1 } - -> rules :: { [AgRule] } -> : rule ";" rules { $1 : $3 } -> | rule { $1 : [] } -> | { [] } - -> rule :: { AgRule } -> : selfRef "=" code { SelfAssign (selfRefVal $1) $3 } -> | subRef "=" code { SubAssign (subRefVal $1) $3 } -> | rightRef "=" code { RightmostAssign (rightRefVal $1) $3 } -> | where code { Conditional $2 } - -> code :: { [AgToken] } -> : "{" code0 "}" code { [$1] ++ $2 ++ [$3] ++ $4 } -> | "=" code { $1 : $2 } -> | selfRef code { $1 : $2 } -> | subRef code { $1 : $2 } -> | rightRef code { $1 : $2 } -> | unknown code { $1 : $2 } -> | { [] } - -> code0 :: { [AgToken] } -> : "{" code0 "}" code0 { [$1] ++ $2 ++ [$3] ++ $4 } -> | "=" code0 { $1 : $2 } -> | ";" code0 { $1 : $2 } -> | selfRef code0 { $1 : $2 } -> | subRef code0 { $1 : $2 } -> | rightRef code { $1 : $2 } -> | unknown code0 { $1 : $2 } -> | { [] } - -> { -> happyError :: P a -> happyError = fail ("Parse error\n") -> } diff -Nru happy-1.19.5/src/Grammar.lhs happy-1.19.8/src/Grammar.lhs --- happy-1.19.5/src/Grammar.lhs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/Grammar.lhs 2017-10-12 07:46:11.000000000 +0000 @@ -9,7 +9,7 @@ > module Grammar ( > Name, > -> Production, Grammar(..), mangler, +> Production, Grammar(..), mangler, ErrorHandlerType(..), > > LRAction(..), ActionTable, Goto(..), GotoTable, Priority(..), > Assoc(..), @@ -32,12 +32,6 @@ > import Control.Monad.Writer -#ifdef DEBUG - -> import System.IOExts - -#endif - > type Name = Int > type 
Production = (Name,[Name],(String,[Int]),Priority) @@ -64,7 +58,8 @@ > attributes :: [(String,String)], > attributetype :: String, > lexer :: Maybe (String,String), -> error_handler :: Maybe String +> error_handler :: Maybe String, +> error_sig :: ErrorHandlerType > } #ifdef DEBUG @@ -297,8 +292,35 @@ > rules2 <- mapM transRule rules1 > let -> tys = accumArray (\_ x -> x) Nothing (first_nt, last_nt) -> [ (nm, Just ty) | (nm, _, Just ty) <- rules1 ] +> type_env = [(nt, t) | (nt, _, Just (t,[])) <- rules] ++ +> [(nt, getTokenType dirs) | nt <- terminal_strs] -- XXX: Doesn't handle $$ type! +> +> fixType (ty,s) = go "" ty +> where go acc [] = return (reverse acc) +> go acc (c:r) | isLower c = -- look for a run of alphanumerics starting with a lower case letter +> let (cs,r1) = span isAlphaNum r +> go1 x = go (reverse x ++ acc) r1 +> in case lookup (c:cs) s of +> Nothing -> go1 (c:cs) -- no binding found +> Just a -> case lookup a type_env of +> Nothing -> do +> addErr ("Parameterized rule argument '" ++ a ++ "' does not have type") +> go1 (c:cs) +> Just t -> go1 $ "(" ++ t ++ ")" +> | otherwise = go (c:acc) r +> +> convType (nm, t) +> = do t' <- fixType t +> return (nm, t') +> +> -- in +> tys <- mapM convType [ (nm, t) | (nm, _, Just t) <- rules1 ] +> + +> let +> type_array :: Array Int (Maybe String) +> type_array = accumArray (\_ x -> x) Nothing (first_nt, last_nt) +> [ (nm, Just t) | (nm, t) <- tys ] > env_array :: Array Int String > env_array = array (errorTok, last_t) name_env @@ -334,7 +356,7 @@ > -- INCLUDES the %start tokens > starts = zip4 parser_names start_names start_toks > start_partials, -> types = tys, +> types = type_array, > token_names = env_array, > first_nonterm = first_nt, > first_term = first_t, @@ -344,6 +366,7 @@ > monad = getMonad dirs, > lexer = getLexer dirs, > error_handler = getError dirs, +> error_sig = getErrorHandlerType dirs, > token_type = getTokenType dirs, > expect = getExpect dirs, > attributes = attrs, diff -Nru 
happy-1.19.5/src/LALR.lhs happy-1.19.8/src/LALR.lhs --- happy-1.19.5/src/LALR.lhs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/LALR.lhs 2017-10-12 07:46:11.000000000 +0000 @@ -22,7 +22,8 @@ > import Control.Monad.ST > import Data.Array.ST > import Data.Array as Array -> import Data.List (nub) +> import Data.List (nub,foldl',groupBy,sortBy) +> import Data.Function (on) > import Data.Maybe (listToMaybe, maybeToList) > unionMap :: (Ord b) => (a -> Set b) -> Set a -> Set b @@ -34,9 +35,24 @@ This means rule $a$, with dot at $b$ (all starting at 0) > data Lr0Item = Lr0 {-#UNPACK#-}!Int {-#UNPACK#-}!Int -- (rule, dot) -> deriving (Eq,Ord) +> deriving (Eq,Ord + +#ifdef DEBUG + +> ,Show + +#endif + +> ) > data Lr1Item = Lr1 {-#UNPACK#-}!Int {-#UNPACK#-}!Int NameSet -- (rule, dot, lookahead) + +#ifdef DEBUG + +> deriving (Show) + +#endif + > type RuleList = [Lr0Item] ----------------------------------------------------------------------------- @@ -248,7 +264,7 @@ > indexInto :: Eq a => Int -> a -> [a] -> Maybe Int > indexInto _ _ [] = Nothing > indexInto i x (y:ys) | x == y = Just i -> | otherwise = indexInto (i+1) x ys +> | otherwise = let j = i + 1 in j `seq` indexInto j x ys ----------------------------------------------------------------------------- Computing propagation of lookaheads @@ -335,7 +351,7 @@ > calcLookaheads n_states spont prop > = runST $ do > arr <- newArray (0,n_states) [] -> propagate arr (foldr fold_lookahead [] spont) +> propagate arr (fold_lookahead spont) > freeze arr > where @@ -388,13 +404,12 @@ > | otherwise = > get_new' l las new -> fold_lookahead :: (Int,Lr0Item,NameSet) -> [(Int,Lr0Item,NameSet)] -> -> [(Int,Lr0Item,NameSet)] -> fold_lookahead l [] = [l] -> fold_lookahead l@(i,item,s) (m@(i',item',s'):las) -> | i == i' && item == item' = (i,item, s `NameSet.union` s'):las -> | i < i' = (i,item,s):m:las -> | otherwise = m : fold_lookahead l las +> fold_lookahead :: [(Int,Lr0Item,NameSet)] -> [(Int,Lr0Item,NameSet)] +> 
fold_lookahead = +> map (\cs@(((a,b),_):_) -> (a,b,NameSet.unions $ map snd cs)) . +> groupBy ((==) `on` fst) . +> sortBy (compare `on` fst) . +> map (\(a,b,c) -> ((a,b),c)) ----------------------------------------------------------------------------- Merge lookaheads @@ -572,7 +587,7 @@ > countConflicts :: ActionTable -> (Array Int (Int,Int), (Int,Int)) > countConflicts action -> = (conflictArray, foldr (\(a,b) (c,d) -> (a+c, b+d)) (0,0) conflictList) +> = (conflictArray, foldl' (\(a,b) (c,d) -> let ac = a + c; bd = b + d in ac `seq` bd `seq` (ac,bd)) (0,0) conflictList) > > where > diff -Nru happy-1.19.5/src/Lexer.lhs happy-1.19.8/src/Lexer.lhs --- happy-1.19.5/src/Lexer.lhs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/Lexer.lhs 2017-10-12 07:46:11.000000000 +0000 @@ -37,6 +37,7 @@ > | TokSpecId_Token -- %token > | TokSpecId_Name -- %name > | TokSpecId_Partial -- %partial +> | TokSpecId_ErrorHandlerType -- %errorhandlertype > | TokSpecId_Lexer -- %lexer > | TokSpecId_ImportedIdentity -- %importedidentity > | TokSpecId_Monad -- %monad @@ -133,6 +134,8 @@ > returnToken cont (TokenKW TokSpecId_Prec) rest > 'e':'x':'p':'e':'c':'t':rest -> > returnToken cont (TokenKW TokSpecId_Expect) rest +> 'e':'r':'r':'o':'r':'h':'a':'n':'d':'l':'e':'r':'t':'y':'p':'e':rest -> +> returnToken cont (TokenKW TokSpecId_ErrorHandlerType) rest > 'e':'r':'r':'o':'r':rest -> > returnToken cont (TokenKW TokSpecId_Error) rest > 'a':'t':'t':'r':'i':'b':'u':'t':'e':'t':'y':'p':'e':rest -> diff -Nru happy-1.19.5/src/Main.lhs happy-1.19.8/src/Main.lhs --- happy-1.19.5/src/Main.lhs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/Main.lhs 2017-10-12 07:46:11.000000000 +0000 @@ -14,6 +14,7 @@ > import ParseMonad > import AbsSyn > import Grammar +> import PrettyGrammar > import Parser > import First > import LALR @@ -175,11 +176,23 @@ > unused_rules > unused_terminals > in - > (case info_filename of > Just s -> writeFile s info > Nothing -> return ()) >> + +Pretty print the 
grammar. + +> getPrettyFileName name cli >>= \pretty_filename -> +> (let out = render (ppAbsSyn abssyn) +> in +> case pretty_filename of +> Just s -> writeFile s out +> Nothing -> return ()) >> + + + + Now, let's get on with generating the parser. Firstly, find out what kind of code we should generate, and where it should go: @@ -195,7 +208,7 @@ > let > header = Just ( > (case hd of Just s -> s; Nothing -> "") -> ++ importsToInject target cli +> ++ importsToInject cli > ) > in @@ -208,7 +221,7 @@ > filtering | OptGLR_Filter `elem` cli = UseFiltering > | otherwise = NoFiltering > ghc_exts | OptGhcTarget `elem` cli = UseGhcExts -> (importsToInject target cli) +> (importsToInject cli) > (optsToInject target cli) > | otherwise = NoGhcExts > debug = OptDebugParser `elem` cli @@ -387,6 +400,7 @@ > DumpVersion > | DumpHelp > | OptInfoFile (Maybe String) +> | OptPrettyFile (Maybe String) > | OptTemplate String > | OptMagicName String > @@ -407,6 +421,8 @@ > "write the output to FILE (default: file.hs)", > Option ['i'] ["info"] (OptArg OptInfoFile "FILE") > "put detailed grammar info in FILE", +> Option ['p'] ["pretty"] (OptArg OptPrettyFile "FILE") +> "pretty print the production rules to FILE", > Option ['t'] ["template"] (ReqArg OptTemplate "DIR") > "look in DIR for template files", > Option ['m'] ["magic-name"] (ReqArg OptMagicName "NAME") @@ -478,21 +494,19 @@ > optsToInject :: Target -> [CLIFlags] -> String > optsToInject tgt cli -> | OptGhcTarget `elem` cli = "-fglasgow-exts -cpp" +> | OptGhcTarget `elem` cli = "-XMagicHash -XBangPatterns -XTypeSynonymInstances -XFlexibleInstances -cpp" > | tgt == TargetArrayBased = "-cpp" > | OptDebugParser `elem` cli = "-cpp" > | otherwise = "" -> importsToInject :: Target -> [CLIFlags] -> String -> importsToInject tgt cli = -> concat ["\n", array_import, glaexts_import, debug_imports, applicative_imports] +> importsToInject :: [CLIFlags] -> String +> importsToInject cli = +> concat ["\n", import_array, import_bits, +> 
glaexts_import, debug_imports, applicative_imports] > where > glaexts_import | is_ghc = import_glaexts > | otherwise = "" > -> array_import | is_array = import_array -> | otherwise = "" -> > debug_imports | is_debug = import_debug > | otherwise = "" > @@ -500,7 +514,6 @@ > > is_ghc = OptGhcTarget `elem` cli > is_debug = OptDebugParser `elem` cli -> is_array = tgt == TargetArrayBased CPP is turned on for -fglasgow-exts, so we can use conditional compilation: @@ -510,6 +523,9 @@ > import_array :: String > import_array = "import qualified Data.Array as Happy_Data_Array\n" +> import_bits :: String +> import_bits = "import qualified Data.Bits as Bits\n" + > import_debug :: String > import_debug = > "import qualified System.IO as Happy_System_IO\n" ++ @@ -545,6 +561,15 @@ > Just j -> return (Just j) > _many -> dieHappy "multiple --info/-i options\n" +> getPrettyFileName :: String -> [CLIFlags] -> IO (Maybe String) +> getPrettyFileName base cli +> = case [ s | (OptPrettyFile s) <- cli ] of +> [] -> return Nothing +> [f] -> case f of +> Nothing -> return (Just (base ++ ".grammar")) +> Just j -> return (Just j) +> _many -> dieHappy "multiple --pretty/-p options\n" + > getTemplate :: IO String -> [CLIFlags] -> IO String > getTemplate def cli > = case [ s | (OptTemplate s) <- cli ] of diff -Nru happy-1.19.5/src/ParamRules.hs happy-1.19.8/src/ParamRules.hs --- happy-1.19.5/src/ParamRules.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/ParamRules.hs 2017-10-12 07:46:11.000000000 +0000 @@ -2,8 +2,7 @@ import AbsSyn import Control.Monad.Writer -import Control.Monad.Error -import Control.Monad.Instances() -- mtl is broken, so we use Either monad +import Control.Monad.Except import Data.List(partition,intersperse) import qualified Data.Set as S import qualified Data.Map as M -- XXX: Make it work with old GHC. 
@@ -17,18 +16,19 @@ type RuleName = String type Inst = (RuleName, [RuleName]) type Funs = M.Map RuleName Rule -type Rule1 = (RuleName,[Prod1],Maybe String) +type Rule1 = (RuleName,[Prod1],Maybe (String, Subst)) type Prod1 = ([RuleName],String,Int,Maybe String) inst_name :: Inst -> RuleName inst_name (f,[]) = f -inst_name (f,xs) = f ++ "(" ++ concat (intersperse "," xs) ++ ")" +--inst_name (f,xs) = f ++ "(" ++ concat (intersperse "," xs) ++ ")" +inst_name (f,xs) = f ++ "__" ++ concat (intersperse "__" xs) ++ "__" -- | A renaming substitution used when we instantiate a parameterized rule. type Subst = [(RuleName,RuleName)] type M1 = Writer (S.Set Inst) -type M2 = ErrorT String M1 +type M2 = ExceptT String M1 -- | Collects the instances arising from a term. from_term :: Subst -> Term -> M1 RuleName @@ -54,7 +54,7 @@ inst_rule (x,xs,ps,t) ts = do s <- build xs ts [] ps1 <- lift $ mapM (inst_prod s) ps let y = inst_name (x,ts) - return (y,ps1,t) -- XXX: type? + return (y,ps1,fmap (\x' -> (x',s)) t) where build (x':xs') (t':ts') m = build xs' ts' ((x',t'):m) build [] [] m = return m build xs' [] _ = err ("Need " ++ show (length xs') ++ " more arguments") @@ -68,8 +68,8 @@ Just r -> inst_rule r xs Nothing -> throwError ("Undefined rule: " ++ f) -runM2 :: ErrorT e (Writer w) a -> Either e (a, w) -runM2 m = case runWriter (runErrorT m) of +runM2 :: ExceptT e (Writer w) a -> Either e (a, w) +runM2 m = case runWriter (runExceptT m) of (Left e,_) -> Left e (Right a,xs) -> Right (a,xs) diff -Nru happy-1.19.5/src/Parser.hs happy-1.19.8/src/Parser.hs --- happy-1.19.5/src/Parser.hs 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/src/Parser.hs 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,1013 @@ +{-# OPTIONS_GHC -w #-} +{-# OPTIONS -fglasgow-exts -cpp #-} +{-# OPTIONS_GHC -w #-} +module Parser (ourParser,AbsSyn) where +import ParseMonad +import AbsSyn +import Lexer +import qualified Data.Array as Happy_Data_Array +import qualified GHC.Exts as Happy_GHC_Exts +import 
Control.Applicative(Applicative(..)) + +-- parser produced by Happy Version 1.19.4 + +newtype HappyAbsSyn = HappyAbsSyn HappyAny +#if __GLASGOW_HASKELL__ >= 607 +type HappyAny = Happy_GHC_Exts.Any +#else +type HappyAny = forall a . a +#endif +happyIn4 :: (AbsSyn) -> (HappyAbsSyn ) +happyIn4 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn4 #-} +happyOut4 :: (HappyAbsSyn ) -> (AbsSyn) +happyOut4 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut4 #-} +happyIn5 :: ([Rule]) -> (HappyAbsSyn ) +happyIn5 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn5 #-} +happyOut5 :: (HappyAbsSyn ) -> ([Rule]) +happyOut5 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut5 #-} +happyIn6 :: (Rule) -> (HappyAbsSyn ) +happyIn6 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn6 #-} +happyOut6 :: (HappyAbsSyn ) -> (Rule) +happyOut6 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut6 #-} +happyIn7 :: ([String]) -> (HappyAbsSyn ) +happyIn7 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn7 #-} +happyOut7 :: (HappyAbsSyn ) -> ([String]) +happyOut7 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut7 #-} +happyIn8 :: ([String]) -> (HappyAbsSyn ) +happyIn8 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn8 #-} +happyOut8 :: (HappyAbsSyn ) -> ([String]) +happyOut8 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut8 #-} +happyIn9 :: ([Prod]) -> (HappyAbsSyn ) +happyIn9 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn9 #-} +happyOut9 :: (HappyAbsSyn ) -> ([Prod]) +happyOut9 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut9 #-} +happyIn10 :: (Prod) -> (HappyAbsSyn ) +happyIn10 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn10 #-} +happyOut10 :: (HappyAbsSyn ) -> (Prod) +happyOut10 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut10 #-} +happyIn11 :: (Term) -> (HappyAbsSyn ) +happyIn11 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn11 #-} +happyOut11 :: (HappyAbsSyn ) -> (Term) +happyOut11 x = 
Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut11 #-} +happyIn12 :: ([Term]) -> (HappyAbsSyn ) +happyIn12 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn12 #-} +happyOut12 :: (HappyAbsSyn ) -> ([Term]) +happyOut12 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut12 #-} +happyIn13 :: ([Term]) -> (HappyAbsSyn ) +happyIn13 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn13 #-} +happyOut13 :: (HappyAbsSyn ) -> ([Term]) +happyOut13 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut13 #-} +happyIn14 :: ([Term]) -> (HappyAbsSyn ) +happyIn14 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn14 #-} +happyOut14 :: (HappyAbsSyn ) -> ([Term]) +happyOut14 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut14 #-} +happyIn15 :: (Maybe String) -> (HappyAbsSyn ) +happyIn15 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn15 #-} +happyOut15 :: (HappyAbsSyn ) -> (Maybe String) +happyOut15 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut15 #-} +happyIn16 :: ([Directive String]) -> (HappyAbsSyn ) +happyIn16 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn16 #-} +happyOut16 :: (HappyAbsSyn ) -> ([Directive String]) +happyOut16 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut16 #-} +happyIn17 :: (Directive String) -> (HappyAbsSyn ) +happyIn17 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn17 #-} +happyOut17 :: (HappyAbsSyn ) -> (Directive String) +happyOut17 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut17 #-} +happyIn18 :: (Maybe String) -> (HappyAbsSyn ) +happyIn18 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn18 #-} +happyOut18 :: (HappyAbsSyn ) -> (Maybe String) +happyOut18 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut18 #-} +happyIn19 :: ([(String,String)]) -> (HappyAbsSyn ) +happyIn19 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn19 #-} +happyOut19 :: (HappyAbsSyn ) -> ([(String,String)]) +happyOut19 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut19 #-} +happyIn20 :: 
((String,String)) -> (HappyAbsSyn ) +happyIn20 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn20 #-} +happyOut20 :: (HappyAbsSyn ) -> ((String,String)) +happyOut20 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut20 #-} +happyIn21 :: ([String]) -> (HappyAbsSyn ) +happyIn21 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn21 #-} +happyOut21 :: (HappyAbsSyn ) -> ([String]) +happyOut21 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut21 #-} +happyIn22 :: (Maybe String) -> (HappyAbsSyn ) +happyIn22 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyIn22 #-} +happyOut22 :: (HappyAbsSyn ) -> (Maybe String) +happyOut22 x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOut22 #-} +happyInTok :: (Token) -> (HappyAbsSyn ) +happyInTok x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyInTok #-} +happyOutTok :: (HappyAbsSyn ) -> (Token) +happyOutTok x = Happy_GHC_Exts.unsafeCoerce# x +{-# INLINE happyOutTok #-} + + +happyActOffsets :: HappyAddr +happyActOffsets = HappyA# "\x73\x00\x73\x00\x23\x00\x00\x00\x71\x00\xff\xff\x00\x00\x70\x00\x7a\x00\x79\x00\x76\x00\x6f\x00\x00\x00\x6b\x00\x75\x00\x75\x00\x75\x00\x69\x00\x67\x00\x72\x00\x6e\x00\x66\x00\x00\x00\x63\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6d\x00\x00\x00\x00\x00\x62\x00\x60\x00\x6c\x00\x6c\x00\x00\x00\x6a\x00\x5f\x00\x00\x00\x00\x00\x68\x00\x12\x00\x00\x00\x51\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x5e\x00\x00\x00\x00\x00\x5a\x00\x09\x00\x65\x00\x00\x00\x00\x00\x33\x00\x00\x00\x64\x00\x52\x00\x00\x00\x0a\x00\x00\x00\x50\x00\x00\x00\x57\x00\x61\x00\x4e\x00\x00\x00\x5d\x00\x00\x00\x5c\x00\x00\x00\x4f\x00\x5b\x00\x59\x00\x4c\x00\x58\x00\x00\x00\x58\x00\x00\x00\x00\x00\x4a\x00\x00\x00\x31\x00\x00\x00\x53\x00\x00\x00\x00\x00\x00\x00\x00\x00"# + +happyGotoOffsets :: HappyAddr +happyGotoOffsets = HappyA# 
"\x11\x00\x49\x00\x3d\x00\x00\x00\x00\x00\x48\x00\x00\x00\x00\x00\x38\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x47\x00\x46\x00\x45\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x42\x00\x00\x00\x00\x00\x00\x00\x00\x00\x44\x00\x43\x00\x00\x00\x36\x00\x00\x00\x00\x00\x00\x00\x1f\x00\x10\x00\x00\x00\x4d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x4b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3a\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x39\x00\x28\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x35\x00\x00\x00\x30\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00"# + +happyDefActions :: HappyAddr +happyDefActions = HappyA# "\xca\xff\x00\x00\x00\x00\xcb\xff\x00\x00\x00\x00\xe5\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\xff\x00\x00\xcc\xff\xcc\xff\xcc\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd4\xff\x00\x00\xd5\xff\xd6\xff\xd7\xff\xd9\xff\xcc\xff\xd8\xff\xda\xff\xde\xff\x00\x00\xd1\xff\xd1\xff\xe3\xff\xcf\xff\x00\x00\xe4\xff\xe6\xff\x00\x00\xca\xff\xfc\xff\xf7\xff\xce\xff\xd0\xff\xe2\xff\xd2\xff\xe1\xff\xdf\xff\xdd\xff\xcd\xff\xd3\xff\xdc\xff\x00\x00\x00\x00\xfd\xff\xfe\xff\x00\x00\xf6\xff\xed\xff\x00\x00\xdb\xff\x00\x00\xf9\xff\xf3\xff\xec\xff\xe7\xff\xee\xff\xf0\xff\xf8\xff\x00\x00\xf5\xff\x00\x00\xeb\xff\x00\x00\x00\x00\xed\xff\x00\x00\xed\xff\xfb\xff\xed\xff\xf4\xff\xe8\xff\xf1\xff\xea\xff\x00\x00\xef\xff\x00\x00\xf2\xff\xfa\xff\xe9\xff"# + +happyCheck :: HappyAddr +happyCheck = HappyA# 
"\xff\xff\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x01\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x00\x00\x02\x00\x01\x00\x07\x00\x07\x00\x17\x00\x0a\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x14\x00\x14\x00\x16\x00\x01\x00\x02\x00\x12\x00\x12\x00\x12\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x07\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0b\x00\x0f\x00\x10\x00\x0f\x00\x10\x00\x0c\x00\x0d\x00\x1a\x00\x1b\x00\x1a\x00\x1b\x00\x04\x00\x03\x00\x0e\x00\x0e\x00\x11\x00\x01\x00\x0d\x00\x11\x00\x11\x00\x11\x00\x01\x00\x01\x00\x12\x00\x01\x00\x01\x00\x01\x00\x15\x00\x14\x00\x12\x00\x01\x00\x0c\x00\x12\x00\x01\x00\x01\x00\x19\x00\x18\x00\x01\x00\x19\x00\x01\x00\x12\x00\x01\x00\x01\x00\x01\x00\x12\x00\x12\x00\x12\x00\x01\x00\x12\x00\x12\x00\x01\x00\x01\x00\x12\x00\x12\x00\x01\x00\x01\x00\x13\x00\x12\x00\xff\xff\xff\xff\xff\xff\x12\x00\x12\x00\xff\xff\xff\xff\x12\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1c\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# + +happyTable :: HappyAddr +happyTable = HappyA# 
"\x00\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x4f\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x04\x00\x38\x00\x2c\x00\x55\x00\x5b\x00\x29\x00\x56\x00\x5a\x00\x41\x00\x42\x00\x43\x00\x44\x00\x3d\x00\x50\x00\x3e\x00\x29\x00\x2a\x00\x39\x00\x02\x00\x04\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x4a\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x50\x00\x41\x00\x42\x00\x43\x00\x44\x00\x52\x00\x41\x00\x42\x00\x43\x00\x44\x00\x40\x00\x41\x00\x42\x00\x43\x00\x44\x00\x4b\x00\x2d\x00\x24\x00\x23\x00\x24\x00\x05\x00\x06\x00\x58\x00\x59\x00\x47\x00\x48\x00\x3a\x00\x36\x00\x2e\x00\x30\x00\x33\x00\x46\x00\x27\x00\x1b\x00\x1d\x00\x1e\x00\x46\x00\x46\x00\x02\x00\x54\x00\x46\x00\x49\x00\x5a\x00\x52\x00\x55\x00\x46\x00\x4d\x00\x40\x00\x46\x00\x3c\x00\x4a\x00\x4e\x00\x2c\x00\x38\x00\x26\x00\x3f\x00\x30\x00\x1d\x00\x18\x00\x36\x00\x2d\x00\x32\x00\x19\x00\x33\x00\x35\x00\x1d\x00\x22\x00\x17\x00\x1a\x00\x23\x00\x26\x00\x1b\x00\x20\x00\x00\x00\x00\x00\x00\x00\x21\x00\x27\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# + +happyReduceArr = Happy_Data_Array.array (1, 53) [ + (1 , happyReduce_1), + (2 , happyReduce_2), + (3 , happyReduce_3), + (4 , happyReduce_4), + (5 , happyReduce_5), + (6 , happyReduce_6), + (7 , happyReduce_7), + (8 , happyReduce_8), + (9 , happyReduce_9), + (10 , happyReduce_10), + (11 , happyReduce_11), + (12 , happyReduce_12), + (13 , happyReduce_13), + (14 , happyReduce_14), + (15 , happyReduce_15), + (16 , happyReduce_16), + (17 , happyReduce_17), + (18 , happyReduce_18), + (19 , happyReduce_19), + (20 , happyReduce_20), + (21 , happyReduce_21), + (22 , happyReduce_22), + (23 , happyReduce_23), + (24 , happyReduce_24), + (25 , happyReduce_25), + (26 , happyReduce_26), + (27 , happyReduce_27), + (28 , happyReduce_28), + (29 , happyReduce_29), + (30 , happyReduce_30), + 
(31 , happyReduce_31), + (32 , happyReduce_32), + (33 , happyReduce_33), + (34 , happyReduce_34), + (35 , happyReduce_35), + (36 , happyReduce_36), + (37 , happyReduce_37), + (38 , happyReduce_38), + (39 , happyReduce_39), + (40 , happyReduce_40), + (41 , happyReduce_41), + (42 , happyReduce_42), + (43 , happyReduce_43), + (44 , happyReduce_44), + (45 , happyReduce_45), + (46 , happyReduce_46), + (47 , happyReduce_47), + (48 , happyReduce_48), + (49 , happyReduce_49), + (50 , happyReduce_50), + (51 , happyReduce_51), + (52 , happyReduce_52), + (53 , happyReduce_53) + ] + +happy_n_terms = 29 :: Int +happy_n_nonterms = 19 :: Int + +happyReduce_1 = happyReduce 5# 0# happyReduction_1 +happyReduction_1 (happy_x_5 `HappyStk` + happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) + = case happyOut22 happy_x_1 of { happy_var_1 -> + case happyOut16 happy_x_2 of { happy_var_2 -> + case happyOut5 happy_x_4 of { happy_var_4 -> + case happyOut22 happy_x_5 of { happy_var_5 -> + happyIn4 + (AbsSyn happy_var_1 (reverse happy_var_2) (reverse happy_var_4) happy_var_5 + ) `HappyStk` happyRest}}}} + +happyReduce_2 = happySpecReduce_2 1# happyReduction_2 +happyReduction_2 happy_x_2 + happy_x_1 + = case happyOut5 happy_x_1 of { happy_var_1 -> + case happyOut6 happy_x_2 of { happy_var_2 -> + happyIn5 + (happy_var_2 : happy_var_1 + )}} + +happyReduce_3 = happySpecReduce_1 1# happyReduction_3 +happyReduction_3 happy_x_1 + = case happyOut6 happy_x_1 of { happy_var_1 -> + happyIn5 + ([happy_var_1] + )} + +happyReduce_4 = happyReduce 6# 2# happyReduction_4 +happyReduction_4 (happy_x_6 `HappyStk` + happy_x_5 `HappyStk` + happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> + case happyOut7 happy_x_2 of { happy_var_2 -> + case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> + case happyOut9 happy_x_6 of { 
happy_var_6 -> + happyIn6 + ((happy_var_1,happy_var_2,happy_var_6,Just happy_var_4) + ) `HappyStk` happyRest}}}} + +happyReduce_5 = happyReduce 7# 2# happyReduction_5 +happyReduction_5 (happy_x_7 `HappyStk` + happy_x_6 `HappyStk` + happy_x_5 `HappyStk` + happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> + case happyOut7 happy_x_2 of { happy_var_2 -> + case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> + case happyOut9 happy_x_7 of { happy_var_7 -> + happyIn6 + ((happy_var_1,happy_var_2,happy_var_7,Just happy_var_4) + ) `HappyStk` happyRest}}}} + +happyReduce_6 = happyReduce 4# 2# happyReduction_6 +happyReduction_6 (happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> + case happyOut7 happy_x_2 of { happy_var_2 -> + case happyOut9 happy_x_4 of { happy_var_4 -> + happyIn6 + ((happy_var_1,happy_var_2,happy_var_4,Nothing) + ) `HappyStk` happyRest}}} + +happyReduce_7 = happySpecReduce_3 3# happyReduction_7 +happyReduction_7 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOut8 happy_x_2 of { happy_var_2 -> + happyIn7 + (reverse happy_var_2 + )} + +happyReduce_8 = happySpecReduce_0 3# happyReduction_8 +happyReduction_8 = happyIn7 + ([] + ) + +happyReduce_9 = happySpecReduce_1 4# happyReduction_9 +happyReduction_9 happy_x_1 + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> + happyIn8 + ([happy_var_1] + )} + +happyReduce_10 = happySpecReduce_3 4# happyReduction_10 +happyReduction_10 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOut8 happy_x_1 of { happy_var_1 -> + case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokId) -> + happyIn8 + (happy_var_3 : happy_var_1 + )}} + +happyReduce_11 = happySpecReduce_3 5# happyReduction_11 +happyReduction_11 happy_x_3 + happy_x_2 + happy_x_1 + = case 
happyOut10 happy_x_1 of { happy_var_1 -> + case happyOut9 happy_x_3 of { happy_var_3 -> + happyIn9 + (happy_var_1 : happy_var_3 + )}} + +happyReduce_12 = happySpecReduce_1 5# happyReduction_12 +happyReduction_12 happy_x_1 + = case happyOut10 happy_x_1 of { happy_var_1 -> + happyIn9 + ([happy_var_1] + )} + +happyReduce_13 = happyMonadReduce 4# 6# happyReduction_13 +happyReduction_13 (happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) tk + = happyThen (case happyOut12 happy_x_1 of { happy_var_1 -> + case happyOut15 happy_x_2 of { happy_var_2 -> + case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> + ( lineP >>= \l -> return (happy_var_1,happy_var_3,l,happy_var_2))}}} + ) (\r -> happyReturn (happyIn10 r)) + +happyReduce_14 = happyMonadReduce 3# 6# happyReduction_14 +happyReduction_14 (happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) tk + = happyThen (case happyOut12 happy_x_1 of { happy_var_1 -> + case happyOut15 happy_x_2 of { happy_var_2 -> + case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> + ( lineP >>= \l -> return (happy_var_1,happy_var_3,l,happy_var_2))}}} + ) (\r -> happyReturn (happyIn10 r)) + +happyReduce_15 = happySpecReduce_1 7# happyReduction_15 +happyReduction_15 happy_x_1 + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> + happyIn11 + (App happy_var_1 [] + )} + +happyReduce_16 = happyReduce 4# 7# happyReduction_16 +happyReduction_16 (happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> + case happyOut14 happy_x_3 of { happy_var_3 -> + happyIn11 + (App happy_var_1 (reverse happy_var_3) + ) `HappyStk` happyRest}} + +happyReduce_17 = happySpecReduce_1 8# happyReduction_17 +happyReduction_17 happy_x_1 + = case happyOut13 happy_x_1 of { happy_var_1 -> + happyIn12 + (reverse happy_var_1 + )} + 
+happyReduce_18 = happySpecReduce_0 8# happyReduction_18 +happyReduction_18 = happyIn12 + ([] + ) + +happyReduce_19 = happySpecReduce_1 9# happyReduction_19 +happyReduction_19 happy_x_1 + = case happyOut11 happy_x_1 of { happy_var_1 -> + happyIn13 + ([happy_var_1] + )} + +happyReduce_20 = happySpecReduce_2 9# happyReduction_20 +happyReduction_20 happy_x_2 + happy_x_1 + = case happyOut13 happy_x_1 of { happy_var_1 -> + case happyOut11 happy_x_2 of { happy_var_2 -> + happyIn13 + (happy_var_2 : happy_var_1 + )}} + +happyReduce_21 = happySpecReduce_1 10# happyReduction_21 +happyReduction_21 happy_x_1 + = case happyOut11 happy_x_1 of { happy_var_1 -> + happyIn14 + ([happy_var_1] + )} + +happyReduce_22 = happySpecReduce_3 10# happyReduction_22 +happyReduction_22 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOut14 happy_x_1 of { happy_var_1 -> + case happyOut11 happy_x_3 of { happy_var_3 -> + happyIn14 + (happy_var_3 : happy_var_1 + )}} + +happyReduce_23 = happySpecReduce_2 11# happyReduction_23 +happyReduction_23 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> + happyIn15 + (Just happy_var_2 + )} + +happyReduce_24 = happySpecReduce_0 11# happyReduction_24 +happyReduction_24 = happyIn15 + (Nothing + ) + +happyReduce_25 = happySpecReduce_2 12# happyReduction_25 +happyReduction_25 happy_x_2 + happy_x_1 + = case happyOut16 happy_x_1 of { happy_var_1 -> + case happyOut17 happy_x_2 of { happy_var_2 -> + happyIn16 + (happy_var_2 : happy_var_1 + )}} + +happyReduce_26 = happySpecReduce_1 12# happyReduction_26 +happyReduction_26 happy_x_1 + = case happyOut17 happy_x_1 of { happy_var_1 -> + happyIn16 + ([happy_var_1] + )} + +happyReduce_27 = happySpecReduce_2 13# happyReduction_27 +happyReduction_27 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> + happyIn17 + (TokenType happy_var_2 + )} + +happyReduce_28 = happySpecReduce_2 13# happyReduction_28 +happyReduction_28 happy_x_2 + happy_x_1 + 
= case happyOut19 happy_x_2 of { happy_var_2 -> + happyIn17 + (TokenSpec happy_var_2 + )} + +happyReduce_29 = happySpecReduce_3 13# happyReduction_29 +happyReduction_29 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> + case happyOut18 happy_x_3 of { happy_var_3 -> + happyIn17 + (TokenName happy_var_2 happy_var_3 False + )}} + +happyReduce_30 = happySpecReduce_3 13# happyReduction_30 +happyReduction_30 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> + case happyOut18 happy_x_3 of { happy_var_3 -> + happyIn17 + (TokenName happy_var_2 happy_var_3 True + )}} + +happyReduce_31 = happySpecReduce_1 13# happyReduction_31 +happyReduction_31 happy_x_1 + = happyIn17 + (TokenImportedIdentity + ) + +happyReduce_32 = happySpecReduce_3 13# happyReduction_32 +happyReduction_32 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> + case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> + happyIn17 + (TokenLexer happy_var_2 happy_var_3 + )}} + +happyReduce_33 = happySpecReduce_2 13# happyReduction_33 +happyReduction_33 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> + happyIn17 + (TokenMonad "()" happy_var_2 ">>=" "return" + )} + +happyReduce_34 = happySpecReduce_3 13# happyReduction_34 +happyReduction_34 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> + case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> + happyIn17 + (TokenMonad happy_var_2 happy_var_3 ">>=" "return" + )}} + +happyReduce_35 = happyReduce 4# 13# happyReduction_35 +happyReduction_35 (happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> + case happyOutTok happy_x_3 of { (TokenInfo 
happy_var_3 TokCodeQuote) -> + case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> + happyIn17 + (TokenMonad "()" happy_var_2 happy_var_3 happy_var_4 + ) `HappyStk` happyRest}}} + +happyReduce_36 = happyReduce 5# 13# happyReduction_36 +happyReduction_36 (happy_x_5 `HappyStk` + happy_x_4 `HappyStk` + happy_x_3 `HappyStk` + happy_x_2 `HappyStk` + happy_x_1 `HappyStk` + happyRest) + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> + case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> + case happyOutTok happy_x_4 of { (TokenInfo happy_var_4 TokCodeQuote) -> + case happyOutTok happy_x_5 of { (TokenInfo happy_var_5 TokCodeQuote) -> + happyIn17 + (TokenMonad happy_var_2 happy_var_3 happy_var_4 happy_var_5 + ) `HappyStk` happyRest}}}} + +happyReduce_37 = happySpecReduce_2 13# happyReduction_37 +happyReduction_37 happy_x_2 + happy_x_1 + = case happyOut21 happy_x_2 of { happy_var_2 -> + happyIn17 + (TokenNonassoc happy_var_2 + )} + +happyReduce_38 = happySpecReduce_2 13# happyReduction_38 +happyReduction_38 happy_x_2 + happy_x_1 + = case happyOut21 happy_x_2 of { happy_var_2 -> + happyIn17 + (TokenRight happy_var_2 + )} + +happyReduce_39 = happySpecReduce_2 13# happyReduction_39 +happyReduction_39 happy_x_2 + happy_x_1 + = case happyOut21 happy_x_2 of { happy_var_2 -> + happyIn17 + (TokenLeft happy_var_2 + )} + +happyReduce_40 = happySpecReduce_2 13# happyReduction_40 +happyReduction_40 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenNum happy_var_2 TokNum) -> + happyIn17 + (TokenExpect happy_var_2 + )} + +happyReduce_41 = happySpecReduce_2 13# happyReduction_41 +happyReduction_41 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> + happyIn17 + (TokenError happy_var_2 + )} + +happyReduce_42 = happySpecReduce_2 13# happyReduction_42 +happyReduction_42 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> + 
happyIn17 + (TokenErrorHandlerType happy_var_2 + )} + +happyReduce_43 = happySpecReduce_2 13# happyReduction_43 +happyReduction_43 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> + happyIn17 + (TokenAttributetype happy_var_2 + )} + +happyReduce_44 = happySpecReduce_3 13# happyReduction_44 +happyReduction_44 happy_x_3 + happy_x_2 + happy_x_1 + = case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokId) -> + case happyOutTok happy_x_3 of { (TokenInfo happy_var_3 TokCodeQuote) -> + happyIn17 + (TokenAttribute happy_var_2 happy_var_3 + )}} + +happyReduce_45 = happySpecReduce_1 14# happyReduction_45 +happyReduction_45 happy_x_1 + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> + happyIn18 + (Just happy_var_1 + )} + +happyReduce_46 = happySpecReduce_0 14# happyReduction_46 +happyReduction_46 = happyIn18 + (Nothing + ) + +happyReduce_47 = happySpecReduce_2 15# happyReduction_47 +happyReduction_47 happy_x_2 + happy_x_1 + = case happyOut20 happy_x_1 of { happy_var_1 -> + case happyOut19 happy_x_2 of { happy_var_2 -> + happyIn19 + (happy_var_1:happy_var_2 + )}} + +happyReduce_48 = happySpecReduce_1 15# happyReduction_48 +happyReduction_48 happy_x_1 + = case happyOut20 happy_x_1 of { happy_var_1 -> + happyIn19 + ([happy_var_1] + )} + +happyReduce_49 = happySpecReduce_2 16# happyReduction_49 +happyReduction_49 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> + case happyOutTok happy_x_2 of { (TokenInfo happy_var_2 TokCodeQuote) -> + happyIn20 + ((happy_var_1,happy_var_2) + )}} + +happyReduce_50 = happySpecReduce_2 17# happyReduction_50 +happyReduction_50 happy_x_2 + happy_x_1 + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokId) -> + case happyOut21 happy_x_2 of { happy_var_2 -> + happyIn21 + (happy_var_1 : happy_var_2 + )}} + +happyReduce_51 = happySpecReduce_0 17# happyReduction_51 +happyReduction_51 = happyIn21 + ([] + ) + +happyReduce_52 = 
happySpecReduce_1 18# happyReduction_52 +happyReduction_52 happy_x_1 + = case happyOutTok happy_x_1 of { (TokenInfo happy_var_1 TokCodeQuote) -> + happyIn22 + (Just happy_var_1 + )} + +happyReduce_53 = happySpecReduce_0 18# happyReduction_53 +happyReduction_53 = happyIn22 + (Nothing + ) + +happyNewToken action sts stk + = lexer(\tk -> + let cont i = happyDoAction i tk action sts stk in + case tk of { + TokenEOF -> happyDoAction 28# tk action sts stk; + TokenInfo happy_dollar_dollar TokId -> cont 1#; + TokenKW TokSpecId_TokenType -> cont 2#; + TokenKW TokSpecId_Token -> cont 3#; + TokenKW TokSpecId_Name -> cont 4#; + TokenKW TokSpecId_Partial -> cont 5#; + TokenKW TokSpecId_Lexer -> cont 6#; + TokenKW TokSpecId_ImportedIdentity -> cont 7#; + TokenKW TokSpecId_Monad -> cont 8#; + TokenKW TokSpecId_Nonassoc -> cont 9#; + TokenKW TokSpecId_Left -> cont 10#; + TokenKW TokSpecId_Right -> cont 11#; + TokenKW TokSpecId_Prec -> cont 12#; + TokenKW TokSpecId_Expect -> cont 13#; + TokenKW TokSpecId_Error -> cont 14#; + TokenKW TokSpecId_ErrorHandlerType -> cont 15#; + TokenKW TokSpecId_Attribute -> cont 16#; + TokenKW TokSpecId_Attributetype -> cont 17#; + TokenInfo happy_dollar_dollar TokCodeQuote -> cont 18#; + TokenNum happy_dollar_dollar TokNum -> cont 19#; + TokenKW TokColon -> cont 20#; + TokenKW TokSemiColon -> cont 21#; + TokenKW TokDoubleColon -> cont 22#; + TokenKW TokDoublePercent -> cont 23#; + TokenKW TokBar -> cont 24#; + TokenKW TokParenL -> cont 25#; + TokenKW TokParenR -> cont 26#; + TokenKW TokComma -> cont 27#; + _ -> happyError' tk + }) + +happyError_ 28# tk = happyError' tk +happyError_ _ tk = happyError' tk + +happyThen :: () => P a -> (a -> P b) -> P b +happyThen = (>>=) +happyReturn :: () => a -> P a +happyReturn = (return) +happyThen1 = happyThen +happyReturn1 :: () => a -> P a +happyReturn1 = happyReturn +happyError' :: () => (Token) -> P a +happyError' tk = (\token -> happyError) tk + +ourParser = happySomeParser where + happySomeParser = happyThen 
(happyParse 0#) (\x -> happyReturn (happyOut4 x)) + +happySeq = happyDontSeq + + +happyError :: P a +happyError = lineP >>= \l -> fail (show l ++ ": Parse error\n") +{-# LINE 1 "templates/GenericTemplate.hs" #-} +{-# LINE 1 "templates/GenericTemplate.hs" #-} +{-# LINE 1 "" #-} +{-# LINE 1 "" #-} +{-# LINE 1 "templates/GenericTemplate.hs" #-} +-- Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp + +{-# LINE 13 "templates/GenericTemplate.hs" #-} + + + + + +-- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. +#if __GLASGOW_HASKELL__ > 706 +#define LT(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.<# m)) :: Bool) +#define GTE(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.>=# m)) :: Bool) +#define EQ(n,m) ((Happy_GHC_Exts.tagToEnum# (n Happy_GHC_Exts.==# m)) :: Bool) +#else +#define LT(n,m) (n Happy_GHC_Exts.<# m) +#define GTE(n,m) (n Happy_GHC_Exts.>=# m) +#define EQ(n,m) (n Happy_GHC_Exts.==# m) +#endif +{-# LINE 46 "templates/GenericTemplate.hs" #-} + + +data Happy_IntList = HappyCons Happy_GHC_Exts.Int# Happy_IntList + + + + + +{-# LINE 67 "templates/GenericTemplate.hs" #-} + +{-# LINE 77 "templates/GenericTemplate.hs" #-} + +{-# LINE 86 "templates/GenericTemplate.hs" #-} + +infixr 9 `HappyStk` +data HappyStk a = HappyStk a (HappyStk a) + +----------------------------------------------------------------------------- +-- starting the parse + +happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll + +----------------------------------------------------------------------------- +-- Accepting the parse + +-- If the current token is 0#, it means we've just accepted a partial +-- parse (a %partial parser). We must ignore the saved token on the top of +-- the stack in this case. 
+happyAccept 0# tk st sts (_ `HappyStk` ans `HappyStk` _) = + happyReturn1 ans +happyAccept j tk st sts (HappyStk ans _) = + (happyTcHack j (happyTcHack st)) (happyReturn1 ans) + +----------------------------------------------------------------------------- +-- Arrays only: do the next action + + + +happyDoAction i tk st + = {- nothing -} + + + case action of + 0# -> {- nothing -} + happyFail i tk st + -1# -> {- nothing -} + happyAccept i tk st + n | LT(n,(0# :: Happy_GHC_Exts.Int#)) -> {- nothing -} + + (happyReduceArr Happy_Data_Array.! rule) i tk st + where rule = (Happy_GHC_Exts.I# ((Happy_GHC_Exts.negateInt# ((n Happy_GHC_Exts.+# (1# :: Happy_GHC_Exts.Int#)))))) + n -> {- nothing -} + + + happyShift new_state i tk st + where new_state = (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) + where off = indexShortOffAddr happyActOffsets st + off_i = (off Happy_GHC_Exts.+# i) + check = if GTE(off_i,(0# :: Happy_GHC_Exts.Int#)) + then EQ(indexShortOffAddr happyCheck off_i, i) + else False + action + | check = indexShortOffAddr happyTable off_i + | otherwise = indexShortOffAddr happyDefActions st + + +indexShortOffAddr (HappyA# arr) off = + Happy_GHC_Exts.narrow16Int# i + where + i = Happy_GHC_Exts.word2Int# (Happy_GHC_Exts.or# (Happy_GHC_Exts.uncheckedShiftL# high 8#) low) + high = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr (off' Happy_GHC_Exts.+# 1#))) + low = Happy_GHC_Exts.int2Word# (Happy_GHC_Exts.ord# (Happy_GHC_Exts.indexCharOffAddr# arr off')) + off' = off Happy_GHC_Exts.*# 2# + + + + + +data HappyAddr = HappyA# Happy_GHC_Exts.Addr# + + + + +----------------------------------------------------------------------------- +-- HappyState data type (not arrays) + +{-# LINE 170 "templates/GenericTemplate.hs" #-} + +----------------------------------------------------------------------------- +-- Shifting a token + +happyShift new_state 0# tk st sts stk@(x `HappyStk` _) = + let i = (case Happy_GHC_Exts.unsafeCoerce# x of { 
(Happy_GHC_Exts.I# (i)) -> i }) in +-- trace "shifting the error token" $ + happyDoAction i tk new_state (HappyCons (st) (sts)) (stk) + +happyShift new_state i tk st sts stk = + happyNewToken new_state (HappyCons (st) (sts)) ((happyInTok (tk))`HappyStk`stk) + +-- happyReduce is specialised for the common cases. + +happySpecReduce_0 i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happySpecReduce_0 nt fn j tk st@((action)) sts stk + = happyGoto nt j tk st (HappyCons (st) (sts)) (fn `HappyStk` stk) + +happySpecReduce_1 i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happySpecReduce_1 nt fn j tk _ sts@((HappyCons (st@(action)) (_))) (v1`HappyStk`stk') + = let r = fn v1 in + happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + +happySpecReduce_2 i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happySpecReduce_2 nt fn j tk _ (HappyCons (_) (sts@((HappyCons (st@(action)) (_))))) (v1`HappyStk`v2`HappyStk`stk') + = let r = fn v1 v2 in + happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + +happySpecReduce_3 i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happySpecReduce_3 nt fn j tk _ (HappyCons (_) ((HappyCons (_) (sts@((HappyCons (st@(action)) (_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk') + = let r = fn v1 v2 v3 in + happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk')) + +happyReduce k i fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happyReduce k nt fn j tk st sts stk + = case happyDrop (k Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) sts of + sts1@((HappyCons (st1@(action)) (_))) -> + let r = fn stk in -- it doesn't hurt to always seq here... 
+ happyDoSeq r (happyGoto nt j tk st1 sts1 r) + +happyMonadReduce k nt fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happyMonadReduce k nt fn j tk st sts stk = + case happyDrop k (HappyCons (st) (sts)) of + sts1@((HappyCons (st1@(action)) (_))) -> + let drop_stk = happyDropStk k stk in + happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk)) + +happyMonad2Reduce k nt fn 0# tk st sts stk + = happyFail 0# tk st sts stk +happyMonad2Reduce k nt fn j tk st sts stk = + case happyDrop k (HappyCons (st) (sts)) of + sts1@((HappyCons (st1@(action)) (_))) -> + let drop_stk = happyDropStk k stk + + off = indexShortOffAddr happyGotoOffsets st1 + off_i = (off Happy_GHC_Exts.+# nt) + new_state = indexShortOffAddr happyTable off_i + + + + in + happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk)) + +happyDrop 0# l = l +happyDrop n (HappyCons (_) (t)) = happyDrop (n Happy_GHC_Exts.-# (1# :: Happy_GHC_Exts.Int#)) t + +happyDropStk 0# l = l +happyDropStk n (x `HappyStk` xs) = happyDropStk (n Happy_GHC_Exts.-# (1#::Happy_GHC_Exts.Int#)) xs + +----------------------------------------------------------------------------- +-- Moving to a new state after a reduction + + +happyGoto nt j tk st = + {- nothing -} + happyDoAction j tk new_state + where off = indexShortOffAddr happyGotoOffsets st + off_i = (off Happy_GHC_Exts.+# nt) + new_state = indexShortOffAddr happyTable off_i + + + + +----------------------------------------------------------------------------- +-- Error recovery (0# is the error token) + +-- parse error if we are in recovery and we fail again +happyFail 0# tk old_st _ stk@(x `HappyStk` _) = + let i = (case Happy_GHC_Exts.unsafeCoerce# x of { (Happy_GHC_Exts.I# (i)) -> i }) in +-- trace "failing" $ + happyError_ i tk + +{- We don't need state discarding for our restricted implementation of + "error". 
In fact, it can cause some bogus parses, so I've disabled it + for now --SDM + +-- discard a state +happyFail 0# tk old_st (HappyCons ((action)) (sts)) + (saved_tok `HappyStk` _ `HappyStk` stk) = +-- trace ("discarding state, depth " ++ show (length stk)) $ + happyDoAction 0# tk action sts ((saved_tok`HappyStk`stk)) +-} + +-- Enter error recovery: generate an error token, +-- save the old token and carry on. +happyFail i tk (action) sts stk = +-- trace "entering error recovery" $ + happyDoAction 0# tk action sts ( (Happy_GHC_Exts.unsafeCoerce# (Happy_GHC_Exts.I# (i))) `HappyStk` stk) + +-- Internal happy errors: + +notHappyAtAll :: a +notHappyAtAll = error "Internal Happy error\n" + +----------------------------------------------------------------------------- +-- Hack to get the typechecker to accept our action functions + + +happyTcHack :: Happy_GHC_Exts.Int# -> a -> a +happyTcHack x y = y +{-# INLINE happyTcHack #-} + + +----------------------------------------------------------------------------- +-- Seq-ing. If the --strict flag is given, then Happy emits +-- happySeq = happyDoSeq +-- otherwise it emits +-- happySeq = happyDontSeq + +happyDoSeq, happyDontSeq :: a -> b -> b +happyDoSeq a b = a `seq` b +happyDontSeq a b = b + +----------------------------------------------------------------------------- +-- Don't inline any functions from the template. GHC has a nasty habit +-- of deciding to inline happyGoto everywhere, which increases the size of +-- the generated parser quite a bit. 
+ + +{-# NOINLINE happyDoAction #-} +{-# NOINLINE happyTable #-} +{-# NOINLINE happyCheck #-} +{-# NOINLINE happyActOffsets #-} +{-# NOINLINE happyGotoOffsets #-} +{-# NOINLINE happyDefActions #-} + +{-# NOINLINE happyShift #-} +{-# NOINLINE happySpecReduce_0 #-} +{-# NOINLINE happySpecReduce_1 #-} +{-# NOINLINE happySpecReduce_2 #-} +{-# NOINLINE happySpecReduce_3 #-} +{-# NOINLINE happyReduce #-} +{-# NOINLINE happyMonadReduce #-} +{-# NOINLINE happyGoto #-} +{-# NOINLINE happyFail #-} + +-- end of Happy Template. diff -Nru happy-1.19.5/src/Parser.ly happy-1.19.8/src/Parser.ly --- happy-1.19.5/src/Parser.ly 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/Parser.ly 1970-01-01 00:00:00.000000000 +0000 @@ -1,146 +0,0 @@ ------------------------------------------------------------------------------ -$Id: Parser.ly,v 1.15 2005/01/26 01:10:42 ross Exp $ - -The parser. - -(c) 1993-2000 Andy Gill, Simon Marlow ------------------------------------------------------------------------------ - -> { -> {-# OPTIONS_GHC -w #-} -> module Parser (ourParser,AbsSyn) where -> import ParseMonad -> import AbsSyn -> import Lexer -> } - -> %name ourParser -> %tokentype { Token } -> %token -> id { TokenInfo $$ TokId } -> spec_tokentype { TokenKW TokSpecId_TokenType } -> spec_token { TokenKW TokSpecId_Token } -> spec_name { TokenKW TokSpecId_Name } -> spec_partial { TokenKW TokSpecId_Partial } -> spec_lexer { TokenKW TokSpecId_Lexer } -> spec_imported_identity { TokenKW TokSpecId_ImportedIdentity } -> spec_monad { TokenKW TokSpecId_Monad } -> spec_nonassoc { TokenKW TokSpecId_Nonassoc } -> spec_left { TokenKW TokSpecId_Left } -> spec_right { TokenKW TokSpecId_Right } -> spec_prec { TokenKW TokSpecId_Prec } -> spec_expect { TokenKW TokSpecId_Expect } -> spec_error { TokenKW TokSpecId_Error } -> spec_attribute { TokenKW TokSpecId_Attribute } -> spec_attributetype { TokenKW TokSpecId_Attributetype } -> code { TokenInfo $$ TokCodeQuote } -> int { TokenNum $$ TokNum } -> ":" { TokenKW 
TokColon } -> ";" { TokenKW TokSemiColon } -> "::" { TokenKW TokDoubleColon } -> "%%" { TokenKW TokDoublePercent } -> "|" { TokenKW TokBar } -> "(" { TokenKW TokParenL } -> ")" { TokenKW TokParenR } -> "," { TokenKW TokComma } - -> %monad { P } -> %lexer { lexer } { TokenEOF } - -> %% - -> parser :: { AbsSyn } -> : optCode tokInfos "%%" rules optCode -> { AbsSyn $1 (reverse $2) (reverse $4) $5 } - -> rules :: { [Rule] } -> : rules rule { $2 : $1 } -> | rule { [$1] } - -> rule :: { Rule } -> : id params "::" code ":" prods { ($1,$2,$6,Just $4) } -> | id params "::" code id ":" prods { ($1,$2,$7,Just $4) } -> | id params ":" prods { ($1,$2,$4,Nothing) } - -> params :: { [String] } -> : "(" comma_ids ")" { reverse $2 } -> | {- empty -} { [] } - -> comma_ids :: { [String] } -> : id { [$1] } -> | comma_ids "," id { $3 : $1 } - -> prods :: { [Prod] } -> : prod "|" prods { $1 : $3 } -> | prod { [$1] } - -> prod :: { Prod } -> : terms prec code ";" {% lineP >>= \l -> return ($1,$3,l,$2) } -> | terms prec code {% lineP >>= \l -> return ($1,$3,l,$2) } - -> term :: { Term } -> : id { App $1 [] } -> | id "(" comma_terms ")" { App $1 (reverse $3) } - -> terms :: { [Term] } -> : terms_rev { reverse $1 } -> | { [] } - -> terms_rev :: { [Term] } -> : term { [$1] } -> | terms_rev term { $2 : $1 } - -> comma_terms :: { [Term] } -> : term { [$1] } -> | comma_terms "," term { $3 : $1 } - -> prec :: { Maybe String } -> : spec_prec id { Just $2 } -> | { Nothing } - -> tokInfos :: { [Directive String] } -> : tokInfos tokInfo { $2 : $1 } -> | tokInfo { [$1] } - -> tokInfo :: { Directive String } -> : spec_tokentype code { TokenType $2 } -> | spec_token tokenSpecs { TokenSpec $2 } -> | spec_name id optStart { TokenName $2 $3 False } -> | spec_partial id optStart { TokenName $2 $3 True } -> | spec_imported_identity { TokenImportedIdentity } -> | spec_lexer code code { TokenLexer $2 $3 } -> | spec_monad code { TokenMonad "()" $2 ">>=" "return" } -> | spec_monad code code { TokenMonad $2 $3 
">>=" "return" } -> | spec_monad code code code { TokenMonad "()" $2 $3 $4 } -> | spec_monad code code code code { TokenMonad $2 $3 $4 $5 } -> | spec_nonassoc ids { TokenNonassoc $2 } -> | spec_right ids { TokenRight $2 } -> | spec_left ids { TokenLeft $2 } -> | spec_expect int { TokenExpect $2 } -> | spec_error code { TokenError $2 } -> | spec_attributetype code { TokenAttributetype $2 } -> | spec_attribute id code { TokenAttribute $2 $3 } - -> optStart :: { Maybe String } -> : id { Just $1 } -> | {- nothing -} { Nothing } - -> tokenSpecs :: { [(String,String)] } -> : tokenSpec tokenSpecs { $1:$2 } -> | tokenSpec { [$1] } - -> tokenSpec :: { (String,String) } -> : id code { ($1,$2) } - -> ids :: { [String] } -> : id ids { $1 : $2 } -> | {- nothing -} { [] } - -> optCode :: { Maybe String } -> : code { Just $1 } -> | {- nothing -} { Nothing } - -> { -> happyError :: P a -> happyError = lineP >>= \l -> fail (show l ++ ": Parse error\n") -> } diff -Nru happy-1.19.5/src/PrettyGrammar.hs happy-1.19.8/src/PrettyGrammar.hs --- happy-1.19.5/src/PrettyGrammar.hs 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/src/PrettyGrammar.hs 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,92 @@ +module PrettyGrammar where + +import AbsSyn + +render :: Doc -> String +render = maybe "" ($ "") + +ppAbsSyn :: AbsSyn -> Doc +ppAbsSyn (AbsSyn _ ds rs _) = vsep (vcat (map ppDirective ds) : map ppRule rs) + +ppDirective :: Directive a -> Doc +ppDirective dir = + case dir of + TokenNonassoc xs -> prec "%nonassoc" xs + TokenRight xs -> prec "%right" xs + TokenLeft xs -> prec "%left" xs + _ -> empty + where + prec x xs = text x <+> hsep (map text xs) + +ppRule :: Rule -> Doc +ppRule (name,_,prods,_) = text name + $$ vcat (zipWith (<+>) starts (map ppProd prods)) + where + starts = text " :" : repeat (text " |") + +ppProd :: Prod -> Doc +ppProd (ts,_,_,p) = psDoc <+> precDoc + where + psDoc = if null ts then text "{- empty -}" else hsep (map ppTerm ts) + precDoc = maybe empty (\x -> text 
"%prec" <+> text x) p + +ppTerm :: Term -> Doc +ppTerm (App x ts) = text x <> ppTuple (map ppTerm ts) + +ppTuple :: [Doc] -> Doc +ppTuple [] = empty +ppTuple xs = parens (hsep (punctuate comma xs)) + +-------------------------------------------------------------------------------- +-- Pretty printing combinator + +type Doc = Maybe ShowS + +empty :: Doc +empty = Nothing + +punctuate :: Doc -> [Doc] -> [Doc] +punctuate _ [] = [] +punctuate _ [x] = [x] +punctuate sep (x : xs) = (x <> sep) : punctuate sep xs + +comma :: Doc +comma = char ',' + +char :: Char -> Doc +char x = Just (showChar x) + +text :: String -> Doc +text x = if null x then Nothing else Just (showString x) + +(<+>) :: Doc -> Doc -> Doc +Nothing <+> y = y +x <+> Nothing = x +x <+> y = x <> char ' ' <> y + +(<>) :: Doc -> Doc -> Doc +Nothing <> y = y +x <> Nothing = x +Just x <> Just y = Just (x . y) + +($$) :: Doc -> Doc -> Doc +Nothing $$ y = y +x $$ Nothing = x +x $$ y = x <> char '\n' <> y + +hsep :: [Doc] -> Doc +hsep = hcat . punctuate (char ' ') + +vcat :: [Doc] -> Doc +vcat = foldr ($$) empty + +vsep :: [Doc] -> Doc +vsep = vcat . 
punctuate (char '\n') + +parens :: Doc -> Doc +parens x = char '(' <> x <> char ')' + +hcat :: [Doc] -> Doc +hcat = foldr (<>) empty + + diff -Nru happy-1.19.5/src/ProduceCode.lhs happy-1.19.8/src/ProduceCode.lhs --- happy-1.19.5/src/ProduceCode.lhs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/ProduceCode.lhs 2017-10-12 07:46:11.000000000 +0000 @@ -14,13 +14,15 @@ > interleave, interleave', maybestr, > brack, brack' ) -> import Data.Maybe ( isJust, isNothing ) +> import Data.Maybe ( isJust, isNothing, fromMaybe ) > import Data.Char > import Data.List +> import Control.Monad ( forM_ ) > import Control.Monad.ST -> import Data.Array.ST ( STUArray ) -> import Data.Array.Unboxed ( UArray ) +> import Data.Bits ( setBit ) +> import Data.Array.ST ( STUArray ) +> import Data.Array.Unboxed ( UArray ) > import Data.Array.MArray > import Data.Array.IArray @@ -47,6 +49,7 @@ > , first_nonterm = first_nonterm' > , eof_term = eof > , first_term = fst_term +> , token_names = token_names' > , lexer = lexer' > , imported_identity = imported_identity' > , monad = (use_monad,monad_context,monad_tycon,monad_then,monad_return) @@ -54,6 +57,7 @@ > , token_type = token_type' > , starts = starts' > , error_handler = error_handler' +> , error_sig = error_sig' > , attributetype = attributetype' > , attributes = attributes' > }) @@ -66,6 +70,7 @@ > -- don't screw up any OPTIONS pragmas in the header. > . produceAbsSynDecl . nl > . produceTypes +> . produceExpListPerState > . produceActionTable target > . produceReductions > . produceTokenConverter . nl @@ -85,13 +90,42 @@ > -- fix, others not so easy, and others would require GHC version > -- #ifdefs. For now I'm just disabling all of them. > -> top_opts = nowarn_opts . -> case top_options of -> "" -> str "" -> _ -> str (unwords [ "{-# OPTIONS" -> , top_options -> , "#-}" -> ]) . nl +> partTySigs_opts = ifGeGhc710 (str "{-# OPTIONS_GHC -XPartialTypeSignatures #-}" . 
nl) +> +> intMaybeHash | ghc = str "Happy_GHC_Exts.Int#" +> | otherwise = str "Int" +> +> -- Parsing monad and its constraints +> pty = str monad_tycon +> pcont = str monad_context +> +> -- If GHC is enabled, wrap the content in a CPP ifdef that includes the +> -- content and tests whether the GHC version is >= 7.10.3 +> ifGeGhc710 :: (String -> String) -> String -> String +> ifGeGhc710 content | ghc = str "#if __GLASGOW_HASKELL__ >= 710" . nl +> . content +> . str "#endif" . nl +> | otherwise = id +> +> n_missing_types = length (filter isNothing (elems nt_types)) +> happyAbsSyn = str "(HappyAbsSyn " . str wild_tyvars . str ")" +> where wild_tyvars = unwords (replicate n_missing_types "_") +> +> -- This decides how to include (if at all) a type signature +> -- See +> filterTypeSig :: (String -> String) -> String -> String +> filterTypeSig content | n_missing_types == 0 = content +> | otherwise = ifGeGhc710 content +> +> top_opts = +> nowarn_opts +> . (case top_options of +> "" -> str "" +> _ -> str (unwords [ "{-# OPTIONS" +> , top_options +> , "#-}" +> ]) . nl) +> . partTySigs_opts %----------------------------------------------------------------------------- Make the abstract syntax type declaration, of the form: @@ -125,14 +159,14 @@ > bhappy_item = brack' happy_item > > inject n ty -> = mkHappyIn n . str " :: " . type_param n ty +> = mkHappyIn n . str " :: " . typeParam n ty > . str " -> " . bhappy_item . char '\n' > . mkHappyIn n . str " x = Happy_GHC_Exts.unsafeCoerce# x\n" > . str "{-# INLINE " . mkHappyIn n . str " #-}" > > extract n ty > = mkHappyOut n . str " :: " . bhappy_item -> . str " -> " . type_param n ty . char '\n' +> . str " -> " . typeParam n ty . char '\n' > . mkHappyOut n . str " x = Happy_GHC_Exts.unsafeCoerce# x\n" > . str "{-# INLINE " . mkHappyOut n . str " #-}" > in @@ -178,7 +212,7 @@ > . str "\n\t= HappyTerminal " . token > . str "\n\t| HappyErrorToken Int\n" > . interleave "\n" -> [ str "\t| " . makeAbsSynCon n . strspace . 
type_param n ty +> [ str "\t| " . makeAbsSynCon n . strspace . typeParam n ty > | (n, ty) <- assocs nt_types, > (nt_types_index ! n) == n] @@ -214,9 +248,7 @@ > | otherwise = id -> where intMaybeHash | ghc = str "Happy_GHC_Exts.Int#" -> | otherwise = str "Int" -> tokens = +> where tokens = > case lexer' of > Nothing -> char '[' . token . str "] -> " > Just _ -> id @@ -291,7 +323,9 @@ > = mkReductionHdr (showInt lt) monad_reduce > . char '(' . interleave " `HappyStk`\n\t" tokPatterns > . str "happyRest) tk\n\t = happyThen (" +> . str "(" > . tokLets (char '(' . str code' . char ')') +> . str ")" > . (if monad_pass_token then str " tk" else id) > . str "\n\t) (\\r -> happyReturn (" . this_absSynCon . str " r))" @@ -307,7 +341,7 @@ > id > else > nl . reductionFun . strspace -> . interleave " " (map str (take (length toks) (repeat "_"))) +> . interleave " " (replicate (length toks) (str "_")) > . str " = notHappyAtAll ") > | otherwise @@ -333,7 +367,18 @@ > | otherwise = nt > > mkReductionHdr lt' s = -> mkReduceFun i . str " = " +> let tysig = case lexer' of +> Nothing -> id +> _ | target == TargetArrayBased -> +> mkReduceFun i . str " :: " . pcont +> . str " => " . intMaybeHash +> . str " -> " . str token_type' +> . str " -> " . intMaybeHash +> . str " -> Happy_IntList -> HappyStk " +> . happyAbsSyn . str " -> " +> . pty . str " " . happyAbsSyn . str "\n" +> | otherwise -> id in +> filterTypeSig tysig . mkReduceFun i . str " = " > . str s . strspace . lt' . strspace . showInt adjusted_nt > . strspace . reductionFun . nl > . reductionFun . strspace @@ -361,7 +406,7 @@ > tokLets code'' > | coerce && not (null cases) > = interleave "\n\t" cases -> . code'' . str (take (length cases) (repeat '}')) +> . code'' . str (replicate (length cases) '}') > | otherwise = code'' > > cases = [ str "case " . extract t . strspace . mkDummyVar n @@ -391,15 +436,32 @@ > . str "let cont i = " . doAction . str " sts stk tks in\n\t" > . str "case tk of {\n\t" > . 
interleave ";\n\t" (map doToken token_rep) -> . str "_ -> happyError' (tk:tks)\n\t" +> . str "_ -> happyError' ((tk:tks), [])\n\t" > . str "}\n\n" -> . str "happyError_ " . eofTok . str " tk tks = happyError' tks\n" -> . str "happyError_ _ tk tks = happyError' (tk:tks)\n"; +> . str "happyError_ explist " . eofTok . str " tk tks = happyError' (tks, explist)\n" +> . str "happyError_ explist _ tk tks = happyError' ((tk:tks), explist)\n"; > -- when the token is EOF, tk == _|_ (notHappyAtAll) > -- so we must not pass it to happyError' > Just (lexer'',eof') -> -> str "happyNewToken action sts stk\n\t= " +> case (target, ghc) of +> (TargetHaskell, True) -> +> str "happyNewToken :: " . pcont . str " => " +> . str "(Happy_GHC_Exts.Int#\n" +> . str " -> Happy_GHC_Exts.Int#\n" +> . str " -> " . token . str "\n" +> . str " -> HappyState " . token . str " (t -> " +> . pty . str " a)\n" +> . str " -> [HappyState " . token . str " (t -> " +> . pty . str " a)]\n" +> . str " -> t\n" +> . str " -> " . pty . str " a)\n" +> . str " -> [HappyState " . token . str " (t -> " +> . pty . str " a)]\n" +> . str " -> t\n" +> . str " -> " . pty . str " a\n" +> _ -> id +> . str "happyNewToken action sts stk\n\t= " > . str lexer'' > . str "(\\tk -> " > . str "\n\tlet cont i = " @@ -409,10 +471,10 @@ > . str (eof' ++ " -> ") > . eofAction "tk" . str ";\n\t" > . interleave ";\n\t" (map doToken token_rep) -> . str "_ -> happyError' tk\n\t" +> . str "_ -> happyError' (tk, [])\n\t" > . str "})\n\n" -> . str "happyError_ " . eofTok . str " tk = happyError' tk\n" -> . str "happyError_ _ tk = happyError' tk\n"; +> . str "happyError_ explist " . eofTok . str " tk = happyError' (tk, explist)\n" +> . str "happyError_ explist _ tk = happyError' (tk, explist)\n"; > -- superfluous pattern match needed to force happyError_ to > -- have the correct type. > } @@ -517,7 +579,26 @@ > . produceReduceArray > . str "happy_n_terms = " . shows n_terminals . str " :: Int\n" > . str "happy_n_nonterms = " . 
shows n_nonterminals . str " :: Int\n\n" - +> +> produceExpListPerState +> = produceExpListArray +> . str "{-# NOINLINE happyExpListPerState #-}\n" +> . str "happyExpListPerState st =\n" +> . str " token_strs_expected\n" +> . str " where token_strs = " . str (show $ elems token_names') . str "\n" +> . str " bit_start = st * " . str (show nr_tokens) . str "\n" +> . str " bit_end = (st + 1) * " . str (show nr_tokens) . str "\n" +> . str " read_bit = readArrayBit happyExpList\n" +> . str " bits = map read_bit [bit_start..bit_end - 1]\n" +> . str " bits_indexed = zip bits [0.." +> . str (show (nr_tokens - 1)) . str "]\n" +> . str " token_strs_expected = concatMap f bits_indexed\n" +> . str " f (False, _) = []\n" +> . str " f (True, nr) = [token_strs !! nr]\n" +> . str "\n" +> where (first_token, last_token) = bounds token_names' +> nr_tokens = last_token - first_token + 1 +> > produceStateFunction goto' (state, acts) > = foldr (.) id (map produceActions assocs_acts) > . foldr (.) id (map produceGotos (assocs gotos)) @@ -526,18 +607,32 @@ > then str " x = happyTcHack x " > else str " _ = ") > . mkAction default_act +> . (case default_act of +> LR'Fail -> callHappyExpListPerState +> LR'MustFail -> callHappyExpListPerState +> _ -> str "") > . str "\n\n" > > where gotos = goto' ! state > +> callHappyExpListPerState = str " (happyExpListPerState " +> . str (show state) . str ")" +> > produceActions (_, LR'Fail{-'-}) = id > produceActions (t, action'@(LR'Reduce{-'-} _ _)) > | action' == default_act = id -> | otherwise = actionFunction t -> . mkAction action' . str "\n" +> | otherwise = producePossiblyFailingAction t action' > produceActions (t, action') +> = producePossiblyFailingAction t action' +> +> producePossiblyFailingAction t action' > = actionFunction t -> . mkAction action' . str "\n" +> . mkAction action' +> . (case action' of +> LR'Fail -> str " []" +> LR'MustFail -> str " []" +> _ -> str "") +> . 
str "\n" > > produceGotos (t, Goto i) > = actionFunction t @@ -559,14 +654,23 @@ > | ghc > = str "happyActOffsets :: HappyAddr\n" > . str "happyActOffsets = HappyA# \"" --" -> . str (hexChars act_offs) +> . str (checkedHexChars min_off act_offs) > . str "\"#\n\n" --" > > . str "happyGotoOffsets :: HappyAddr\n" > . str "happyGotoOffsets = HappyA# \"" --" -> . str (hexChars goto_offs) +> . str (checkedHexChars min_off goto_offs) > . str "\"#\n\n" --" > +> . str "happyAdjustOffset :: Happy_GHC_Exts.Int# -> Happy_GHC_Exts.Int#\n" +> . str "happyAdjustOffset off = " +> . (if length table < 32768 +> then str "off" +> else str "if happyLt off (" . shows min_off . str "# :: Happy_GHC_Exts.Int#)" +> . str " then off Happy_GHC_Exts.+# 65536#" +> . str " else off") +> . str "\n\n" --" +> > . str "happyDefActions :: HappyAddr\n" > . str "happyDefActions = HappyA# \"" --" > . str (hexChars defaults) @@ -585,19 +689,22 @@ > | otherwise > = str "happyActOffsets :: Happy_Data_Array.Array Int Int\n" > . str "happyActOffsets = Happy_Data_Array.listArray (0," -> . shows (n_states) . str ") ([" +> . shows n_states . str ") ([" > . interleave' "," (map shows act_offs) > . str "\n\t])\n\n" > > . str "happyGotoOffsets :: Happy_Data_Array.Array Int Int\n" > . str "happyGotoOffsets = Happy_Data_Array.listArray (0," -> . shows (n_states) . str ") ([" +> . shows n_states . str ") ([" > . interleave' "," (map shows goto_offs) > . str "\n\t])\n\n" +> +> . str "happyAdjustOffset :: Int -> Int\n" +> . str "happyAdjustOffset = id\n\n" > > . str "happyDefActions :: Happy_Data_Array.Array Int Int\n" > . str "happyDefActions = Happy_Data_Array.listArray (0," -> . shows (n_states) . str ") ([" +> . shows n_states . str ") ([" > . interleave' "," (map shows defaults) > . str "\n\t])\n\n" > @@ -612,15 +719,28 @@ > . shows table_size . str ") ([" > . interleave' "," (map shows table) > . str "\n\t])\n\n" -> + +> produceExpListArray +> | ghc +> = str "happyExpList :: HappyAddr\n" +> . 
str "happyExpList = HappyA# \"" --" +> . str (hexChars explist) +> . str "\"#\n\n" --" +> | otherwise +> = str "happyExpList :: Happy_Data_Array.Array Int Int\n" +> . str "happyExpList = Happy_Data_Array.listArray (0," +> . shows table_size . str ") ([" +> . interleave' "," (map shows explist) +> . str "\n\t])\n\n" + > (_, last_state) = bounds action > n_states = last_state + 1 > n_terminals = length terms > n_nonterminals = length nonterms - n_starts -- lose %starts > -> (act_offs,goto_offs,table,defaults,check) +> (act_offs,goto_offs,table,defaults,check,explist,min_off) > = mkTables action goto first_nonterm' fst_term -> n_terminals n_nonterminals n_starts +> n_terminals n_nonterminals n_starts (bounds token_names') > > table_size = length table - 1 > @@ -665,9 +785,7 @@ > [ (a, fn a b) | (a, b) <- assocs nt_types ] > where > fn n Nothing = n -> fn _ (Just a) = case lookup a assoc_list of -> Just v -> v -> Nothing -> error ("cant find an item in list") +> fn _ (Just a) = fromMaybe (error "can't find an item in list") (lookup a assoc_list) > assoc_list = [ (b,a) | (a, Just b) <- assocs nt_types ] > makeAbsSynCon = mkAbsSynCon nt_types_index @@ -685,10 +803,10 @@ > . str "instance Functor HappyIdentity where\n" > . str " fmap f (HappyIdentity a) = HappyIdentity (f a)\n\n" > . str "instance Applicative HappyIdentity where\n" -> . str " pure = return\n" +> . str " pure = HappyIdentity\n" > . str " (<*>) = ap\n" > . str "instance Monad HappyIdentity where\n" -> . str " return = HappyIdentity\n" +> . str " return = pure\n" > . str " (HappyIdentity p) >>= q = q p\n\n" MonadStuff: @@ -716,8 +834,6 @@ > produceMonadStuff = -> let pcont = str monad_context in -> let pty = str monad_tycon in > str "happyThen :: " . pcont . str " => " . pty > . str " a -> (a -> " . pty > . str " b) -> " . pty . str " b\n" @@ -731,38 +847,74 @@ > . str "happyReturn1 :: " . pcont . str " => a -> b -> " . pty . str " a\n" > . str "happyReturn1 = \\a tks -> " . brack monad_return > . 
str " a\n" -> . str "happyError' :: " . str monad_context . str " => [" +> . str "happyError' :: " . str monad_context . str " => ([" > . token -> . str "] -> " +> . str "], [String]) -> " > . str monad_tycon > . str " a\n" > . str "happyError' = " > . str (if use_monad then "" else "HappyIdentity . ") -> . errorHandler -> . str "\n\n" +> . errorHandler . str "\n" > _ -> -> str "happyThen1 = happyThen\n" +> let +> happyParseSig +> | target == TargetArrayBased = +> str "happyParse :: " . pcont . str " => " . intMaybeHash +> . str " -> " . pty . str " " . happyAbsSyn . str "\n" +> . str "\n" +> | otherwise = id +> newTokenSig +> | target == TargetArrayBased = +> str "happyNewToken :: " . pcont . str " => " . intMaybeHash +> . str " -> Happy_IntList -> HappyStk " . happyAbsSyn +> . str " -> " . pty . str " " . happyAbsSyn . str"\n" +> . str "\n" +> | otherwise = id +> doActionSig +> | target == TargetArrayBased = +> str "happyDoAction :: " . pcont . str " => " . intMaybeHash +> . str " -> " . str token_type' . str " -> " . intMaybeHash +> . str " -> Happy_IntList -> HappyStk " . happyAbsSyn +> . str " -> " . pty . str " " . happyAbsSyn . str "\n" +> . str "\n" +> | otherwise = id +> reduceArrSig +> | target == TargetArrayBased = +> str "happyReduceArr :: " . pcont +> . str " => Happy_Data_Array.Array Int (" . intMaybeHash +> . str " -> " . str token_type' . str " -> " . intMaybeHash +> . str " -> Happy_IntList -> HappyStk " . happyAbsSyn +> . str " -> " . pty . str " " . happyAbsSyn . str ")\n" +> . str "\n" +> | otherwise = id in +> filterTypeSig (happyParseSig . newTokenSig . doActionSig . reduceArrSig) +> . str "happyThen1 :: " . pcont . str " => " . pty +> . str " a -> (a -> " . pty +> . str " b) -> " . pty . str " b\n" +> . str "happyThen1 = happyThen\n" > . str "happyReturn1 :: " . pcont . str " => a -> " . pty . str " a\n" > . str "happyReturn1 = happyReturn\n" -> . str "happyError' :: " . str monad_context . str " => " -> . token . str " -> " +> . 
str "happyError' :: " . str monad_context . str " => (" +> . token . str ", [String]) -> " > . str monad_tycon > . str " a\n" > . str "happyError' tk = " > . str (if use_monad then "" else "HappyIdentity ") > . errorHandler . str " tk\n" -> . str "\n" An error handler specified with %error is passed the current token when used with %lexer, but happyError (the old way but kept for -compatibility) is not passed the current token. +compatibility) is not passed the current token. Also, the %errorhandlertype +directive determins the API of the provided function. > errorHandler = > case error_handler' of -> Just h -> str h +> Just h -> case error_sig' of +> ErrorHandlerTypeExpList -> str h +> ErrorHandlerTypeDefault -> str "(\\(tokens, _) -> " . str h . str " tokens)" > Nothing -> case lexer' of -> Nothing -> str "happyError" -> Just _ -> str "(\\token -> happyError)" +> Nothing -> str "(\\(tokens, _) -> happyError tokens)" +> Just _ -> str "(\\(tokens, explist) -> happyError)" > reduceArrElem n > = str "\t(" . shows n . str " , " @@ -775,13 +927,14 @@ > = interleave "\n\n" (map produceEntry (zip starts' [0..])) > . if null attributes' then id else produceAttrEntries starts' +> produceEntry :: ((String, t0, Int, t1), Int) -> String -> String > produceEntry ((name, _start_nonterm, accept_nonterm, _partial), no) > = (if null attributes' then str name else str "do_" . str name) > . maybe_tks > . str " = " > . str unmonad > . str "happySomeParser where\n" -> . str " happySomeParser = happyThen (happyParse " +> . str " happySomeParser = happyThen (happyParse " > . case target of > TargetHaskell -> str "action_" . 
shows no > TargetArrayBased @@ -906,8 +1059,8 @@ > (act:_) -> act -- pick the first one we see for now > > where reduces -> = [ act | (_,act@(LR'Reduce _ _)) <- actions ] -> ++ [ act | (_,(LR'Multiple _ act@(LR'Reduce _ _))) <- actions ] +> = [ act | (_, act@(LR'Reduce _ _)) <- actions ] +> ++ [ act | (_, LR'Multiple _ act@(LR'Reduce _ _)) <- actions ] ----------------------------------------------------------------------------- -- Generate packed parsing tables. @@ -956,26 +1109,34 @@ > mkTables -> :: ActionTable -> GotoTable -> Name -> Int -> Int -> Int -> Int -> -> ([Int] -- happyActOffsets -> ,[Int] -- happyGotoOffsets -> ,[Int] -- happyTable -> ,[Int] -- happyDefAction -> ,[Int] -- happyCheck +> :: ActionTable -> GotoTable -> Name -> Int -> Int -> Int -> Int -> (Int, Int) -> +> ( [Int] -- happyActOffsets +> , [Int] -- happyGotoOffsets +> , [Int] -- happyTable +> , [Int] -- happyDefAction +> , [Int] -- happyCheck +> , [Int] -- happyExpList +> , Int -- happyMinOffset > ) > > mkTables action goto first_nonterm' fst_term > n_terminals n_nonterminals n_starts -> = ( elems act_offs, -> elems goto_offs, -> take max_off (elems table), -> def_actions, -> take max_off (elems check) -> ) +> token_names_bound +> +> = ( elems act_offs +> , elems goto_offs +> , take max_off (elems table) +> , def_actions +> , take max_off (elems check) +> , elems explist +> , min_off +> ) > where > -> (table,check,act_offs,goto_offs,max_off) -> = runST (genTables (length actions) max_token sorted_actions) +> (table,check,act_offs,goto_offs,explist,min_off,max_off) +> = runST (genTables (length actions) +> max_token token_names_bound +> sorted_actions explist_actions) > > -- the maximum token number used in the parser > max_token = max n_terminals (n_starts+n_nonterminals) - 1 @@ -994,11 +1155,18 @@ > | (state, acts) <- assocs action, > let (err:_dummy:vec) = assocs acts > vec' = drop (n_starts+n_nonterminals) vec -> acts' = filter (notFail) (err:vec') +> acts' = filter notFail (err:vec') > 
default_act = getDefault acts' > acts'' = mkActVals acts' default_act > ] > +> explist_actions :: [(Int, [Int])] +> explist_actions = [ (state, concatMap f $ assocs acts) +> | (state, acts) <- assocs action ] +> where +> f (t, LR'Shift _ _ ) = [t - fst token_names_bound] +> f (_, _) = [] +> > -- adjust terminals by -(fst_term+1), so they start at 1 (error is 0). > -- (see ARRAY_NOTES) > adjust token | token == errorTok = 0 @@ -1026,51 +1194,60 @@ > mkGotoVals assocs' = > [ (token - first_nonterm', i) | (token, Goto i) <- assocs' ] > -> sorted_actions = reverse (sortBy cmp_state (actions++gotos)) +> sorted_actions = sortBy (flip cmp_state) (actions ++ gotos) > cmp_state (_,_,_,width1,tally1,_) (_,_,_,width2,tally2,_) > | width1 < width2 = LT > | width1 == width2 = compare tally1 tally2 > | otherwise = GT > data ActionOrGoto = ActionEntry | GotoEntry -> type TableEntry = (ActionOrGoto, -> Int{-stateno-}, -> Int{-default-}, -> Int{-width-}, -> Int{-tally-}, -> [(Int,Int)]) +> type TableEntry = ( ActionOrGoto +> , Int {-stateno-} +> , Int {-default-} +> , Int {-width-} +> , Int {-tally-} +> , [(Int,Int)] +> ) > genTables > :: Int -- number of actions > -> Int -- maximum token no. 
+> -> (Int, Int) -- token names bounds > -> [TableEntry] -- entries for the table -> -> ST s (UArray Int Int, -- table -> UArray Int Int, -- check -> UArray Int Int, -- action offsets -> UArray Int Int, -- goto offsets -> Int -- highest offset in table -> ) +> -> [(Int, [Int])] -- expected tokens lists +> -> ST s ( UArray Int Int -- table +> , UArray Int Int -- check +> , UArray Int Int -- action offsets +> , UArray Int Int -- goto offsets +> , UArray Int Int -- expected tokens list +> , Int -- lowest offset in table +> , Int -- highest offset in table +> ) > -> genTables n_actions max_token entries = do +> genTables n_actions max_token token_names_bound entries explist = do > > table <- newArray (0, mAX_TABLE_SIZE) 0 > check <- newArray (0, mAX_TABLE_SIZE) (-1) > act_offs <- newArray (0, n_actions) 0 > goto_offs <- newArray (0, n_actions) 0 > off_arr <- newArray (-max_token, mAX_TABLE_SIZE) 0 +> exp_array <- newArray (0, (n_actions * n_token_names + 15) `div` 16) 0 > -> max_off <- genTables' table check act_offs goto_offs -> off_arr entries max_token +> (min_off,max_off) <- genTables' table check act_offs goto_offs off_arr exp_array entries +> explist max_token n_token_names > > table' <- freeze table > check' <- freeze check > act_offs' <- freeze act_offs > goto_offs' <- freeze goto_offs -> return (table',check',act_offs',goto_offs',max_off+1) +> exp_array' <- freeze exp_array +> return (table',check',act_offs',goto_offs',exp_array',min_off,max_off+1) > where > n_states = n_actions - 1 > mAX_TABLE_SIZE = n_states * (max_token + 1) +> (first_token, last') = token_names_bound +> n_token_names = last' - first_token + 1 > genTables' @@ -1079,20 +1256,33 @@ > -> STUArray s Int Int -- action offsets > -> STUArray s Int Int -- goto offsets > -> STUArray s Int Int -- offset array +> -> STUArray s Int Int -- expected token list > -> [TableEntry] -- entries for the table +> -> [(Int, [Int])] -- expected tokens lists > -> Int -- maximum token no. 
-> -> ST s Int -- highest offset in table +> -> Int -- number of token names +> -> ST s (Int,Int) -- lowest and highest offsets in table > -> genTables' table check act_offs goto_offs off_arr entries max_token -> = fit_all entries 0 1 +> genTables' table check act_offs goto_offs off_arr exp_array entries +> explist max_token n_token_names +> = fill_exp_array >> fit_all entries 0 0 1 > where > -> fit_all [] max_off _ = return max_off -> fit_all (s:ss) max_off fst_zero = do -> (off, new_max_off, new_fst_zero) <- fit s max_off fst_zero +> fit_all [] min_off max_off _ = return (min_off, max_off) +> fit_all (s:ss) min_off max_off fst_zero = do +> (off, new_min_off, new_max_off, new_fst_zero) <- fit s min_off max_off fst_zero > ss' <- same_states s ss off > writeArray off_arr off 1 -> fit_all ss' new_max_off new_fst_zero +> fit_all ss' new_min_off new_max_off new_fst_zero +> +> fill_exp_array = +> forM_ explist $ \(state, tokens) -> +> forM_ tokens $ \token -> do +> let bit_nr = state * n_token_names + token +> let word_nr = bit_nr `div` 16 +> let word_offset = bit_nr `mod` 16 +> x <- readArray exp_array word_nr +> writeArray exp_array word_nr (setBit x word_offset) > > -- try to merge identical states. We only try the next state(s) > -- in the list, but the list is kind-of sorted so we shouldn't @@ -1109,16 +1299,19 @@ > -- fit a vector into the table. Return the offset of the vector, > -- the maximum offset used in the table, and the offset of the first > -- entry in the table (used to speed up the lookups a bit). -> fit (_,_,_,_,_,[]) max_off fst_zero = return (0,max_off,fst_zero) +> fit (_,_,_,_,_,[]) min_off max_off fst_zero = return (0,min_off,max_off,fst_zero) > > fit (act_or_goto, state_no, _deflt, _, _, state@((t,_):_)) -> max_off fst_zero = do +> min_off max_off fst_zero = do > -- start at offset 1 in the table: all the empty states > -- (states with just a default reduction) are mapped to > -- offset zero. 
> off <- findFreeOffset (-t+fst_zero) check off_arr state -> let new_max_off | furthest_right > max_off = furthest_right +> let new_min_off | furthest_left < min_off = furthest_left +> | otherwise = min_off +> new_max_off | furthest_right > max_off = furthest_right > | otherwise = max_off +> furthest_left = off > furthest_right = off + max_token > > -- trace ("fit: state " ++ show state_no ++ ", off " ++ show off ++ ", elems " ++ show state) $ do @@ -1126,7 +1319,7 @@ > writeArray (which_off act_or_goto) state_no off > addState off table check state > new_fst_zero <- findFstFreeSlot check fst_zero -> return (off, new_max_off, new_fst_zero) +> return (off, new_min_off, new_max_off, new_fst_zero) When looking for a free offest in the table, we use the 'check' table rather than the main table. The check table starts off with (-1) in @@ -1198,9 +1391,9 @@ > mkHappyIn n = str "happyIn" . shows n > mkHappyOut n = str "happyOut" . shows n -> type_param :: Int -> Maybe String -> ShowS -> type_param n Nothing = char 't' . shows n -> type_param _ (Just ty) = brack ty +> typeParam :: Int -> Maybe String -> ShowS +> typeParam n Nothing = char 't' . shows n +> typeParam _ (Just ty) = brack ty > specReduceFun :: Int -> Bool > specReduceFun = (<= 3) @@ -1210,10 +1403,10 @@ -- for placing in a string. > hexChars :: [Int] -> String -> hexChars acts = concat (map hexChar acts) +> hexChars = concatMap hexChar > hexChar :: Int -> String -> hexChar i | i < 0 = hexChar (i + 2^16) +> hexChar i | i < 0 = hexChar (i + 65536) > hexChar i = toHex (i `mod` 256) ++ toHex (i `div` 256) > toHex :: Int -> String @@ -1222,3 +1415,15 @@ > hexDig :: Int -> Char > hexDig i | i <= 9 = chr (i + ord '0') > | otherwise = chr (i - 10 + ord 'a') + +This guards against integers that are so large as to (when converted using +'hexChar') wrap around the maximum value of 16-bit numbers and then end up +larger than an expected minimum value. 
+ +> checkedHexChars :: Int -> [Int] -> String +> checkedHexChars minValue = concatMap hexChar' +> where hexChar' i | checkHexChar minValue i = hexChar i +> | otherwise = error "grammar does not fit in 16-bit representation that is used with '--ghc'" + +> checkHexChar :: Int -> Int -> Bool +> checkHexChar minValue i = i <= 32767 || i - 65536 < minValue diff -Nru happy-1.19.5/src/ProduceGLRCode.lhs happy-1.19.8/src/ProduceGLRCode.lhs --- happy-1.19.5/src/ProduceGLRCode.lhs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/src/ProduceGLRCode.lhs 2017-10-12 07:46:11.000000000 +0000 @@ -19,8 +19,8 @@ > import GenUtils ( str, char, nl, brack, brack', interleave, maybestr ) > import Grammar > import Data.Array -> import Data.Char ( isSpace ) -> import Data.List ( nub, (\\), sort ) +> import Data.Char ( isSpace, isAlphaNum ) +> import Data.List ( nub, (\\), sort, find, tails ) > import Data.Version ( showVersion ) %----------------------------------------------------------------------------- @@ -130,15 +130,46 @@ > where > (_,_,ghcExts_opt) = options -> mod_name = reverse $ takeWhile (`notElem` "\\/") $ reverse basename +Extract the module name from the given module declaration, if it exists. + +> m_mod_decl = find isModKW . zip [0..] . tails . (' ':) =<< header +> isModKW (_, c0:'m':'o':'d':'u':'l':'e':c1:_) = not (validIDChar c0 || validIDChar c1) +> isModKW _ = False +> validIDChar c = isAlphaNum c || c `elem` "_'" +> validModNameChar c = validIDChar c || c == '.' > data_mod = mod_name ++ "Data" +> mod_name = case m_mod_decl of +> Just (_, md) -> takeWhile validModNameChar (dropWhile (not . validModNameChar) (drop 8 md)) + +Or use a default based upon the filename (original behaviour). + +> Nothing -> reverse . takeWhile (`notElem` "\\/") $ reverse basename + +Remove the module declaration from the header so that the remainder of +the header can be used in the generated code. 
+ +> header_sans_mod = flip (maybe header) m_mod_decl $ \ (mi, _) -> do +> hdr <- header + +Extract the string that comes before the module declaration... + +> let (before, mod_decl) = splitAt mi hdr + +> let isWhereKW (c0:'w':'h':'e':'r':'e':c1:_) = not (validIDChar c0 || validIDChar c1) +> isWhereKW _ = False +> let where_after = dropWhile (not . isWhereKW) . tails . (++ "\n") $ mod_decl +> let after = drop 6 . concat . take 1 $ where_after + +...and combine it with the string that comes after the 'where' keyword. + +> return $ before ++ "\n" ++ after > (sem_def, sem_info) = mkGSemType options g > table_text = mkTbls tables sem_info (ghcExts_opt) g > header_parts = fmap (span (\x -> take 3 (dropWhile isSpace x) == "{-#") > . lines) -> header +> header_sans_mod > -- Split off initial options, if they are present > -- Assume these options ONLY related to code which is in > -- parser tail or in sem. rules @@ -157,7 +188,7 @@ > . nl > . let count_nls = length . filter (=='\n') -> pre_trailer = maybe 0 count_nls header -- check fmt below +> pre_trailer = maybe 0 count_nls header_sans_mod -- check fmt below > + count_nls base_defs > + 10 -- for the other stuff > post_trailer = pre_trailer + maybe 0 count_nls trailer + 4 @@ -197,8 +228,8 @@ > , "module " ++ mod_name ++ "(" > , case lexer g of > Nothing -> "" -> Just (lf,_) -> "\t" ++ lf ++ "," -> , "\t" ++ start +> Just (lf,_) -> " " ++ lf ++ "," +> , " " ++ start > , "" > , unlines pre > , imps @@ -221,9 +252,9 @@ > user_def_token_code tokenType > = str "type UserDefTok = " . str tokenType . nl > . str "instance TreeDecode " . brack tokenType . str " where" . nl -> . str "\tdecode_b f (Branch (SemTok t) []) = [happy_return t]" . nl +> . str " decode_b f (Branch (SemTok t) []) = [happy_return t]" . nl > . str "instance LabelDecode " . brack tokenType . str " where" . nl -> . str "\tunpack (SemTok t) = t" . nl +> . str " unpack (SemTok t) = t" . 
nl %----------------------------------------------------------------------------- @@ -384,8 +415,8 @@ plus, issues about how token info gets into TreeDecode sem values - which might be tricky to arrange. <> eq_inst = "instance Eq GSymbol where" -<> : "\tHappyTok i _ == HappyTok j _ = i == j" -<> : [ "\ti == j = fromEnum i == fromEnum j" +<> : " HappyTok i _ == HappyTok j _ = i == j" +<> : [ " i == j = fromEnum i == fromEnum j" @@ -421,7 +452,7 @@ > . interleave "\n" [ str " | " . str sym . str " " > | sym <- map fst syms ] > . str "instance Show GSem where" . nl -> . interleave "\n" [ str "\tshow " . str c . str "{} = " . str (show c) +> . interleave "\n" [ str " show " . str c . str "{} = " . str (show c) > | (_,c,_,_) <- map snd syms ] > syms = [ (c_name ++ " (" ++ ty ++ ")", (rty, c_name, mask, prod_info)) @@ -536,7 +567,7 @@ > . str (nodes filter_opt) > | (_ty, c_name, mask, prod_info) <- sem_info > , (ij, (pats,code), _) <- prod_info -> , let indent c = init $ unlines $ map (replicate 2 '\t'++) $ lines c +> , let indent c = init $ unlines $ map (replicate 4 ' '++) $ lines c > , let mcode = case monad_info of > Nothing -> code > Just (_,_,rtn) -> case code of @@ -602,7 +633,7 @@ > mk_inst (ty, cs_vs) > = str ("instance TreeDecode (" ++ ty ++ ") where ") . nl > . interleave "\n" -> [ char '\t' +> [ str " " > . str ("decode_b f (Branch (" ++ c_name ++ " s)") > . str (" (" ++ var_pat ++ ")) = ") > . cross_prod monad_info "s" (nodes filter_opt) @@ -647,7 +678,7 @@ > mk_inst (ty, cns) > = ("instance LabelDecode (" ++ ty ++ ") where ") -> : [ "\tunpack (" ++ c_name ++ " s) = s" +> : [ " unpack (" ++ c_name ++ " s) = s" > | (c_name, _mask) <- cns ] @@ -698,5 +729,3 @@ > mkHappyVar :: Int -> String -> String > mkHappyVar n = str "happy_var_" . 
shows n - - diff -Nru happy-1.19.5/templates/GenericTemplate.hs happy-1.19.8/templates/GenericTemplate.hs --- happy-1.19.5/templates/GenericTemplate.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/templates/GenericTemplate.hs 2017-10-12 07:46:11.000000000 +0000 @@ -2,9 +2,6 @@ #ifdef HAPPY_GHC #undef __GLASGOW_HASKELL__ -#define HAPPY_IF_GHC_GT_500 #if __GLASGOW_HASKELL__ > 500 -#define HAPPY_IF_GHC_GE_503 #if __GLASGOW_HASKELL__ >= 503 -#define HAPPY_ELIF_GHC_500 #elif __GLASGOW_HASKELL__ == 500 #define HAPPY_IF_GHC_GT_706 #if __GLASGOW_HASKELL__ > 706 #define HAPPY_ELSE #else #define HAPPY_ENDIF #endif @@ -44,8 +41,9 @@ #define IF_GHC(x) #endif -#if defined(HAPPY_ARRAY) data Happy_IntList = HappyCons FAST_INT Happy_IntList + +#if defined(HAPPY_ARRAY) #define CONS(h,t) (HappyCons (h) (t)) #else #define CONS(h,t) ((h):(t)) @@ -114,7 +112,7 @@ ",\taction: ") case action of ILIT(0) -> DEBUG_TRACE("fail.\n") - happyFail i tk st + happyFail (happyExpListPerState (IBOX(st) :: Int)) i tk st ILIT(-1) -> DEBUG_TRACE("accept.\n") happyAccept i tk st n | LT(n,(ILIT(0) :: FAST_INT)) -> DEBUG_TRACE("reduce (rule " ++ show rule @@ -126,8 +124,8 @@ ++ "\n") happyShift new_state i tk st where new_state = MINUS(n,(ILIT(1) :: FAST_INT)) - where off = indexShortOffAddr happyActOffsets st - off_i = PLUS(off,i) + where off = happyAdjustOffset (indexShortOffAddr happyActOffsets st) + off_i = PLUS(off, i) check = if GTE(off_i,(ILIT(0) :: FAST_INT)) then EQ(indexShortOffAddr happyCheck off_i, i) else False @@ -135,6 +133,8 @@ | check = indexShortOffAddr happyTable off_i | otherwise = indexShortOffAddr happyDefActions st +#endif /* HAPPY_ARRAY */ + #ifdef HAPPY_GHC indexShortOffAddr (HappyA# arr) off = Happy_GHC_Exts.narrow16Int# i @@ -147,12 +147,22 @@ indexShortOffAddr arr off = arr Happy_Data_Array.! 
off #endif +{-# INLINE happyLt #-} +happyLt x y = LT(x,y) + +#ifdef HAPPY_GHC +readArrayBit arr bit = + Bits.testBit IBOX(indexShortOffAddr arr ((unbox_int bit) `Happy_GHC_Exts.iShiftRA#` 4#)) (bit `mod` 16) + where unbox_int (Happy_GHC_Exts.I# x) = x +#else +readArrayBit arr bit = + Bits.testBit IBOX(indexShortOffAddr arr (bit `div` 16)) (bit `mod` 16) +#endif + #ifdef HAPPY_GHC data HappyAddr = HappyA# Happy_GHC_Exts.Addr# #endif -#endif /* HAPPY_ARRAY */ - ----------------------------------------------------------------------------- -- HappyState data type (not arrays) @@ -182,30 +192,30 @@ -- happyReduce is specialised for the common cases. happySpecReduce_0 i fn ERROR_TOK tk st sts stk - = happyFail ERROR_TOK tk st sts stk + = happyFail [] ERROR_TOK tk st sts stk happySpecReduce_0 nt fn j tk st@(HAPPYSTATE(action)) sts stk = GOTO(action) nt j tk st CONS(st,sts) (fn `HappyStk` stk) happySpecReduce_1 i fn ERROR_TOK tk st sts stk - = happyFail ERROR_TOK tk st sts stk + = happyFail [] ERROR_TOK tk st sts stk happySpecReduce_1 nt fn j tk _ sts@(CONS(st@HAPPYSTATE(action),_)) (v1`HappyStk`stk') = let r = fn v1 in happySeq r (GOTO(action) nt j tk st sts (r `HappyStk` stk')) happySpecReduce_2 i fn ERROR_TOK tk st sts stk - = happyFail ERROR_TOK tk st sts stk + = happyFail [] ERROR_TOK tk st sts stk happySpecReduce_2 nt fn j tk _ CONS(_,sts@(CONS(st@HAPPYSTATE(action),_))) (v1`HappyStk`v2`HappyStk`stk') = let r = fn v1 v2 in happySeq r (GOTO(action) nt j tk st sts (r `HappyStk` stk')) happySpecReduce_3 i fn ERROR_TOK tk st sts stk - = happyFail ERROR_TOK tk st sts stk + = happyFail [] ERROR_TOK tk st sts stk happySpecReduce_3 nt fn j tk _ CONS(_,CONS(_,sts@(CONS(st@HAPPYSTATE(action),_)))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk') = let r = fn v1 v2 v3 in happySeq r (GOTO(action) nt j tk st sts (r `HappyStk` stk')) happyReduce k i fn ERROR_TOK tk st sts stk - = happyFail ERROR_TOK tk st sts stk + = happyFail [] ERROR_TOK tk st sts stk happyReduce k nt fn j tk st sts 
stk = case happyDrop MINUS(k,(ILIT(1) :: FAST_INT)) sts of sts1@(CONS(st1@HAPPYSTATE(action),_)) -> @@ -213,7 +223,7 @@ happyDoSeq r (GOTO(action) nt j tk st1 sts1 r) happyMonadReduce k nt fn ERROR_TOK tk st sts stk - = happyFail ERROR_TOK tk st sts stk + = happyFail [] ERROR_TOK tk st sts stk happyMonadReduce k nt fn j tk st sts stk = case happyDrop k CONS(st,sts) of sts1@(CONS(st1@HAPPYSTATE(action),_)) -> @@ -221,16 +231,17 @@ happyThen1 (fn stk tk) (\r -> GOTO(action) nt j tk st1 sts1 (r `HappyStk` drop_stk)) happyMonad2Reduce k nt fn ERROR_TOK tk st sts stk - = happyFail ERROR_TOK tk st sts stk + = happyFail [] ERROR_TOK tk st sts stk happyMonad2Reduce k nt fn j tk st sts stk = case happyDrop k CONS(st,sts) of sts1@(CONS(st1@HAPPYSTATE(action),_)) -> let drop_stk = happyDropStk k stk #if defined(HAPPY_ARRAY) - off = indexShortOffAddr happyGotoOffsets st1 - off_i = PLUS(off,nt) + off = happyAdjustOffset (indexShortOffAddr happyGotoOffsets st1) + off_i = PLUS(off, nt) new_state = indexShortOffAddr happyTable off_i #else + _ = nt :: FAST_INT new_state = action #endif in @@ -249,8 +260,8 @@ happyGoto nt j tk st = DEBUG_TRACE(", goto state " ++ show IBOX(new_state) ++ "\n") happyDoAction j tk new_state - where off = indexShortOffAddr happyGotoOffsets st - off_i = PLUS(off,nt) + where off = happyAdjustOffset (indexShortOffAddr happyGotoOffsets st) + off_i = PLUS(off, nt) new_state = indexShortOffAddr happyTable off_i #else happyGoto action j tk st = action j j tk (HappyState action) @@ -260,10 +271,10 @@ -- Error recovery (ERROR_TOK is the error token) -- parse error if we are in recovery and we fail again -happyFail ERROR_TOK tk old_st _ stk@(x `HappyStk` _) = +happyFail explist ERROR_TOK tk old_st _ stk@(x `HappyStk` _) = let i = GET_ERROR_TOKEN(x) in -- trace "failing" $ - happyError_ i tk + happyError_ explist i tk {- We don't need state discarding for our restricted implementation of "error". 
In fact, it can cause some bogus parses, so I've disabled it @@ -278,7 +289,7 @@ -- Enter error recovery: generate an error token, -- save the old token and carry on. -happyFail i tk HAPPYSTATE(action) sts stk = +happyFail explist i tk HAPPYSTATE(action) sts stk = -- trace "entering error recovery" $ DO_ACTION(action,ERROR_TOK,tk,sts, MK_ERROR_TOKEN(i) `HappyStk` stk) diff -Nru happy-1.19.5/templates/GLR_Base.hs happy-1.19.8/templates/GLR_Base.hs --- happy-1.19.5/templates/GLR_Base.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/templates/GLR_Base.hs 2017-10-12 07:46:11.000000000 +0000 @@ -32,7 +32,7 @@ deriving Show instance Eq Branch where - b1 == b2 = b_nodes b1 == b_nodes b2 + b1 == b2 = b_nodes b1 == b_nodes b2 @@ -47,7 +47,7 @@ -- - "Decode_Result" is a synonym used to insert the monad type constr (or not) class TreeDecode a where - decode_b :: (ForestId -> [Branch]) -> Branch -> [Decode_Result a] + decode_b :: (ForestId -> [Branch]) -> Branch -> [Decode_Result a] decode :: TreeDecode a => (ForestId -> [Branch]) -> ForestId -> [Decode_Result a] decode f i@(_,_,HappyTok t) @@ -57,7 +57,7 @@ ---- generated by Happy, since it means expansion of synonym (not ok in H-98) --instance TreeDecode UserDefTok where --- decode_b f (Branch (SemTok t) []) = [happy_return t] +-- decode_b f (Branch (SemTok t) []) = [happy_return t] --- -- this is used to multiply the ambiguous possibilities from children @@ -75,10 +75,10 @@ -- - see documentation for further information class LabelDecode a where - unpack :: GSem -> a + unpack :: GSem -> a ---- generated by Happy, since it means expansion of synonym (not ok in H-98) --instance LabelDecode UserDefTok where --- unpack (SemTok t) = t +-- unpack (SemTok t) = t diff -Nru happy-1.19.5/templates/GLR_Lib.hs happy-1.19.8/templates/GLR_Lib.hs --- happy-1.19.5/templates/GLR_Lib.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/templates/GLR_Lib.hs 2017-10-12 07:46:11.000000000 +0000 @@ -19,15 +19,15 @@ {- supplied by Happy 
<> module XYZ ( -<> lexer -- conditional +<> lexer -- conditional -} - -- probable, but might want to parametrise + -- probable, but might want to parametrise , doParse - , TreeDecode(..), decode -- only for tree decode - , LabelDecode(..) -- only for label decode + , TreeDecode(..), decode -- only for tree decode + , LabelDecode(..) -- only for label decode - -- standard exports + -- standard exports , Tokens , GLRResult(..) , NodeMap @@ -42,7 +42,8 @@ import Data.Char import qualified Data.Map as Map -import Control.Monad (foldM) +import Control.Applicative (Applicative(..)) +import Control.Monad (foldM, ap) import Data.Maybe (fromJust) import Data.List (insertBy, nub, maximumBy, partition, find, groupBy, delete) #if defined(HAPPY_GHC) @@ -51,8 +52,9 @@ #endif #if defined(HAPPY_DEBUG) +import System.IO import System.IO.Unsafe -import Pretty +import Text.PrettyPrint #endif {- these inserted by Happy -} @@ -63,18 +65,30 @@ #ifdef HAPPY_GHC #define ILIT(n) n# +#define BANG ! #define IBOX(n) (I# (n)) #define FAST_INT Int# + +#if __GLASGOW_HASKELL__ >= 708 +#define ULT(n,m) (isTrue# (n <# m)) +#define GTE(n,m) (isTrue# (n >=# m)) +#define UEQ(n,m) (isTrue# (n ==# m)) +#else #define ULT(n,m) (n <# m) #define GTE(n,m) (n >=# m) #define UEQ(n,m) (n ==# m) +#endif + #define PLUS(n,m) (n +# m) #define MINUS(n,m) (n -# m) #define TIMES(n,m) (n *# m) #define NEGATE(n) (negateInt# (n)) #define IF_GHC(x) (x) + #else + #define ILIT(n) (n) +#define BANG #define IBOX(n) (n) #define FAST_INT Int #define ULT(n,m) (n < m) @@ -118,7 +132,7 @@ type NodeMap = [(ForestId, [Branch])] type RootNode = ForestId -type Tokens = [[(Int, GSymbol)]] -- list of ambiguous lexemes +type Tokens = [[(Int, GSymbol)]] -- list of ambiguous lexemes data GLRResult = ParseOK RootNode Forest -- forest with root @@ -131,15 +145,15 @@ forestResult :: Int -> Forest -> GLRResult forestResult length f = case roots of - [] -> ParseEOF f - [r] -> ParseOK r f - rs@(_:_) -> error $ "multiple roots in forest, = " ++ 
show rs - ++ unlines (map show ns_map) + [] -> ParseEOF f + [r] -> ParseOK r f + rs@(_:_) -> error $ "multiple roots in forest, = " ++ show rs + ++ unlines (map show ns_map) where ns_map = Map.toList f roots = [ r | (r@(0,sz,sym),_) <- ns_map - , sz == length - , sym == top_symbol ] + , sz == length + , sym == top_symbol ] ---------------------------------------------------------------------------- @@ -148,12 +162,12 @@ glr_parse toks = case runST Map.empty [0..] (tp toks) of (f,Left ts) -> ParseError ts f - -- Error within sentence + -- Error within sentence (f,Right ss) -> forestResult (length toks) f - -- Either good parse or EOF + -- Either good parse or EOF where - tp tss = doActions [initTS 0] - $ zipWith (\i ts -> [(i, t) | t <- ts]) [0..] + tp tss = doActions [initTS 0] + $ zipWith (\i ts -> [(i, t) | t <- ts]) [0..] $ [ [ HappyTok {-j-} t | (j,t) <- zip [0..] ts ] | ts <- tss ] ++ [[HappyEOF]] @@ -168,23 +182,23 @@ doActions :: [FStack] -> Tokens -> PM (Either Tokens [FStack]) -doActions ss [] -- no more tokens (this is ok) - = return (Right ss) -- return the stacks (may be empty) +doActions ss [] -- no more tokens (this is ok) + = return (Right ss) -- return the stacks (may be empty) doActions stks (tok:toks) = do - stkss <- sequence [ do + stkss <- sequence [ do stks' <- reduceAll [] tok_form stks shiftAll tok_form stks' | tok_form <- tok ] - let new_stks = merge $ concat stkss - DEBUG_TRACE(unlines $ ("Stacks after R*/S pass" ++ show tok) - : map show new_stks) - case new_stks of -- did this token kill stacks? - [] -> case toks of - [] -> return $ Right [] -- ok if no more tokens - _:_ -> return $ Left (tok:toks) -- not ok if some input left - _ -> doActions new_stks toks + let new_stks = merge $ concat stkss + DEBUG_TRACE(unlines $ ("Stacks after R*/S pass" ++ show tok) + : map show new_stks) + case new_stks of -- did this token kill stacks? 
+ [] -> case toks of + [] -> return $ Right [] -- ok if no more tokens + _:_ -> return $ Left (tok:toks) -- not ok if some input left + _ -> doActions new_stks toks reduceAll :: [GSymbol] -> (Int, GSymbol) -> [FStack] -> PM [(FStack, Int)] @@ -200,28 +214,28 @@ this_state = top stk redAll rs = do - let reds = [ (bf fids,stk',m) - | (m,n,bf) <- rs - , not (n == 0 && m `elem` cyclic_names) -- remove done ones - , (fids,stk') <- pop n stk - ] - -- WARNING: incomplete if more than one Empty in a prod(!) - -- WARNING: can avoid by splitting emps/non-emps - DEBUG_TRACE(unlines $ ("Packing reds = " ++ show (length reds)) - : map show reds) - stks' <- foldM (pack i) stks reds - let new_cyclic = [ m | (m,0,_) <- rs - , UEQ(this_state, goto this_state m) - , m `notElem` cyclic_names ] - reduceAll (cyclic_names ++ new_cyclic) itok $ merge stks' + let reds = [ (bf fids,stk',m) + | (m,n,bf) <- rs + , not (n == 0 && m `elem` cyclic_names) -- remove done ones + , (fids,stk') <- pop n stk + ] + -- WARNING: incomplete if more than one Empty in a prod(!) 
+ -- WARNING: can avoid by splitting emps/non-emps + DEBUG_TRACE(unlines $ ("Packing reds = " ++ show (length reds)) + : map show reds) + stks' <- foldM (pack i) stks reds + let new_cyclic = [ m | (m,0,_) <- rs + , UEQ(this_state, goto this_state m) + , m `notElem` cyclic_names ] + reduceAll (cyclic_names ++ new_cyclic) itok $ merge stks' shiftAll :: (Int, GSymbol) -> [(FStack, Int)] -> PM [FStack] shiftAll tok [] = return [] shiftAll (j,tok) stks = do - let end = j + 1 - let key = end `seq` (j,end,tok) - newNode key + let end = j + 1 + let key = end `seq` (j,end,tok) + newNode key let mss = [ (stk, st) | ss@((_,st):_) <- groupBy (\a b -> snd a == snd b) stks , stk <- merge $ map fst ss ] @@ -241,28 +255,28 @@ let s_i = endpoint stk let key = (s_i,e_i,m) DEBUG_TRACE( unlines - $ ("Pack at " ++ show key ++ " " ++ show fids) - : ("**" ++ show stk) - : map show stks) + $ ("Pack at " ++ show key ++ " " ++ show fids) + : ("**" ++ show stk) + : map show stks) duplicate <- addBranch key fids let stack_matches = [ s | s <- stks - , UEQ(top s, st) + , UEQ(top s, st) , let (k,s') = case ts_tail s of x:_ -> x - , stk == s' - , k == key - ] -- look for first obvious packing site + , stk == s' + , k == key + ] -- look for first obvious packing site let appears_in = not $ null stack_matches DEBUG_TRACE( unlines - $ ("Stack Matches: " ++ show (length stack_matches)) - : map show stack_matches) + $ ("Stack Matches: " ++ show (length stack_matches)) + : map show stack_matches) DEBUG_TRACE( if not (duplicate && appears_in) then "" else - unlines - $ ("DROP:" ++ show (IBOX(st),key) ++ " -- " ++ show stk) - : "*****" - : map show stks) + unlines + $ ("DROP:" ++ show (IBOX(st),key) ++ " -- " ++ show stk) + : "*****" + : map show stks) if duplicate && appears_in then return stks -- because already there @@ -270,10 +284,10 @@ nid <- getID case stack_matches of [] -> return $ insertStack (push key st nid stk) stks - -- No prior stacks + -- No prior stacks s:_ -> return $ insertStack (push 
key st nid stk) (delete s stks) - -- pack into an existing stack + -- pack into an existing stack where st = goto (top stk) m @@ -295,7 +309,7 @@ addBranch :: ForestId -> Branch -> PM Bool addBranch i b = do - f <- useS id + f <- useS id case Map.lookup i f of Nothing -> chgS $ \f -> (False, Map.insert i [b] f) Just bs | b `elem` bs -> return True @@ -308,7 +322,7 @@ getBranches i = useS $ \s -> Map.findWithDefault no_such_node i s where - no_such_node = error $ "No such node in Forest: " ++ show i + no_such_node = error $ "No such node in Forest: " ++ show i @@ -326,10 +340,10 @@ data TStack a - = TS { top :: FAST_INT -- state - , ts_id :: FAST_INT -- ID - , stoup :: !(Maybe a) -- temp holding place, for left rec. - , ts_tail :: ![(a,TStack a)] -- [(element on arc , child)] + = TS { top :: FAST_INT -- state + , ts_id :: FAST_INT -- ID + , stoup :: !(Maybe a) -- temp holding place, for left rec. + , ts_tail :: ![(a,TStack a)] -- [(element on arc , child)] } instance Show a => Show (TStack a) where @@ -338,9 +352,9 @@ #if defined(HAPPY_DEBUG) ++ "\n" ++ render (spp $ ts_tail ts) where - spp ss = nest 2 - $ vcat [ vcat [text (show (v,IBOX(top s))), spp (ts_tail s)] - | (v,s) <- ss ] + spp ss = nest 2 + $ vcat [ vcat [text (show (v,IBOX(top s))), spp (ts_tail s)] + | (v,s) <- ss ] #endif @@ -371,7 +385,7 @@ push x@(s_i,e_i,m) st IBOX(id) stk = TS st id stoup [(x,stk)] where - -- only fill stoup for cyclic states that don't consume input + -- only fill stoup for cyclic states that don't consume input stoup | s_i == e_i && UEQ(st, goto st m) = Just x | otherwise = Nothing @@ -382,8 +396,8 @@ pop 1 st@TS{stoup=Just x} = pop 1 st{stoup=Nothing} ++ [ ([x],st) ] pop n ts = [ (xs ++ [x] , stk') - | (x,stk) <- ts_tail ts - , (xs,stk') <- pop (n-1) stk ] + | (x,stk) <- ts_tail ts + , (xs,stk') <- pop (n-1) stk ] --- @@ -407,15 +421,15 @@ | IBOX(st) <- nub (map (\s -> IBOX(top s)) stks) , let ch = concat [ x | TS st2 _ _ x <- stks, UEQ(st,st2) ] ss = mkss [ s | TS st2 _ s _ <- 
stks, UEQ(st,st2) ] - IBOX(id) = head [ IBOX(i) | TS st2 i _ _ <- stks, UEQ(st,st2) ] - -- reuse of id is ok, since merge discards old stacks + (BANG IBOX(id)) = head [ IBOX(i) | TS st2 i _ _ <- stks, UEQ(st,st2) ] + -- reuse of id is ok, since merge discards old stacks ] where mkss s = case nub [ x | Just x <- s ] of [] -> Nothing [x] -> Just x xs -> error $ unlines $ ("Stoup merge: " ++ show xs) - : map show stks + : map show stks @@ -430,16 +444,20 @@ fmap f (MkST sf) = MkST $ \s i -> case sf s i of (a,s',i') -> (f a,s',i') +instance Applicative (ST s i) where + pure a = MkST $ \s i -> (a,s,i) + (<*>) = ap + instance Monad (ST s i) where - return a = MkST $ \s i -> (a,s,i) + return = pure MkST sf >>= k = MkST $ \s i -> - case sf s i of - (a,s',i') -> let (MkST sf') = k a in sf' s' i' + case sf s i of + (a,s',i') -> let (MkST sf') = k a in sf' s' i' runST :: s -> i -> ST s i a -> (s,a) runST s i (MkST sf) = case sf s i of - (a,s,_) -> (s,a) + (a,s,_) -> (s,a) chgS :: (s -> (a,s)) -> ST s i a chgS sf = MkST $ \s i -> let (a,s') = sf s in (a,s',i) diff -Nru happy-1.19.5/test.hs happy-1.19.8/test.hs --- happy-1.19.5/test.hs 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/test.hs 2017-10-12 07:46:11.000000000 +0000 @@ -1,4 +1,4 @@ -import System.Cmd (system) +import System.Process (system) import System.Exit (exitWith) main = system "make -k -C tests clean all" >>= exitWith diff -Nru happy-1.19.5/tests/issue91.y happy-1.19.8/tests/issue91.y --- happy-1.19.5/tests/issue91.y 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/tests/issue91.y 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,35 @@ +-- See for more information +%name parse prod + +%tokentype { Tok } + +%monad { P } { bindP } { returnP } +%error { error "parse error" } +%lexer { lexer } { EOF } + +%token + IDENT { Identifier $$ } + +%% + +prod :: { () } + : IDENT { () } + +{ + +data Tok = EOF | Identifier String + +type P a = String -> (a, String) + +bindP :: P a -> (a -> P b) -> P b +bindP p f s = let 
(x,s') = p s in f x s' + +returnP :: a -> P a +returnP = (,) + +lexer :: (Tok -> P a) -> P a +lexer cont s = cont (case s of { "" -> EOF; _ -> Identifier s }) "" + +main = pure () + +} diff -Nru happy-1.19.5/tests/issue93.y happy-1.19.8/tests/issue93.y --- happy-1.19.5/tests/issue93.y 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/tests/issue93.y 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,1246 @@ +-- See for more information +-- This is an example of a grammar that has more than 2^15 entries in `happyTable` (39817). +{ +import System.Exit +import Data.Char +} + +%name parseLit lit +%name parseAttr export_attribute +%name parseTy export_ty +%name parsePat pat +%name parseStmt stmt +%name parseExpr expr +%name parseItem mod_item +%name parseSourceFileContents source_file +%name parseBlock export_block +%name parseImplItem impl_item +%name parseTraitItem trait_item +%name parseTt token_tree +%name parseTokenStream token_stream +%name parseTyParam ty_param +%name parseLifetimeDef lifetime_def +%name parseWhereClause where_clause +%name parseGenerics generics + +%tokentype { Token } +%lexer { lexNonSpace `bindP` } { Eof } +%monad { P } { bindP } { returnP } + +%error { parseError } + +%expect 0 + +%token + + + '=' { Equal } + '<' { Less } + '>' { Greater } + '!' { Exclamation } + '~' { Tilde } + + '+' { Plus } + '-' { Minus } + '*' { Star } + '/' { Slash } + '%' { Percent } + '^' { Caret } + '&' { Ampersand } + '|' { Pipe } + + + '@' { At } + '...' { DotDotDot } + '..' { DotDot } + '.' { Dot } + ',' { Comma } + ';' { Semicolon } + '::' { ModSep } + ':' { Colon } + '->' { RArrow } + '<-' { LArrow } + '=>' { FatArrow } + '#' { Pound } + '$' { Dollar } + '?' { Question } + '#!' 
{ Shebang } + + '||' { PipePipe } + '&&' { AmpersandAmpersand } + '>=' { GreaterEqual } + '>>=' { GreaterGreaterEqual } + '<<' { LessLess } + '>>' { GreaterGreater } + + '==' { EqualEqual } + '!=' { NotEqual } + '<=' { LessEqual } + '<<=' { LessLessEqual } + '-=' { MinusEqual } + '&=' { AmpersandEqual } + '|=' { PipeEqual } + '+=' { PlusEqual } + '*=' { StarEqual } + '/=' { SlashEqual } + '^=' { CaretEqual } + '%=' { PercentEqual } + + '(' { OpenParen } + '[' { OpenBracket } + '{' { OpenBrace } + ')' { CloseParen } + ']' { CloseBracket } + '}' { CloseBrace } + + + byte { ByteTok{} } + char { CharTok{} } + int { IntegerTok{} } + float { FloatTok{} } + str { StrTok{} } + byteStr { ByteStrTok{} } + rawStr { StrRawTok{} } + rawByteStr { ByteStrRawTok{} } + + + as { IdentTok "as" } + box { IdentTok "box" } + break { IdentTok "break" } + const { IdentTok "const" } + continue { IdentTok "continue" } + crate { IdentTok "crate" } + else { IdentTok "else" } + enum { IdentTok "enum" } + extern { IdentTok "extern" } + false { IdentTok "false" } + fn { IdentTok "fn" } + for { IdentTok "for" } + if { IdentTok "if" } + impl { IdentTok "impl" } + in { IdentTok "in" } + let { IdentTok "let" } + loop { IdentTok "loop" } + match { IdentTok "match" } + mod { IdentTok "mod" } + move { IdentTok "move" } + mut { IdentTok "mut" } + pub { IdentTok "pub" } + ref { IdentTok "ref" } + return { IdentTok "return" } + Self { IdentTok "Self" } + self { IdentTok "self" } + static { IdentTok "static" } + struct { IdentTok "struct" } + super { IdentTok "super" } + trait { IdentTok "trait" } + true { IdentTok "true" } + type { IdentTok "type" } + unsafe { IdentTok "unsafe" } + use { IdentTok "use" } + where { IdentTok "where" } + while { IdentTok "while" } + do { IdentTok "do" } + + abstract { IdentTok "abstract" } + alignof { IdentTok "alignof" } + become { IdentTok "become" } + final { IdentTok "final" } + macro { IdentTok "macro" } + offsetof { IdentTok "offsetof" } + override { IdentTok 
"override" } + priv { IdentTok "priv" } + proc { IdentTok "proc" } + pure { IdentTok "pure" } + sizeof { IdentTok "sizeof" } + typeof { IdentTok "typeof" } + unsized { IdentTok "unsized" } + virtual { IdentTok "virtual" } + yield { IdentTok "yield" } + + + default { IdentTok "default" } + union { IdentTok "union" } + catch { IdentTok "catch" } + + + outerDoc { OuterDoc } + innerDoc { InnerDoc } + + + IDENT { IdentTok{} } + '_' { Underscore } + + + LIFETIME { LifetimeTok _ } + + + ntItem { Interpolated 0 } + ntBlock { Interpolated 1 } + ntStmt { Interpolated 2 } + ntPat { Interpolated 3 } + ntExpr { Interpolated 4 } + ntTy { Interpolated 5 } + ntIdent { Interpolated 6 } + ntPath { Interpolated 7 } + ntTT { Interpolated 8 } + ntArm { Interpolated 9 } + ntImplItem { Interpolated 10 } + ntTraitItem { Interpolated 11 } + ntGenerics { Interpolated 12 } + ntWhereClause { Interpolated 13 } + ntArg { Interpolated 14 } + ntLit { Interpolated 15 } + +%nonassoc SEG +%nonassoc mut DEF EQ '::' +%nonassoc IDENT ntIdent default union catch self +%nonassoc box return break continue IMPLTRAIT LAMBDA +%right '=' '>>=' '<<=' '-=' '+=' '*=' '/=' '^=' '|=' '&=' '%=' +%right '<-' +%nonassoc SINGLERNG +%nonassoc INFIXRNG +%nonassoc POSTFIXRNG +%nonassoc PREFIXRNG +%nonassoc '..' '...' +%left '||' +%left '&&' +%left '==' '!=' '<' '>' '<=' '>=' +%left '|' +%left '^' +%left '&' +%left '<<' '>>' +%left '+' '-' +%left '*' '/' '%' +%nonassoc ':' as +%nonassoc UNARY +%nonassoc FIELD VIS PATH WHERE NOSEMI +%nonassoc '?' '.' +%nonassoc '{' ntBlock '[' '(' '!' 
';' + +%% + +ident :: { Int } + : ntIdent { 0 } + | union { 1 } + | default { 2 } + | catch { 3 } + | IDENT { 4 } + +gt :: { Int } + : {- empty -} { 5 } + +some(p) :: { Int } + : some(p) p { 6 } + | p { 7 } + +many(p) :: { Int } + : some(p) { 8 } + | {- empty -} { 9 } + +sep_by1(p,sep) :: { Int } + : sep_by1(p,sep) sep p { 10 } + | p { 11 } + +sep_by(p,sep) :: { Int } + : sep_by1(p,sep) { 12 } + | {- empty -} { 13 } + +sep_by1T(p,sep) :: { Int } + : sep_by1(p,sep) sep { 14 } + | sep_by1(p,sep) { 15 } + +sep_byT(p,sep) :: { Int } + : sep_by1T(p,sep) { 16 } + | {- empty -} { 17 } + +source_file :: { Int } + : inner_attrs many(mod_item) { 18 } + | many(mod_item) { 19 } + +outer_attribute :: { Int } + : '#' '[' mod_path token_stream ']' { 20 } + | outerDoc { 21 } + +inner_attribute :: { Int } + : '#' '!' '[' mod_path token_stream ']' { 22 } + | '#!' '[' mod_path token_stream ']' { 23 } + | innerDoc { 24 } + +inner_attrs :: { Int } + : inner_attrs inner_attribute { 25 } + | inner_attribute { 26 } + +lit :: { Int } + : ntLit { 27 } + | byte { 28 } + | char { 29 } + | int { 30 } + | float { 31 } + | true { 32 } + | false { 33 } + | string { 34 } + +string :: { Int } + : str { 35 } + | rawStr { 36 } + | byteStr { 37 } + | rawByteStr { 38 } + +qual_path(segs) :: { Int } + : '<' qual_path_suf(segs) { 39 } + | lt_ty_qual_path as ty_path '>' '::' segs { 40 } + +qual_path_suf(segs) :: { Int } + : ty '>' '::' segs { 41 } + | ty as ty_path '>' '::' segs { 42 } + +lt_ty_qual_path :: { Int } + : '<<' qual_path_suf(path_segments_without_colons) { 43 } + +generic_values :: { Int } + : '<' sep_by1(lifetime,',') ',' sep_by1T(ty,',') gt '>' { 45 } + | '<' sep_by1(lifetime,',') ',' sep_by1T(binding,',') gt '>' { 46 } + | '<' sep_by1T(lifetime,',') gt '>' { 47 } + | '<' sep_by1(ty,',') ',' sep_by1T(binding,',') gt '>' { 48 } + | '<' sep_by1T(ty,',') gt '>' { 49 } + | '<' sep_by1T(binding,',') gt '>' { 50 } + | '<' gt '>' { 51 } + | lt_ty_qual_path ',' sep_by1T(ty,',') gt '>' { 53 } + | 
lt_ty_qual_path ',' sep_by1T(binding,',') gt '>' { 54 } + | lt_ty_qual_path gt '>' { 55 } + +binding :: { Int } + : ident '=' ty { 56 } + +ty_path :: { Int } + : ntPath { 57 } + | path_segments_without_colons { 58 } + | '::' path_segments_without_colons { 59 } + +ty_qual_path :: { Int } + : qual_path(path_segments_without_colons) { 60 } + +path_segments_without_colons :: { Int } + : sep_by1(path_segment_without_colons, '::') %prec SEG { 61 } + +path_segment_without_colons :: { Int } + : self_or_ident path_parameter1 { 62 } + +path_parameter1 :: { Int } + : generic_values { 63 } + | '(' sep_byT(ty,',') ')' { 64 } + | '(' sep_byT(ty,',') ')' '->' ty_no_plus { 65 } + | {- empty -} %prec IDENT { 66 } + +expr_path :: { Int } + : ntPath { 67 } + | path_segments_with_colons { 68 } + | '::' path_segments_with_colons { 69 } + +expr_qual_path :: { Int } + : qual_path(path_segments_with_colons) { 70 } + +path_segments_with_colons :: { Int } + : self_or_ident { 71 } + | path_segments_with_colons '::' self_or_ident { 72 } + | path_segments_with_colons '::' generic_values { 73 } + +mod_path :: { Int } + : ntPath { 74 } + | self_or_ident { 75 } + | '::' self_or_ident { 76 } + | mod_path '::' ident { 77 } + +lifetime :: { Int } + : LIFETIME { 78 } + +trait_ref :: { Int } + : ty_path { 79 } + +ty :: { Int } + : ty_no_plus { 80 } + | poly_trait_ref_mod_bound '+' sep_by1T(ty_param_bound_mod,'+') { 81 } + +ty_no_plus :: { Int } + : ntTy { 82 } + | no_for_ty { 83 } + | for_ty_no_plus { 84 } + +ty_prim :: { Int } + : no_for_ty_prim { 85 } + | for_ty_no_plus { 86 } + | poly_trait_ref_mod_bound '+' sep_by1T(ty_param_bound_mod,'+') { 87 } + +no_for_ty :: { Int } + : no_for_ty_prim { 88 } + | '(' ')' { 89 } + | '(' ty ')' { 90 } + | '(' ty ',' ')' { 91 } + | '(' ty ',' sep_by1T(ty,',') ')' { 92 } + | ty_qual_path { 93 } + +no_for_ty_prim :: { Int } + : '_' { 94 } + | '!' 
{ 95 } + | '[' ty ']' { 96 } + | '*' ty_no_plus { 97 } + | '*' const ty_no_plus { 98 } + | '*' mut ty_no_plus { 99 } + | '&' ty_no_plus { 100 } + | '&' lifetime ty_no_plus { 101 } + | '&' mut ty_no_plus { 102 } + | '&' lifetime mut ty_no_plus { 103 } + | '&&' ty_no_plus { 104 } + | '&&' lifetime ty_no_plus { 105 } + | '&&' mut ty_no_plus { 106 } + | '&&' lifetime mut ty_no_plus { 107 } + | ty_path %prec PATH { 108 } + | ty_mac { 109 } + | unsafe extern abi fn fn_decl(arg_general) { 110 } + | unsafe fn fn_decl(arg_general) { 111 } + | extern abi fn fn_decl(arg_general) { 112 } + | fn fn_decl(arg_general) { 113 } + | typeof '(' expr ')' { 114 } + | '[' ty ';' expr ']' { 115 } + | '?' trait_ref { 116 } + | '?' for_lts trait_ref { 117 } + +for_ty_no_plus :: { Int } + : for_lts unsafe extern abi fn fn_decl(arg_general) { 118 } + | for_lts unsafe fn fn_decl(arg_general) { 119 } + | for_lts extern abi fn fn_decl(arg_general) { 120 } + | for_lts fn fn_decl(arg_general) { 121 } + | for_lts trait_ref { 122 } + +impl_ty :: { Int } + : impl sep_by1(ty_param_bound_mod,'+') %prec IMPLTRAIT { 123 } + +lifetime_mut :: { Int } + : lifetime mut { 124 } + | lifetime { 125 } + | mut { 126 } + | {- empty -} { 127 } + +fn_decl(arg) :: { Int } + : '(' sep_by1(arg,',') ',' '...' ')' ret_ty { 128 } + | '(' sep_byT(arg,',') ')' ret_ty { 129 } + +fn_decl_with_self_general :: { Int } + : '(' arg_self_general ',' sep_byT(arg_general,',') ')' ret_ty { 130 } + | '(' arg_self_general ')' ret_ty { 131 } + | '(' ')' ret_ty { 132 } + +fn_decl_with_self_named :: { Int } + : '(' arg_self_named ',' sep_by1(arg_named,',') ',' ')' ret_ty { 133 } + | '(' arg_self_named ',' sep_by1(arg_named,',') ')' ret_ty { 134 } + | '(' arg_self_named ',' ')' ret_ty { 135 } + | '(' arg_self_named ')' ret_ty { 136 } + | fn_decl(arg_named) { 137 } + +ty_param_bound :: { Int } + : lifetime { 138 } + | poly_trait_ref { 139 } + +poly_trait_ref_mod_bound :: { Int } + : poly_trait_ref { 140 } + | '?' 
poly_trait_ref { 141 } + +ty_param_bound_mod :: { Int } + : ty_param_bound { 142 } + | '?' poly_trait_ref { 143 } + +abi :: { Int } + : str { 144 } + | {- empty -} { 145 } + +ret_ty :: { Int } + : '->' ty_no_plus { 146 } + | '->' impl_ty { 147 } + | {- empty -} { 148 } + +poly_trait_ref :: { Int } + : trait_ref { 149 } + | for_lts trait_ref { 150 } + +for_lts :: { Int } + : for '<' sep_byT(lifetime_def,',') '>' { 151 } + +lifetime_def :: { Int } + : many(outer_attribute) lifetime ':' sep_by1T(lifetime,'+') { 152 } + | many(outer_attribute) lifetime { 153 } + +arg_named :: { Int } + : ntArg { 154 } + | pat ':' ty { 155 } + +arg_general :: { Int } + : ntArg { 156 } + | ty { 157 } + | '_' ':' ty { 158 } + | ident ':' ty { 159 } + | mut ident ':' ty { 160 } + | '&' '_' ':' ty { 161 } + | '&' ident ':' ty { 162 } + | '&&' '_' ':' ty { 163 } + | '&&' ident ':' ty { 164 } + +arg_self_general :: { Int } + : mut self { 165 } + | self ':' ty { 166 } + | mut self ':' ty { 167 } + | arg_general { 168 } + +arg_self_named :: { Int } + : self { 169 } + | mut self { 170 } + | '&' self { 171 } + | '&' lifetime self { 172 } + | '&' mut self { 173 } + | '&' lifetime mut self { 174 } + | self ':' ty { 175 } + | mut self ':' ty { 176 } + +lambda_arg :: { Int } + : ntArg { 177 } + | pat ':' ty { 178 } + | pat { 179 } + +pat :: { Int } + : ntPat { 180 } + | '_' { 181 } + | '&' mut pat { 182 } + | '&' pat { 183 } + | '&&' mut pat { 184 } + | '&&' pat { 185 } + | lit_expr { 186 } + | '-' lit_expr { 187 } + | box pat { 188 } + | binding_mode1 ident '@' pat { 189 } + | binding_mode1 ident { 190 } + | ident '@' pat { 191 } + | expr_path { 192 } + | expr_qual_path { 193 } + | lit_or_path '...' lit_or_path { 194 } + | expr_path '{' '..' '}' { 195 } + | expr_path '{' pat_fields '}' { 196 } + | expr_path '(' pat_tup ')' { 197 } + | expr_mac { 198 } + | '[' pat_slice ']' { 199 } + | '(' pat_tup ')' { 200 } + +pat_tup :: { Int } + : sep_by1(pat,',') ',' '..' 
',' sep_by1(pat,',') { 201 } + | sep_by1(pat,',') ',' '..' ',' sep_by1(pat,',') ',' { 202 } + | sep_by1(pat,',') ',' '..' { 203 } + | sep_by1(pat,',') { 204 } + | sep_by1(pat,',') ',' { 205 } + | '..' ',' sep_by1(pat,',') { 206 } + | '..' ',' sep_by1(pat,',') ',' { 207 } + | '..' { 208 } + | {- empty -} { 209 } + +pat_slice :: { Int } + : sep_by1(pat,',') ',' '..' ',' sep_by1T(pat,',') { 210 } + | sep_by1(pat,',') ',' '..' { 211 } + | sep_by1(pat,',') '..' ',' sep_by1T(pat,',') { 212 } + | sep_by1(pat,',') '..' { 213 } + | sep_by1T(pat,',') { 214 } + | '..' ',' sep_by1T(pat,',') { 215 } + | '..' { 216 } + | {- empty -} { 217 } + +lit_or_path :: { Int } + : expr_path { 218 } + | expr_qual_path { 219 } + | '-' lit_expr { 220 } + | lit_expr { 221 } + +pat_fields :: { Int } + : sep_byT(pat_field,',') { 222 } + | sep_by1(pat_field,',') ',' '..' { 223 } + +pat_field :: { Int } + : binding_mode ident { 224 } + | box binding_mode ident { 225 } + | binding_mode ident ':' pat { 226 } + +binding_mode1 :: { Int } + : ref mut { 227 } + | ref { 228 } + | mut { 229 } + +binding_mode :: { Int } + : binding_mode1 { 230 } + | {- empty -} { 231 } + +gen_expression(lhs,rhs,rhs2) :: { Int } + : ntExpr { 232 } + | lit_expr { 233 } + | '[' sep_byT(expr,',') ']' { 234 } + | '[' inner_attrs sep_byT(expr,',') ']' { 235 } + | '[' expr ';' expr ']' { 236 } + | expr_mac { 237 } + | expr_path %prec PATH { 238 } + | expr_qual_path { 239 } + | '*' rhs %prec UNARY { 240 } + | '!' rhs %prec UNARY { 241 } + | '-' rhs %prec UNARY { 242 } + | '&' rhs %prec UNARY { 243 } + | '&' mut rhs %prec UNARY { 244 } + | '&&' rhs %prec UNARY { 245 } + | '&&' mut rhs %prec UNARY { 246 } + | box rhs %prec UNARY { 247 } + | left_gen_expression(lhs,rhs,rhs2) { 248 } + | '..' rhs2 %prec PREFIXRNG { 249 } + | '...' rhs2 %prec PREFIXRNG { 250 } + | '..' %prec SINGLERNG { 251 } + | '...' 
%prec SINGLERNG { 252 } + | return { 253 } + | return rhs { 254 } + | continue { 255 } + | continue lifetime { 256 } + | break { 257 } + | break rhs { 258 } + | break lifetime { 259 } + | break lifetime rhs %prec break { 260 } + | move lambda_args rhs %prec LAMBDA { 261 } + | lambda_args rhs %prec LAMBDA { 262 } + +left_gen_expression(lhs,rhs,rhs2) :: { Int } + : postfix_blockexpr(lhs) { 263 } + | lhs '[' expr ']' { 264 } + | lhs '(' sep_byT(expr,',') ')' { 265 } + | lhs ':' ty_no_plus { 266 } + | lhs as ty_no_plus { 267 } + | lhs '*' rhs { 268 } + | lhs '/' rhs { 269 } + | lhs '%' rhs { 270 } + | lhs '+' rhs { 271 } + | lhs '-' rhs { 272 } + | lhs '<<' rhs { 273 } + | lhs '>>' rhs { 274 } + | lhs '&' rhs { 275 } + | lhs '^' rhs { 276 } + | lhs '|' rhs { 277 } + | lhs '==' rhs { 278 } + | lhs '!=' rhs { 279 } + | lhs '<' rhs { 280 } + | lhs '>' rhs { 281 } + | lhs '<=' rhs { 282 } + | lhs '>=' rhs { 283 } + | lhs '&&' rhs { 284 } + | lhs '||' rhs { 285 } + | lhs '..' %prec POSTFIXRNG { 286 } + | lhs '...' %prec POSTFIXRNG { 287 } + | lhs '..' rhs2 %prec INFIXRNG { 288 } + | lhs '...' rhs2 %prec INFIXRNG { 289 } + | lhs '<-' rhs { 290 } + | lhs '=' rhs { 291 } + | lhs '>>=' rhs { 292 } + | lhs '<<=' rhs { 293 } + | lhs '-=' rhs { 294 } + | lhs '+=' rhs { 295 } + | lhs '*=' rhs { 296 } + | lhs '/=' rhs { 297 } + | lhs '^=' rhs { 298 } + | lhs '|=' rhs { 299 } + | lhs '&=' rhs { 300 } + | lhs '%=' rhs { 301 } + +postfix_blockexpr(lhs) :: { Int } + : lhs '?' { 302 } + | lhs '.' ident %prec FIELD { 303 } + | lhs '.' ident '(' sep_byT(expr,',') ')' { 304 } + | lhs '.' ident '::' '<' sep_byT(ty,',') '>' '(' sep_byT(expr,',') ')' { 305 } + | lhs '.' 
int { 306 } + +expr :: { Int } + : gen_expression(expr,expr,expr) { 307 } + | paren_expr { 308 } + | struct_expr { 309 } + | block_expr { 310 } + | lambda_expr_block { 311 } + +nostruct_expr :: { Int } + : gen_expression(nostruct_expr,nostruct_expr,nonstructblock_expr) { 312 } + | paren_expr { 313 } + | block_expr { 314 } + +nonstructblock_expr :: { Int } + : gen_expression(nonstructblock_expr,nostruct_expr,nonstructblock_expr) { 315 } + | paren_expr { 316 } + | block_like_expr { 317 } + | unsafe inner_attrs_block { 318 } + +nonblock_expr :: { Int } + : gen_expression(nonblock_expr,expr,expr) { 319 } + | paren_expr { 320 } + | struct_expr { 321 } + | lambda_expr_block { 322 } + +blockpostfix_expr :: { Int } + : postfix_blockexpr(block_like_expr) { 323 } + | postfix_blockexpr(vis_safety_block) { 324 } + | left_gen_expression(blockpostfix_expr,expr,expr) { 325 } + +lit_expr :: { Int } + : lit { 326 } + +block_expr :: { Int } + : block_like_expr { 327 } + | inner_attrs_block { 328 } + | unsafe inner_attrs_block { 329 } + + +block_like_expr :: { Int } + : if_expr { 330 } + | loop inner_attrs_block { 331 } + | lifetime ':' loop inner_attrs_block { 332 } + | for pat in nostruct_expr inner_attrs_block { 333 } + | lifetime ':' for pat in nostruct_expr inner_attrs_block { 334 } + | while nostruct_expr inner_attrs_block { 335 } + | lifetime ':' while nostruct_expr inner_attrs_block { 336 } + | while let pat '=' nostruct_expr inner_attrs_block { 337 } + | lifetime ':' while let pat '=' nostruct_expr inner_attrs_block { 338 } + | match nostruct_expr '{' '}' { 339 } + | match nostruct_expr '{' inner_attrs '}' { 340 } + | match nostruct_expr '{' arms '}' { 341 } + | match nostruct_expr '{' inner_attrs arms '}' { 342 } + | expr_path '!' 
'{' token_stream '}' { 343 } + | do catch inner_attrs_block { 344 } + +if_expr :: { Int } + : if nostruct_expr block else_expr { 345 } + | if let pat '=' nostruct_expr block else_expr { 346 } + +else_expr :: { Int } + : else block { 347 } + | else if_expr { 348 } + | {- empty -} { 349 } + +arms :: { Int } + : ntArm { 350 } + | ntArm arms { 351 } + | many(outer_attribute) sep_by1(pat,'|') arm_guard '=>' expr_arms { 352 } + +arm_guard :: { Int } + : {- empty -} { 353 } + | if expr { 354 } + +comma_arms :: { Int } + : {- empty -} { 355 } + | ',' { 356 } + | ',' arms { 357 } + +expr_arms :: { Int } + : nonblock_expr comma_arms { 358 } + | blockpostfix_expr comma_arms { 359 } + | vis_safety_block comma_arms { 360 } + | vis_safety_block arms { 361 } + | block_like_expr comma_arms { 362 } + | block_like_expr arms { 363 } + +paren_expr :: { Int } + : '(' ')' { 364 } + | '(' inner_attrs ')' { 365 } + | '(' expr ')' { 366 } + | '(' inner_attrs expr ')' { 367 } + | '(' expr ',' ')' { 368 } + | '(' inner_attrs expr ',' ')' { 369 } + | '(' expr ',' sep_by1T(expr,',') ')' { 370 } + | '(' inner_attrs expr ',' sep_by1T(expr,',') ')' { 371 } + +lambda_expr_block :: { Int } + : move lambda_args '->' ty_no_plus block { 372 } + | lambda_args '->' ty_no_plus block { 373 } + +lambda_args :: { Int } + : '||' { 374 } + | '|' sep_byT(lambda_arg,',') '|' { 375 } + +struct_expr :: { Int } + : expr_path '{' '..' expr '}' { 376 } + | expr_path '{' inner_attrs '..' expr '}' { 377 } + | expr_path '{' sep_by1(field,',') ',' '..' expr '}' { 378 } + | expr_path '{' inner_attrs sep_by1(field,',') ',' '..' 
expr '}' { 379 } + | expr_path '{' sep_byT(field,',') '}' { 380 } + | expr_path '{' inner_attrs sep_byT(field,',') '}' { 381 } + +field :: { Int } + : ident ':' expr { 382 } + | ident { 383 } + +vis_safety_block :: { Int } + : pub_or_inherited safety inner_attrs_block { 384 } + +vis_union_nonblock_expr :: { Int } + : union_expr { 385 } + | left_gen_expression(vis_union_nonblock_expr, expr, expr) { 386 } + +union_expr :: { Int } + : pub_or_inherited union { 387 } + +stmt :: { Int } + : ntStmt { 388 } + | many(outer_attribute) let pat ':' ty initializer ';' { 389 } + | many(outer_attribute) let pat initializer ';' { 390 } + | many(outer_attribute) nonblock_expr ';' { 391 } + | many(outer_attribute) block_like_expr ';' { 392 } + | many(outer_attribute) blockpostfix_expr ';' { 393 } + | many(outer_attribute) vis_union_nonblock_expr ';' { 394 } + | many(outer_attribute) block_like_expr %prec NOSEMI { 395 } + | many(outer_attribute) vis_safety_block ';' { 396 } + | many(outer_attribute) vis_safety_block %prec NOSEMI { 397 } + | gen_item(pub_or_inherited) { 398 } + | many(outer_attribute) expr_path '!' ident '[' token_stream ']' ';' { 399 } + | many(outer_attribute) expr_path '!' ident '(' token_stream ')' ';' { 400 } + | many(outer_attribute) expr_path '!' 
ident '{' token_stream '}' { 401 } + +pub_or_inherited :: { Int } + : pub %prec VIS { 402 } + | {- empty -} %prec VIS { 403 } + +stmtOrSemi :: { Int } + : ';' { 404 } + | stmt { 405 } + +stmts_possibly_no_semi :: { Int } + : stmtOrSemi stmts_possibly_no_semi { 406 } + | stmtOrSemi { 407 } + | many(outer_attribute) nonblock_expr { 408 } + | many(outer_attribute) blockpostfix_expr { 409 } + +initializer :: { Int } + : '=' expr { 410 } + | {- empty -} { 411 } + +block :: { Int } + : ntBlock { 412 } + | '{' '}' { 413 } + | '{' stmts_possibly_no_semi '}' { 414 } + +inner_attrs_block :: { Int } + : block { 415 } + | '{' inner_attrs '}' { 416 } + | '{' inner_attrs stmts_possibly_no_semi '}' { 417 } + +gen_item(vis) :: { Int } + : many(outer_attribute) vis static ident ':' ty '=' expr ';' { 418 } + | many(outer_attribute) vis static mut ident ':' ty '=' expr ';' { 419 } + | many(outer_attribute) vis const ident ':' ty '=' expr ';' { 420 } + | many(outer_attribute) vis type ident generics where_clause '=' ty ';' { 421 } + | many(outer_attribute) vis use view_path ';' { 422 } + | many(outer_attribute) vis safety extern crate ident ';' { 423 } + | many(outer_attribute) vis safety extern crate ident as ident ';' { 424 } + | many(outer_attribute) vis const safety fn ident generics fn_decl(arg_named) where_clause inner_attrs_block { 425 } + | many(outer_attribute) vis safety extern abi fn ident generics fn_decl(arg_named) where_clause inner_attrs_block { 426 } + | many(outer_attribute) vis safety fn ident generics fn_decl(arg_named) where_clause inner_attrs_block { 427 } + | many(outer_attribute) vis mod ident ';' { 428 } + | many(outer_attribute) vis mod ident '{' many(mod_item) '}' { 429 } + | many(outer_attribute) vis mod ident '{' inner_attrs many(mod_item) '}' { 430 } + | many(outer_attribute) vis safety extern abi '{' many(foreign_item) '}' { 431 } + | many(outer_attribute) vis safety extern abi '{' inner_attrs many(foreign_item) '}' { 432 } + | many(outer_attribute) vis 
struct ident generics struct_decl_args { 433 } + | many(outer_attribute) vis union ident generics struct_decl_args { 434 } + | many(outer_attribute) vis enum ident generics where_clause '{' sep_byT(enum_def,',') '}' { 435 } + | many(outer_attribute) vis safety trait ident generics where_clause '{' many(trait_item) '}' { 437 } + | many(outer_attribute) vis safety impl generics ty_prim where_clause '{' impl_items '}' { 438 } + | many(outer_attribute) vis default safety impl generics ty_prim where_clause '{' impl_items '}' { 439 } + | many(outer_attribute) vis safety impl generics '(' ty_no_plus ')' where_clause '{' impl_items '}' { 440 } + | many(outer_attribute) vis default safety impl generics '(' ty_no_plus ')' where_clause '{' impl_items '}' { 441 } + | many(outer_attribute) vis safety impl generics '!' trait_ref for ty where_clause '{' impl_items '}' { 442 } + | many(outer_attribute) vis default safety impl generics '!' trait_ref for ty where_clause '{' impl_items '}' { 443 } + | many(outer_attribute) vis safety impl generics trait_ref for ty where_clause '{' impl_items '}' { 444 } + | many(outer_attribute) vis default safety impl generics trait_ref for ty where_clause '{' impl_items '}' { 445 } + | many(outer_attribute) vis safety impl generics trait_ref for '..' '{' '}' { 446 } + +mod_item :: { Int } + : ntItem { 447 } + | gen_item(vis) { 448 } + | many(outer_attribute) expr_path '!' ident '[' token_stream ']' ';' { 449 } + | many(outer_attribute) expr_path '!' '[' token_stream ']' ';' { 450 } + | many(outer_attribute) expr_path '!' ident '(' token_stream ')' ';' { 451 } + | many(outer_attribute) expr_path '!' '(' token_stream ')' ';' { 452 } + | many(outer_attribute) expr_path '!' ident '{' token_stream '}' { 453 } + | many(outer_attribute) expr_path '!' 
'{' token_stream '}' { 454 } + +foreign_item :: { Int } + : many(outer_attribute) vis static ident ':' ty ';' { 455 } + | many(outer_attribute) vis static mut ident ':' ty ';' { 456 } + | many(outer_attribute) vis fn ident generics fn_decl(arg_named) where_clause ';' { 457 } + + + +generics :: { Int } + : ntGenerics { 458 } + | '<' sep_by1(lifetime_def,',') ',' sep_by1T(ty_param,',') gt '>' { 459 } + | '<' sep_by1T(lifetime_def,',') gt '>' { 460 } + | '<' sep_by1T(ty_param,',') gt '>' { 461 } + | '<' gt '>' { 462 } + | {- empty -} { 463 } + +ty_param :: { Int } + : many(outer_attribute) ident { 464 } + | many(outer_attribute) ident ':' sep_by1T(ty_param_bound_mod,'+') { 465 } + | many(outer_attribute) ident '=' ty { 466 } + | many(outer_attribute) ident ':' sep_by1T(ty_param_bound_mod,'+') '=' ty { 467 } + +struct_decl_args :: { Int } + : where_clause ';' { 468 } + | where_clause '{' sep_byT(struct_decl_field,',') '}' { 469 } + | '(' sep_byT(tuple_decl_field,',') ')' where_clause ';' { 470 } + +struct_decl_field :: { Int } + : many(outer_attribute) vis ident ':' ty { 471 } + +tuple_decl_field :: { Int } + : many(outer_attribute) vis ty { 472 } + +enum_def :: { Int } + : many(outer_attribute) ident '{' sep_byT(struct_decl_field,',') '}' { 473 } + | many(outer_attribute) ident '(' sep_byT(tuple_decl_field,',') ')' { 474 } + | many(outer_attribute) ident initializer { 475 } + +where_clause :: { Int } + : {- empty -} { 476 } + | ntWhereClause { 477 } + | where sep_by(where_predicate,',') %prec WHERE { 478 } + | where sep_by1(where_predicate,',') ',' %prec WHERE { 479 } + +where_predicate :: { Int } + : lifetime { 480 } + | lifetime ':' sep_by1T(lifetime,'+') { 481 } + | no_for_ty %prec EQ { 482 } + | no_for_ty '=' ty { 483 } + | no_for_ty ':' sep_by1T(ty_param_bound_mod,'+') { 484 } + | for_lts no_for_ty { 485 } + | for_lts no_for_ty ':' sep_by1T(ty_param_bound_mod,'+') { 486 } + +impl_items :: { Int } + : many(impl_item) { 487 } + | inner_attrs many(impl_item) { 488 } 
+ +impl_item :: { Int } + : many(outer_attribute) vis def type ident '=' ty ';' { 489 } + | many(outer_attribute) vis def const ident ':' ty '=' expr ';' { 490 } + | many(outer_attribute) def mod_mac { 491 } + +trait_item :: { Int } + : ntTraitItem { 494 } + | many(outer_attribute) const ident ':' ty initializer ';' { 495 } + | many(outer_attribute) mod_mac { 496 } + | many(outer_attribute) type ident ';' { 497 } + | many(outer_attribute) type ident '=' ty ';' { 498 } + | many(outer_attribute) type ident ':' sep_by1T(ty_param_bound_mod,'+') ';' { 499 } + +safety :: { Int } + : {- empty -} { 503 } + | unsafe { 504 } + +ext_abi :: { Int } + : {- empty -} { 505 } + | extern abi { 506 } + +vis :: { Int } + : {- empty -} %prec VIS { 507 } + | pub %prec VIS { 508 } + | pub '(' crate ')' { 509 } + | pub '(' in mod_path ')' { 510 } + | pub '(' super ')' { 511 } + | pub '(' self ')' { 512 } + +def :: { Int } + : {- empty -} %prec DEF { 513 } + | default { 514 } + +view_path :: { Int } + : '::' sep_by1(self_or_ident,'::') { 515 } + | '::' sep_by1(self_or_ident,'::') as ident { 516 } + | '::' '*' { 517 } + | '::' sep_by1(self_or_ident,'::') '::' '*' { 518 } + | '::' sep_by1(self_or_ident,'::') '::' '{' sep_byT(plist,',') '}' { 519 } + | '::' '{' sep_byT(plist,',') '}' { 520 } + | sep_by1(self_or_ident,'::') { 521 } + | sep_by1(self_or_ident,'::') as ident { 522 } + | '*' { 523 } + | sep_by1(self_or_ident,'::') '::' '*' { 524 } + | sep_by1(self_or_ident,'::') '::' '{' sep_byT(plist,',') '}' { 525 } + | '{' sep_byT(plist,',') '}' { 526 } + +self_or_ident :: { Int } + : ident { 527 } + | self { 528 } + | Self { 529 } + | super { 530 } + +plist :: { Int } + : self_or_ident { 531 } + | self_or_ident as ident { 532 } + +expr_mac :: { Int } + : expr_path '!' '[' token_stream ']' { 533 } + | expr_path '!' '(' token_stream ')' { 534 } + +ty_mac :: { Int } + : ty_path '!' '[' token_stream ']' { 535 } + | ty_path '!' '{' token_stream '}' { 536 } + | ty_path '!' 
'(' token_stream ')' { 537 } + +mod_mac :: { Int } + : mod_path '!' '[' token_stream ']' ';' { 538 } + | mod_path '!' '{' token_stream '}' { 539 } + | mod_path '!' '(' token_stream ')' ';' { 540 } + +token_stream :: { Int } + : {- empty -} { 541 } + | some(token_tree) { 542 } + +token_tree :: { Int } + : ntTT { 543 } + | '(' token_stream ')' { 544 } + | '{' token_stream '}' { 545 } + | '[' token_stream ']' { 546 } + | token { 547 } + +token :: { Int } + : '=' { 548 } + | '<' { 549 } + | '>' { 550 } + | '!' { 551 } + | '~' { 552 } + | '-' { 553 } + | '/' { 554 } + | '+' { 555 } + | '*' { 556 } + | '%' { 557 } + | '^' { 558 } + | '&' { 559 } + | '|' { 560 } + | '<<=' { 561 } + | '>>=' { 562 } + | '-=' { 563 } + | '&=' { 564 } + | '|=' { 565 } + | '+=' { 566 } + | '*=' { 567 } + | '/=' { 568 } + | '^=' { 569 } + | '%=' { 571 } + | '||' { 572 } + | '&&' { 573 } + | '==' { 574 } + | '!=' { 575 } + | '<=' { 576 } + | '>=' { 577 } + | '<<' { 578 } + | '>>' { 579 } + | '@' { 580 } + | '...' { 581 } + | '..' { 582 } + | '.' { 583 } + | ',' { 584 } + | ';' { 585 } + | '::' { 586 } + | ':' { 587 } + | '->' { 588 } + | '<-' { 589 } + | '=>' { 590 } + | '#' { 591 } + | '$' { 592 } + | '?' { 593 } + | '#!' 
{ 594 } + | byte { 595 } + | char { 596 } + | int { 597 } + | float { 598 } + | str { 599 } + | byteStr { 600 } + | rawStr { 601 } + | rawByteStr { 602 } + | as { 603 } + | box { 604 } + | break { 605 } + | const { 606 } + | continue { 607 } + | crate { 608 } + | else { 609 } + | enum { 610 } + | extern { 611 } + | false { 612 } + | fn { 613 } + | for { 614 } + | if { 615 } + | impl { 616 } + | in { 617 } + | let { 618 } + | loop { 619 } + | match { 620 } + | mod { 621 } + | move { 622 } + | mut { 623 } + | pub { 624 } + | ref { 625 } + | return { 626 } + | Self { 627 } + | self { 628 } + | static { 629 } + | struct { 630 } + | super { 631 } + | trait { 632 } + | true { 633 } + | type { 634 } + | unsafe { 635 } + | use { 636 } + | where { 637 } + | while { 638 } + | abstract { 639 } + | alignof { 640 } + | become { 641 } + | do { 642 } + | final { 643 } + | macro { 644 } + | offsetof { 645 } + | override { 646 } + | priv { 647 } + | proc { 648 } + | pure { 649 } + | sizeof { 650 } + | typeof { 651 } + | unsized { 652 } + | virtual { 653 } + | yield { 654 } + | default { 655 } + | union { 656 } + | catch { 657 } + | outerDoc { 658 } + | innerDoc { 659 } + | IDENT { 660 } + | '_' { 661 } + | LIFETIME { 662 } + +export_attribute :: { Int } + : inner_attribute { 663 } + | outer_attribute { 664 } + +export_block :: { Int } + : ntBlock { 665 } + | safety '{' '}' { 666 } + | safety '{' stmts_possibly_no_semi '}' { 667 } + +export_ty :: { Int } + : ty { 668 } + | impl_ty { 669 } + + +{ + +type P a = String -> Either String (a, String) + +bindP :: P a -> (a -> P b) -> P b +bindP p f s = case p s of + Left m -> Left m + Right (x,s') -> f x s' + +returnP :: a -> P a +returnP x s = Right (x,s) + +parseError :: Show b => b -> P a +parseError b _ = Left ("Syntax error: the symbol `" ++ show b ++ "' does not fit here") + + +data Token + = Equal + | Less + | Greater + | Ampersand + | Pipe + | Exclamation + | Tilde + | Plus + | Minus + | Star + | Slash + | Percent + | Caret + | 
GreaterEqual + | GreaterGreaterEqual + | AmpersandAmpersand + | PipePipe + | LessLess + | GreaterGreater + | EqualEqual + | NotEqual + | LessEqual + | LessLessEqual + | MinusEqual + | AmpersandEqual + | PipeEqual + | PlusEqual + | StarEqual + | SlashEqual + | CaretEqual + | PercentEqual + | At + | Dot + | DotDot + | DotDotDot + | Comma + | Semicolon + | Colon + | ModSep + | RArrow + | LArrow + | FatArrow + | Pound + | Dollar + | Question + | OpenParen + | OpenBracket + | OpenBrace + | CloseParen + | CloseBracket + | CloseBrace + | IdentTok String + | Underscore + | LifetimeTok String + | Space + | InnerDoc + | OuterDoc + | Shebang + | Eof + | ByteTok String + | CharTok String + | IntegerTok String + | FloatTok String + | StrTok String + | StrRawTok String + | ByteStrTok String + | ByteStrRawTok String + | Interpolated Int + deriving Show + + +-- This is an intentionally simplfied tokenizer +lexNonSpace :: P Token +lexNonSpace "" = Right (Eof, "") +lexNonSpace ('.':cs) = Right (Dot, cs) +lexNonSpace ('+':cs) = Right (Plus, cs) +lexNonSpace (';':cs) = Right (Semicolon, cs) +lexNonSpace (',':cs) = Right (Comma, cs) +lexNonSpace ('=':cs) = Right (Equal, cs) +lexNonSpace ('{':cs) = Right (OpenBrace, cs) +lexNonSpace ('}':cs) = Right (CloseBrace, cs) +lexNonSpace ('(':cs) = Right (OpenParen, cs) +lexNonSpace (')':cs) = Right (CloseParen, cs) +lexNonSpace (c:cs) + | isSpace c = lexNonSpace cs + | isNumber c = let (tok,cs') = span isNumber (c:cs) in Right (IntegerTok tok, cs') + | isAlpha c = let (tok,cs') = span isAlphaNum (c:cs) in Right (IdentTok tok, cs') + | otherwise = Left ("Unexpected character: `" ++ [c] ++ "'") + + +main = case parseStmt "union.1 + 2;" of + Right (394, "") -> pure () + _ -> exitWith (ExitFailure 1) +} diff -Nru happy-1.19.5/tests/issue94.y happy-1.19.8/tests/issue94.y --- happy-1.19.5/tests/issue94.y 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/tests/issue94.y 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,33 @@ +-- See for more 
information +%name parse prod + +%tokentype { Token } + +%monad { P } { bindP } { returnP } +%error { error "parse error" } +%lexer { lexer } { EOF } + +%token + IDENT { Identifier $$ } + +%% + +prod + : IDENT { () } + +{ +data Token = EOF | Identifier String + +type P a = String -> (a, String) + +bindP :: P a -> (a -> P b) -> P b +bindP p f s = let (x,s') = p s in f x s' + +returnP :: a -> P a +returnP = (,) + +lexer :: (Token -> P a) -> P a +lexer cont s = cont (case s of { "" -> EOF; _ -> Identifier s }) "" + +main = return () +} diff -Nru happy-1.19.5/tests/issue95.y happy-1.19.8/tests/issue95.y --- happy-1.19.5/tests/issue95.y 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/tests/issue95.y 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,35 @@ +-- See for more information +%name parse prod + +%tokentype { Token } + +%monad { P } { bindP } { returnP } +%error { error "parse error" } +%lexer { lexer } { EOF } + +%token + IDENT { Identifier $$ } + +%% + +prod :: { () } + : IDENT {%% \_ -> returnP () } + +{ + +data Token = EOF | Identifier String + +type P a = String -> (a, String) + +bindP :: P a -> (a -> P b) -> P b +bindP p f s = let (x,s') = p s in f x s' + +returnP :: a -> P a +returnP = (,) + +lexer :: (Token -> P a) -> P a +lexer cont s = cont (case s of { "" -> EOF; _ -> Identifier s }) "" + +main = pure () + +} diff -Nru happy-1.19.5/tests/Makefile happy-1.19.8/tests/Makefile --- happy-1.19.5/tests/Makefile 2015-01-06 21:04:19.000000000 +0000 +++ happy-1.19.8/tests/Makefile 2017-10-12 07:46:11.000000000 +0000 @@ -1,6 +1,13 @@ HAPPY=../dist/build/happy/happy -HC=ghc -HC_OPTS=-hide-all-packages -package base -package array -package mtl -Werror +HC_OPTS=-hide-all-packages -package base -package array -package mtl -Wall -Werror + +ifeq ($(wildcard ../cabal.sandbox.config),) + HC = ghc + SANDBOX_CONFIG = +else + HC = cabal exec -- ghc + SANDBOX_CONFIG = cabal.sandbox.config +endif .PRECIOUS: %.n.hs %.g.hs %.o %.exe %.bin @@ -13,8 +20,10 @@ TESTS = Test.ly 
TestMulti.ly TestPrecedence.ly bug001.ly \ monad001.y monad002.ly precedence001.ly precedence002.y \ bogus-token.y bug002.y Partial.ly \ + issue91.y issue93.y issue94.y issue95.y \ AttrGrammar001.y AttrGrammar002.y \ - test_rules.y monaderror.y + test_rules.y monaderror.y monaderror-explist.y \ + typeclass_monad001.y typeclass_monad002.ly typeclass_monad_lexer.y ERROR_TESTS = error001.y @@ -56,7 +65,7 @@ %.agc.hs : %.y $(HAPPY) $(TEST_HAPPY_OPTS) -agc $< -o $@ -CLEAN_FILES += *.n.hs *.a.hs *.g.hs *.gc.hs *.ag.hs *.agc.hs *.info *.hi *.bin *.exe +CLEAN_FILES += *.n.hs *.a.hs *.g.hs *.gc.hs *.ag.hs *.agc.hs *.info *.hi *.bin *.exe *.o *.run.stdout *.run.stderr $(SANDBOX_CONFIG) ALL_TEST_HS = $(shell echo $(TESTS) | sed -e 's/\([^\. ]*\)\.\(l\)\{0,1\}y/\1.n.hs \1.a.hs \1.g.hs \1.gc.hs \1.ag.hs \1.agc.hs/g') @@ -75,13 +84,13 @@ check.%.y : %.y @echo "--> Checking $<..." $(HAPPY) $(TEST_HAPPY_OPTS) $< 1>$*.run.stdout 2>$*.run.stderr || true - @diff -u $*.stdout $*.run.stdout - @diff -u $*.stderr $*.run.stderr + @diff -u --ignore-all-space $*.stdout $*.run.stdout + @diff -u --ignore-all-space $*.stderr $*.run.stderr %$(HS_PROG_EXT) : %.hs $(HC) $(HC_OPTS) $($*_LD_OPTS) $< -o $@ -all :: $(CHECK_ERROR_TESTS) $(ALL_TESTS) +all :: $(SANDBOX_CONFIG) $(CHECK_ERROR_TESTS) $(ALL_TESTS) check-todo:: $(HAPPY) $(TEST_HAPPY_OPTS) -ad Test.ly @@ -101,3 +110,6 @@ clean: $(RM) $(CLEAN_FILES) + +cabal.sandbox.config: + cabal sandbox init --sandbox=../.cabal-sandbox diff -Nru happy-1.19.5/tests/monaderror-explist.y happy-1.19.8/tests/monaderror-explist.y --- happy-1.19.5/tests/monaderror-explist.y 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/tests/monaderror-explist.y 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,70 @@ +{ +module Main where + +import Data.Char +import Control.Monad.Error +import System.Exit +import System.Environment (getProgName) +import Data.List (isPrefixOf) +} + +%name parseFoo +%tokentype { Token } +%errorhandlertype explist +%error { handleErrorExpList } + 
+%monad { ParseM } { (>>=) } { return } + +%token + 'S' { TokenSucc } + 'Z' { TokenZero } + 'T' { TokenTest } + +%% + +Exp : 'Z' { 0 } + | 'T' 'Z' Exp { $3 + 1 } + | 'S' Exp { $2 + 1 } + +{ + +type ParseM a = Either ParseError a +data ParseError + = ParseError (Maybe (Token, [String])) + | StringError String + deriving (Eq,Show) +instance Error ParseError where + strMsg = StringError + +data Token + = TokenSucc + | TokenZero + | TokenTest + deriving (Eq,Show) + +handleErrorExpList :: ([Token], [String]) -> ParseM a +handleErrorExpList ([], _) = throwError $ ParseError Nothing +handleErrorExpList (ts, explist) = throwError $ ParseError $ Just $ (head ts, explist) + +lexer :: String -> [Token] +lexer [] = [] +lexer (c:cs) + | isSpace c = lexer cs + | c == 'S' = TokenSucc:(lexer cs) + | c == 'Z' = TokenZero:(lexer cs) + | c == 'T' = TokenTest:(lexer cs) + | otherwise = error "lexer error" + +main :: IO () +main = do + test "Z Z" $ Left (ParseError (Just (TokenZero,[]))) + test "T S" $ Left (ParseError (Just (TokenSucc,["'Z'"]))) + + where + test inp exp = do + putStrLn $ "testing " ++ inp + let tokens = lexer inp + when (parseFoo tokens /= exp) $ do + print (parseFoo tokens) + exitWith (ExitFailure 1) +} diff -Nru happy-1.19.5/tests/typeclass_monad001.y happy-1.19.8/tests/typeclass_monad001.y --- happy-1.19.5/tests/typeclass_monad001.y 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/tests/typeclass_monad001.y 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,93 @@ +-- Testing %monad without %lexer, using the IO monad. 
+ +{ +module Main where + +import Control.Monad.Trans +import System.IO +import Data.Char +} + +%name calc +%tokentype { Token } + +%token num { TokenNum $$ } + '+' { TokenPlus } + '-' { TokenMinus } + '*' { TokenTimes } + '/' { TokenDiv } + '^' { TokenExp } + '\n' { TokenEOL } + '(' { TokenOB } + ')' { TokenCB } + +%left '-' '+' +%left '*' +%nonassoc '/' +%left NEG -- negation--unary minus +%right '^' -- exponentiation + +%monad { (MonadIO m) } { m } { (>>=) } { return } + +%% +input : {- empty string -} { () } + | input line { $1 } + +line : '\n' { () } + | exp '\n' {% hPutStr stdout (show $1) } + +exp : num { $1 } + | exp '+' exp { $1 + $3 } + | exp '-' exp { $1 - $3 } + | exp '*' exp { $1 * $3 } + | exp '/' exp { $1 / $3 } + | '-' exp %prec NEG { -$2 } +-- | exp '^' exp { $1 ^ $3 } + | '(' exp ')' { $2 } + +{ +main = do + calc (lexer "1 + 2 * 3 / 4\n") + +{- + -- check that non-associative operators can't be used together + r <- try (calc (lexer "1 / 2 / 3")) + case r of + Left e -> return () + Right _ -> ioError (userError "fail!") +-} + +data Token + = TokenExp + | TokenEOL + | TokenNum Double + | TokenPlus + | TokenMinus + | TokenTimes + | TokenDiv + | TokenOB + | TokenCB + +-- and a simple lexer that returns this datastructure. + +lexer :: String -> [Token] +lexer [] = [] +lexer ('\n':cs) = TokenEOL : lexer cs +lexer (c:cs) + | isSpace c = lexer cs + | isDigit c = lexNum (c:cs) +lexer ('+':cs) = TokenPlus : lexer cs +lexer ('-':cs) = TokenMinus : lexer cs +lexer ('*':cs) = TokenTimes : lexer cs +lexer ('/':cs) = TokenDiv : lexer cs +lexer ('^':cs) = TokenExp : lexer cs +lexer ('(':cs) = TokenOB : lexer cs +lexer (')':cs) = TokenCB : lexer cs + +lexNum cs = TokenNum (read num) : lexer rest + where (num,rest) = span isNum cs + isNum c = isDigit c || c == '.' 
+ + +happyError tokens = liftIO (ioError (userError "parse error")) +} diff -Nru happy-1.19.5/tests/typeclass_monad002.ly happy-1.19.8/tests/typeclass_monad002.ly --- happy-1.19.5/tests/typeclass_monad002.ly 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/tests/typeclass_monad002.ly 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,185 @@ +----------------------------------------------------------------------------- +Test for monadic Happy Parsers, Simon Marlow 1996. + +> { +> {-# OPTIONS_GHC -fglasgow-exts #-} +> -- -fglasgow-exts required because P is a type synonym, and Happy uses it +> -- unsaturated. +> import Data.Char +> } + +> %name calc +> %tokentype { Token } + +> %monad { (Monad m) } { P m } { thenP } { returnP } +> %lexer { lexer } { TokenEOF } + +> %token +> let { TokenLet } +> in { TokenIn } +> int { TokenInt $$ } +> var { TokenVar $$ } +> '=' { TokenEq } +> '+' { TokenPlus } +> '-' { TokenMinus } +> '*' { TokenTimes } +> '/' { TokenDiv } +> '(' { TokenOB } +> ')' { TokenCB } + +> %% + +> Exp :: {Exp} +> : let var '=' Exp in Exp {% \s l -> return (ParseOk (Let l $2 $4 $6)) } +> | Exp1 { Exp1 $1 } +> +> Exp1 :: {Exp1} +> : Exp1 '+' Term { Plus $1 $3 } +> | Exp1 '-' Term { Minus $1 $3 } +> | Term { Term $1 } +> +> Term :: {Term} +> : Term '*' Factor { Times $1 $3 } +> | Term '/' Factor { Div $1 $3 } +> | Factor { Factor $1 } +> + +> Factor :: {Factor} +> : int { Int $1 } +> | var { Var $1 } +> | '(' Exp ')' { Brack $2 } + +> { + +----------------------------------------------------------------------------- +The monad serves three purposes: + + * it passes the input string around + * it passes the current line number around + * it deals with success/failure. 
+ +> data ParseResult a +> = ParseOk a +> | ParseFail String + +> type P m a = String -> Int -> m (ParseResult a) + +> thenP :: Monad m => P m a -> (a -> P m b) -> P m b +> m `thenP` k = \s l -> +> do +> res <- m s l +> case res of +> ParseFail s -> return (ParseFail s) +> ParseOk a -> k a s l + +> returnP :: Monad m => a -> P m a +> returnP a = \s l -> return (ParseOk a) + +----------------------------------------------------------------------------- + +Now we declare the datastructure that we are parsing. + +> data Exp = Let Int String Exp Exp | Exp1 Exp1 +> data Exp1 = Plus Exp1 Term | Minus Exp1 Term | Term Term +> data Term = Times Term Factor | Div Term Factor | Factor Factor +> data Factor = Int Int | Var String | Brack Exp + +The datastructure for the tokens... + +> data Token +> = TokenLet +> | TokenIn +> | TokenInt Int +> | TokenVar String +> | TokenEq +> | TokenPlus +> | TokenMinus +> | TokenTimes +> | TokenDiv +> | TokenOB +> | TokenCB +> | TokenEOF + +.. and a simple lexer that returns this datastructure. 
+ +> lexer :: Monad m => (Token -> P m a) -> P m a +> lexer cont s = case s of +> [] -> cont TokenEOF [] +> ('\n':cs) -> \line -> lexer cont cs (line+1) +> (c:cs) +> | isSpace c -> lexer cont cs +> | isAlpha c -> lexVar (c:cs) +> | isDigit c -> lexNum (c:cs) +> ('=':cs) -> cont TokenEq cs +> ('+':cs) -> cont TokenPlus cs +> ('-':cs) -> cont TokenMinus cs +> ('*':cs) -> cont TokenTimes cs +> ('/':cs) -> cont TokenDiv cs +> ('(':cs) -> cont TokenOB cs +> (')':cs) -> cont TokenCB cs +> where +> lexNum cs = cont (TokenInt (read num)) rest +> where (num,rest) = span isDigit cs +> lexVar cs = +> case span isAlpha cs of +> ("let",rest) -> cont TokenLet rest +> ("in",rest) -> cont TokenIn rest +> (var,rest) -> cont (TokenVar var) rest + +> runCalc :: Monad m => String -> m Exp +> runCalc s = +> do +> res <- calc s 1 +> case res of +> ParseOk e -> return e +> ParseFail s -> error s + +----------------------------------------------------------------------------- +The following functions should be defined for all parsers. + +This is the overall type of the parser. + +> type Parse m = P m Exp +> calc :: Monad m => Parse m + +The next function is called when a parse error is detected. It has +the same type as the top-level parse function. + +> happyError :: P m a +> happyError = \s i -> error ( +> "Parse error in line " ++ show (i::Int) ++ "\n") + +----------------------------------------------------------------------------- + +Here we test our parser. 
+ +> main = +> do +> res <- runCalc "1 + 2 + 3" +> case res of +> (Exp1 (Plus (Plus (Term (Factor (Int 1))) +> (Factor (Int 2))) (Factor (Int 3)))) -> +> do +> res <- runCalc "1 * 2 + 3" +> case res of +> (Exp1 (Plus (Term (Times (Factor (Int 1)) (Int 2))) +> (Factor (Int 3)))) -> +> do +> res <- runCalc "1 + 2 * 3" +> case res of +> (Exp1 (Plus (Term (Factor (Int 1))) +> (Times (Factor (Int 2)) (Int 3)))) -> +> do +> res <- runCalc "let x = 2 in x * (x - 2)" +> case res of +> (Let 1 "x" (Exp1 (Term (Factor (Int 2)))) +> (Exp1 (Term (Times (Factor (Var "x")) +> (Brack (Exp1 (Minus (Term (Factor (Var "x"))) +> (Factor (Int 2))))))))) -> +> print "Test works\n" +> _ -> quit +> _ -> quit +> _ -> quit +> _ -> quit +> quit = print "Test failed\n" +> } diff -Nru happy-1.19.5/tests/typeclass_monad_lexer.y happy-1.19.8/tests/typeclass_monad_lexer.y --- happy-1.19.5/tests/typeclass_monad_lexer.y 1970-01-01 00:00:00.000000000 +0000 +++ happy-1.19.8/tests/typeclass_monad_lexer.y 2017-10-12 07:46:11.000000000 +0000 @@ -0,0 +1,125 @@ +{ +import Control.Monad.Except +import Control.Monad.State +import Control.Monad.Trans + +} + +%name parse exp +%tokentype { Token } +%error { parseError } +%monad { (MonadIO m) } { Parser m } +%lexer { lexer } { EOF } +%token ID { Id _ } + NUM { Num _ } + PLUS { Plus } + MINUS { Minus } + TIMES { Times } + LPAREN { LParen } + RPAREN { RParen } + +%% + +exp :: { AST } + : exp PLUS prod + { Sum $1 $3 } + | prod + { $1 } + +prod :: { AST } + : prod TIMES neg + { Prod $1 $3 } + | neg + { $1 } + +neg :: { AST } + : MINUS neg + { Neg $2 } + | atom + { $1 } + +atom :: { AST } + : ID + { let Id str = $1 in Var str } + | NUM + { let Num n = $1 in Lit n } + | LPAREN exp RPAREN + { $2 } + +{ + +data Token = + Plus + | Minus + | Times + | LParen + | RParen + | Id String + | Num Int + | EOF + deriving (Eq, Ord, Show) + +data AST = + Sum AST AST + | Prod AST AST + | Neg AST + | Var String + | Lit Int + deriving (Eq, Ord) + +type Parser m = ExceptT () (Lexer m) 
+ +type Lexer m = StateT [Token] m + +parseError :: MonadIO m => Token -> Parser m a +parseError tok = + do + liftIO (putStrLn ("Parse error at " ++ show tok)) + throwError () + +lexer :: MonadIO m => (Token -> Parser m a) -> Parser m a +lexer cont = + do + toks <- get + case toks of + [] -> cont EOF + first : rest -> + do + put rest + cont first + +parse :: (MonadIO m) => Parser m AST + +parser :: (MonadIO m) => + [Token] + -> m (Maybe AST) +parser input = + let + run :: (MonadIO m) => + Lexer m (Maybe AST) + run = + do + res <- runExceptT parse + case res of + Left () -> return Nothing + Right ast -> return (Just ast) + in do + (out, _) <- runStateT run input + return out + +main :: IO () +main = + let + input = [Id "x", Plus, + Minus, Num 1, Times, + LParen, Num 2, Plus, Id "y", RParen] + expected = Sum (Var "x") (Prod (Neg (Lit 1)) (Sum (Lit 2) (Var "y"))) + in do + res <- parser input + case res of + Nothing -> print "Test failed\n" + Just actual + | expected == actual -> print "Test works\n" + | otherwise -> print "Test failed\n" + +} \ No newline at end of file