urn_lexer.go

// Generated from /home/leodido/workspaces/go/src/github.com/leodido/go-urn/grammar/Urn.g4 by ANTLR 4.7.

package grammar

import (
	"fmt"
	"unicode"

	"github.com/antlr/antlr4/runtime/Go/antlr"
)

// Suppress unused import error
var _ = fmt.Printf
var _ = unicode.IsLetter
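
// serializedLexerAtn holds the lexer's ATN (augmented transition network) in
// the compact uint16 encoding ANTLR emits; it is deserialized once, below, at
// package initialization.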
var serializedLexerAtn = []uint16{
	3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 7, 90, 8,
	1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9,
	7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4,
	13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 3, 2,
	3, 2, 3, 3, 3, 3, 5, 3, 40, 10, 3, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6,
	3, 7, 3, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 8, 6, 8, 55, 10, 8, 13, 8, 14, 8,
	56, 3, 9, 3, 9, 6, 9, 61, 10, 9, 13, 9, 14, 9, 62, 3, 10, 3, 10, 3, 10,
	3, 10, 3, 11, 3, 11, 3, 12, 5, 12, 72, 10, 12, 3, 13, 3, 13, 5, 13, 76,
	10, 13, 3, 14, 3, 14, 3, 15, 3, 15, 5, 15, 82, 10, 15, 3, 16, 3, 16, 3,
	17, 3, 17, 3, 17, 5, 17, 89, 10, 17, 2, 2, 18, 3, 3, 5, 4, 7, 5, 9, 6,
	11, 7, 13, 2, 15, 2, 17, 2, 19, 2, 21, 2, 23, 2, 25, 2, 27, 2, 29, 2, 31,
	2, 33, 2, 3, 2, 10, 4, 2, 87, 87, 119, 119, 4, 2, 84, 84, 116, 116, 4,
	2, 80, 80, 112, 112, 3, 2, 50, 59, 4, 2, 67, 92, 99, 124, 10, 2, 35, 35,
	38, 38, 41, 46, 48, 48, 61, 61, 63, 63, 66, 66, 97, 97, 4, 2, 67, 72, 99,
	104, 6, 2, 37, 37, 39, 39, 49, 49, 65, 65, 2, 87, 2, 3, 3, 2, 2, 2, 2,
	5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 3,
	35, 3, 2, 2, 2, 5, 39, 3, 2, 2, 2, 7, 41, 3, 2, 2, 2, 9, 43, 3, 2, 2, 2,
	11, 45, 3, 2, 2, 2, 13, 47, 3, 2, 2, 2, 15, 51, 3, 2, 2, 2, 17, 60, 3,
	2, 2, 2, 19, 64, 3, 2, 2, 2, 21, 68, 3, 2, 2, 2, 23, 71, 3, 2, 2, 2, 25,
	75, 3, 2, 2, 2, 27, 77, 3, 2, 2, 2, 29, 81, 3, 2, 2, 2, 31, 83, 3, 2, 2,
	2, 33, 88, 3, 2, 2, 2, 35, 36, 5, 13, 7, 2, 36, 4, 3, 2, 2, 2, 37, 40,
	5, 15, 8, 2, 38, 40, 5, 17, 9, 2, 39, 37, 3, 2, 2, 2, 39, 38, 3, 2, 2,
	2, 40, 6, 3, 2, 2, 2, 41, 42, 7, 60, 2, 2, 42, 8, 3, 2, 2, 2, 43, 44, 7,
	47, 2, 2, 44, 10, 3, 2, 2, 2, 45, 46, 7, 34, 2, 2, 46, 12, 3, 2, 2, 2,
	47, 48, 9, 2, 2, 2, 48, 49, 9, 3, 2, 2, 49, 50, 9, 4, 2, 2, 50, 14, 3,
	2, 2, 2, 51, 54, 5, 25, 13, 2, 52, 55, 5, 25, 13, 2, 53, 55, 5, 9, 5, 2,
	54, 52, 3, 2, 2, 2, 54, 53, 3, 2, 2, 2, 55, 56, 3, 2, 2, 2, 56, 54, 3,
	2, 2, 2, 56, 57, 3, 2, 2, 2, 57, 16, 3, 2, 2, 2, 58, 61, 5, 33, 17, 2,
	59, 61, 5, 19, 10, 2, 60, 58, 3, 2, 2, 2, 60, 59, 3, 2, 2, 2, 61, 62, 3,
	2, 2, 2, 62, 60, 3, 2, 2, 2, 62, 63, 3, 2, 2, 2, 63, 18, 3, 2, 2, 2, 64,
	65, 7, 39, 2, 2, 65, 66, 5, 29, 15, 2, 66, 67, 5, 29, 15, 2, 67, 20, 3,
	2, 2, 2, 68, 69, 9, 5, 2, 2, 69, 22, 3, 2, 2, 2, 70, 72, 9, 6, 2, 2, 71,
	70, 3, 2, 2, 2, 72, 24, 3, 2, 2, 2, 73, 76, 5, 21, 11, 2, 74, 76, 5, 23,
	12, 2, 75, 73, 3, 2, 2, 2, 75, 74, 3, 2, 2, 2, 76, 26, 3, 2, 2, 2, 77,
	78, 9, 7, 2, 2, 78, 28, 3, 2, 2, 2, 79, 82, 5, 21, 11, 2, 80, 82, 9, 8,
	2, 2, 81, 79, 3, 2, 2, 2, 81, 80, 3, 2, 2, 2, 82, 30, 3, 2, 2, 2, 83, 84,
	9, 9, 2, 2, 84, 32, 3, 2, 2, 2, 85, 89, 5, 27, 14, 2, 86, 89, 5, 31, 16,
	2, 87, 89, 5, 25, 13, 2, 88, 85, 3, 2, 2, 2, 88, 86, 3, 2, 2, 2, 88, 87,
	3, 2, 2, 2, 89, 34, 3, 2, 2, 2, 12, 2, 39, 54, 56, 60, 62, 71, 75, 81,
	88, 2,
}
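
// Rebuild the in-memory ATN from its serialized form once, at package load.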
var lexerDeserializer = antlr.NewATNDeserializer(nil)
var lexerAtn = lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn)
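
// Static lexer metadata: token channels, lexer modes, literal spellings,
// symbolic token names, and rule names, indexed by the token and rule numbers
// ANTLR assigned.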
var lexerChannelNames = []string{
	"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}

var lexerModeNames = []string{
	"DEFAULT_MODE",
}

var lexerLiteralNames = []string{
	"", "", "", "':'", "'-'", "' '",
}

var lexerSymbolicNames = []string{
	"", "Urn", "Part", "Colon", "Hyphen", "Whitespace",
}

var lexerRuleNames = []string{
	"Urn", "Part", "Colon", "Hyphen", "Whitespace", "URN", "IDENTIFIER", "CHARS",
	"HEX", "NUMBER", "LETTER", "ALPHA_NUMERIC", "OTHER", "HEX_DIGIT", "RESERVED",
	"TRANSLATION",
}
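
// UrnLexer is the lexer ANTLR generated for the Urn grammar; it embeds the
// runtime's BaseLexer and only carries the URN-specific ATN and name tables.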
type UrnLexer struct {
	*antlr.BaseLexer
	channelNames []string
	modeNames    []string
	// TODO: EOF string
}
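
// lexerDecisionToDFA caches one DFA per decision point in the ATN, created in
// init below; the ATN simulator adds states to them lazily while tokenizing.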
var lexerDecisionToDFA = make([]*antlr.DFA, len(lexerAtn.DecisionToState))

func init() {
	for index, ds := range lexerAtn.DecisionToState {
		lexerDecisionToDFA[index] = antlr.NewDFA(ds, index)
	}
}
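
// NewUrnLexer builds a UrnLexer over the given character stream, wiring the
// deserialized ATN, the shared DFA cache, and the name tables into an ATN
// simulator.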
func NewUrnLexer(input antlr.CharStream) *UrnLexer {
	l := new(UrnLexer)
	l.BaseLexer = antlr.NewBaseLexer(input)
	l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache())
	l.channelNames = lexerChannelNames
	l.modeNames = lexerModeNames
	l.RuleNames = lexerRuleNames
	l.LiteralNames = lexerLiteralNames
	l.SymbolicNames = lexerSymbolicNames
	l.GrammarFileName = "Urn.g4"
	// TODO: l.EOF = antlr.TokenEOF
	return l
}

// UrnLexer tokens.
const (
	UrnLexerUrn        = 1
	UrnLexerPart       = 2
	UrnLexerColon      = 3
	UrnLexerHyphen     = 4
	UrnLexerWhitespace = 5
)
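
// A minimal usage sketch (not part of the generated code): build the lexer
// over an input stream and pull tokens until EOF. antlr.NewInputStream,
// Token.GetTokenType, Token.GetText, and antlr.TokenEOF come from the ANTLR
// Go runtime imported above; the URN literal is only an illustration.
//
//	input := antlr.NewInputStream("urn:example:a123")
//	lexer := NewUrnLexer(input)
//	for tok := lexer.NextToken(); tok.GetTokenType() != antlr.TokenEOF; tok = lexer.NextToken() {
//		fmt.Printf("%-10s %q\n", lexerSymbolicNames[tok.GetTokenType()], tok.GetText())
//	}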