Class: Parser

Inherits:
Object
  • Object
show all
Defined in:
lib/rdparse.rb

Defined Under Namespace

Classes: LexToken, ParseError

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(language_name, debug_bool, locale, &block) ⇒ Parser

Returns a new instance of Parser.



129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
# File 'lib/rdparse.rb', line 129

# Build a parser for +language_name+.
#
# @param language_name [String] name reported by #to_s
# @param debug_bool [Boolean] when true, logs lexing steps to STDOUT
# @param locale [String] locale file name, or 'default' (see #create_tokens_from_locale)
# @param block grammar definition, evaluated in this instance's context
def initialize(language_name, debug_bool, locale, &block)
  @debug_mode = debug_bool
  @logger = Logger.new(STDOUT) if @debug_mode
  @lex_tokens = []
  @rules = {}
  @start = nil
  @language_name = language_name
  @file_string = ''

  create_tokens_from_locale(locale)

  # The locale value for the key '(false|true)' looks like '(falsk|sann)';
  # peel it apart to recover the localized boolean literals.
  raw_false, raw_true = @token_list['(false|true)'].split('|')
  false_value = raw_false[1..]  # drop the leading '('
  true_value = raw_true[0..-2]  # drop the trailing ')'

  ScopeManager.new(true_value, false_value)

  # Run the grammar-definition DSL block against this parser.
  instance_eval(&block)
end

Instance Attribute Details

#loggerObject (readonly)

Returns the value of attribute logger.



124
125
126
# File 'lib/rdparse.rb', line 124

# Debug logger; only set when the parser was created with debug enabled,
# otherwise nil (see #initialize).
def logger
  @logger
end

#posObject

Returns the value of attribute pos.



123
124
125
# File 'lib/rdparse.rb', line 123

# Current position in the token stream; reset by #parse and advanced
# one step at a time by #next_token.
def pos
  @pos
end

#rulesObject (readonly)

Returns the value of attribute rules.



124
125
126
# File 'lib/rdparse.rb', line 124

# Hash of grammar rules for this parser. Initialized empty in #initialize;
# presumably populated by the grammar block via instance_eval — confirm
# against the rule-definition DSL elsewhere in the file.
def rules
  @rules
end

#stringObject (readonly)

Returns the value of attribute string.



124
125
126
# File 'lib/rdparse.rb', line 124

# A copy of the most recently tokenized input (set in #tokenize).
def string
  @string
end

Instance Method Details

#create_tokens_from_locale(locale) ⇒ Object

Recreate the token list using the chosen locale file



151
152
153
154
155
156
157
158
159
160
161
162
163
# File 'lib/rdparse.rb', line 151

# Recreate the token list using the chosen locale file.
#
# Each line of a locale file holds a default token and its localized
# counterpart separated by whitespace, e.g. "(false|true) (falsk|sann)".
# When +locale+ is 'default', the 'default' file contains the name of the
# actual locale file to load.
#
# Fixes: the file name read from the 'default' file is now chomped, so a
# trailing newline no longer corrupts the constructed path; blank lines
# no longer insert a nil => nil entry into @token_list.
#
# @param locale [String] locale file name, or 'default'
def create_tokens_from_locale(locale)
  if locale == 'default'
    # The 'default' file names the real locale file to use.
    lang_file = File.read("#{LOCALES_PATH}/#{locale}").chomp
    token_pairs = File.readlines("#{LOCALES_PATH}/#{lang_file}")
  else
    token_pairs = File.readlines("#{LOCALES_PATH}/#{locale}")
  end
  @token_list = {}
  token_pairs.each do |pair|
    next if pair.strip.empty? # skip blank lines instead of storing a nil key
    default_value, locale_value = pair.split(' ')
    @token_list[default_value] = locale_value
  end
end

#expect(tok) ⇒ Object

Consume the next token and check it against the expected token



222
223
224
225
226
227
228
229
230
231
232
# File 'lib/rdparse.rb', line 222

# Consume the next token and check it against the expected token +tok+
# (compared with ===, so token classes, regexps and literals all work).
#
# Tracks the furthest position reached (@max_pos) and, at that frontier,
# collects the tokens that were expected but not found (@expected) so a
# useful error can be reported later.
#
# @return [Object] the matched token, :empty for the empty token, or nil
def expect(tok)
  return tok if tok == :empty

  current = next_token
  consumed_pos = @pos - 1
  # Reached further into the input than ever before: restart the
  # expected-token bookkeeping at this new frontier.
  if consumed_pos > @max_pos
    @max_pos = consumed_pos
    @expected = []
  end
  return current if tok === current

  # Mismatch at the frontier: remember what we were hoping to see.
  @expected << tok if consumed_pos == @max_pos && !@expected.include?(tok)
  nil
end

#next_tokenObject



216
217
218
219
# File 'lib/rdparse.rb', line 216

# Return the next token in the queue and advance the position
# (returns nil once the token stream is exhausted).
def next_token
  token = @tokens[@pos]
  @pos += 1
  token
end

#parse(string) ⇒ Object



191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
# File 'lib/rdparse.rb', line 191

# Lex and then parse (and evaluate) +string+.
#
# @param string [String] the program text
# @return [Object] the result of the start rule, or the error-handler
#   result when tokens were left unconsumed
# @raise [ParseError] on mismatched parentheses
def parse(string)
  @file_string = string
  # Split the input into tokens according to the "token" instructions;
  # fills @tokens.
  tokenize(string)

  # Bookkeeping used afterwards to check that every token was consumed.
  @pos = 0
  @max_pos = 0
  @expected = []

  result = @start.parse

  # Every token consumed: the parse succeeded.
  return result if @pos == @tokens.size

  # Leftover tokens signal a syntax error somewhere in the input.
  if ['(', ')'].include?(@tokens[@max_pos])
    raise ParseError, 'Mismatched parenthesis! In Emacs: M-x check-parens RET'
  end

  ErrorHandler.find_faulty_line(@max_pos, @file_string, @tokens, @token_list)
end

#to_sObject



234
235
236
# File 'lib/rdparse.rb', line 234

def to_s
  "Parser for #{@language_name}"
end

#tokenize(string) ⇒ Object

Tokenize the string into small pieces



166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
# File 'lib/rdparse.rb', line 166

# Tokenize the string into small pieces.
#
# Repeatedly tries each registered lex token's pattern against the input,
# evaluating the matched lexeme with the token's block (if any), appending
# the result to @tokens and consuming the matched text, until the input is
# exhausted.
#
# Fixes: the ParseError message was missing its closing quote. The
# hard-to-read `raise ... unless ... do/end` construct (which relies on
# subtle block-binding rules) is replaced by an explicit flag.
#
# @param string [String] the source text to lex
# @raise [ParseError] if no token pattern matches the remaining input
def tokenize(string)
  @tokens = []
  @string = string.clone
  until string.empty?
    consumed = @lex_tokens.any? do |tok|
      match = tok.pattern.match(string)
      if match
        # NOTE(review): prints match[0] twice — possibly meant the token's
        # evaluated value as the second operand; confirm original intent.
        @logger.debug("Token #{match[0]} consumed as #{match[0]}") if @debug_mode
        # Evaluate the lexeme with the token's block, if it has one.
        @tokens << tok.block.call(match.to_s) if tok.block
        # Consume the matched text and continue with the remainder.
        string = match.post_match
        true
      else
        # This token pattern did not match; try the next.
        false
      end
    end
    raise ParseError, "unable to lex '#{string}'" unless consumed
  end
end