Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions lib/prism/lex_compat.rb
Original file line number Diff line number Diff line change
Expand Up @@ -758,8 +758,9 @@ def result
end
end

# Drop the EOF token from the list
tokens = tokens[0...-1]
# Drop the EOF token from the list. The EOF token may not be
# present if the source was syntactically invalid.
tokens = tokens[0...-1] if tokens.dig(-1, 1) == :on_eof

# We sort by location because Ripper.lex sorts.
tokens.sort_by! do |token|
Expand Down Expand Up @@ -804,7 +805,7 @@ def insert_on_sp(tokens, source, data_loc, bom, eof_token)
next_whitespace_index += 1
first_whitespace = sp_value[0...continuation_index]
continuation = sp_value[continuation_index...next_whitespace_index]
second_whitespace = sp_value[next_whitespace_index..]
second_whitespace = sp_value[next_whitespace_index..] || ""

new_tokens << [[sp_line, sp_column], :on_sp, first_whitespace, prev_token_state] unless first_whitespace.empty?
new_tokens << [[sp_line, sp_column + continuation_index], :on_sp, continuation, prev_token_state]
Expand All @@ -819,7 +820,7 @@ def insert_on_sp(tokens, source, data_loc, bom, eof_token)
prev_token_end = start_offset + token[2].bytesize
end

unless data_loc # no trailing :on_sp with __END__ as it is always preceded by :on_nl
if !data_loc && eof_token # no trailing :on_sp with __END__ as it is always preceded by :on_nl
end_offset = eof_token.location.end_offset
if prev_token_end < end_offset
new_tokens << [
Expand Down
11 changes: 11 additions & 0 deletions test/prism/ruby/ripper_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,17 @@ def test_lexer
assert_raise(SyntaxError) { Translation::Ripper::Lexer.new("1 +").lex(raise_errors: true) }
end


# On syntactically invalid code the output doesn't always match up.
# In these cases we just want to make sure that it doesn't raise.
def test_lex_invalid_syntax
# Regression check: lexing a malformed regexp must complete without
# raising, even though the resulting tokens may differ from Ripper's.
assert_nothing_raised do
Translation::Ripper.lex('scan/\p{alpha}/')
end

# NOTE(review): presumably this input exercises the missing-EOF-token
# path — confirm; here the output is expected to match Ripper exactly.
assert_equal(Ripper.lex('if;)'), Translation::Ripper.lex('if;)'))
end

def test_tokenize
source = "foo;1;BAZ"
assert_equal(Ripper.tokenize(source), Translation::Ripper.tokenize(source))
Expand Down