path: root/src/cuchaz/enigma/analysis/Lexer.java
/*******************************************************************************
 * Copyright (c) 2014 Jeff Martin.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the GNU Public License v3.0
 * which accompanies this distribution, and is available at
 * http://www.gnu.org/licenses/gpl.html
 * 
 * Contributors:
 *     Jeff Martin - initial API and implementation
 ******************************************************************************/
package cuchaz.enigma.analysis;

import java.util.Iterator;

import jsyntaxpane.SyntaxDocument;
import jsyntaxpane.Token;
import jsyntaxpane.TokenType;
import jsyntaxpane.lexers.JavaLexer;

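/**
 * Thin wrapper around jsyntaxpane's {@link JavaLexer} that tokenizes a Java
 * source string and exposes the resulting tokens as an {@link Iterable}.
 *
 * A minimal usage sketch (assuming {@code javaSource} is a CharSequence
 * holding Java source code):
 *
 * <pre>
 * Lexer lexer = new Lexer( javaSource );
 * for( Token token : lexer )
 * {
 *     System.out.println( token.type + ": " + lexer.getText( token ) );
 * }
 * </pre>
 */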
public class Lexer implements Iterable<Token>
{
	private SyntaxDocument m_doc;
	
	public Lexer( CharSequence source )
	{
		m_doc = new SyntaxDocument( new JavaLexer() );
		m_doc.append( source.toString() );
	}
	
	@Override
	public Iterator<Token> iterator( )
	{
		// build a fresh token iterator on each call so the lexer can be iterated more than once
		return m_doc.getTokens( 0, m_doc.getLength() );
	}
	
	public String getText( Token token )
	{
		return token.getString( m_doc );
	}
	
	public Token getFirstIdentifier( )
	{
		for( Token token : this )
		{
			if( token.type == TokenType.IDENTIFIER )
			{
				return token;
			}
		}
		return null;
	}
	
	public Token getFirstIdentifierMatching( CharSequence val )
	{
		for( Token token : this )
		{
			if( token.type == TokenType.IDENTIFIER && getText( token ).equals( val.toString() ) )
			{
				return token;
			}
		}
		return null;
	}
	
	public Token getLastIdentifier( )
	{
		Token lastToken = null;
		for( Token token : this )
		{
			if( token.type == TokenType.IDENTIFIER )
			{
				lastToken = token;
			}
		}
		return lastToken;
	}
}