| author | 2014-08-10 01:03:40 -0400 |
|---|---|
| committer | 2014-08-10 01:03:40 -0400 |
| commit | d24d2b9ad9b5c895020b56f700a72906346482e5 (patch) |
| tree | da360c07209e6e327325db53dbb4df05e77cb7e9 /src/cuchaz/enigma/analysis/SourceIndex.java |
| parent | added sorting for deobfuscated classes (diff) |
| download | enigma-fork-d24d2b9ad9b5c895020b56f700a72906346482e5.tar.gz enigma-fork-d24d2b9ad9b5c895020b56f700a72906346482e5.tar.xz enigma-fork-d24d2b9ad9b5c895020b56f700a72906346482e5.zip |
completely re-wrote token recognizer to bootstrap from Procyon's AST
changed imports to guava instead of whatever collections library happened to be on my classpath
Diffstat (limited to 'src/cuchaz/enigma/analysis/SourceIndex.java')
| -rw-r--r-- | src/cuchaz/enigma/analysis/SourceIndex.java | 99 |
1 file changed, 65 insertions, 34 deletions
```diff
diff --git a/src/cuchaz/enigma/analysis/SourceIndex.java b/src/cuchaz/enigma/analysis/SourceIndex.java
index de16308..398a50d 100644
--- a/src/cuchaz/enigma/analysis/SourceIndex.java
+++ b/src/cuchaz/enigma/analysis/SourceIndex.java
@@ -10,70 +10,101 @@
 ******************************************************************************/
 package cuchaz.enigma.analysis;
 
-import java.util.Collection;
-import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 
-import jsyntaxpane.Token;
-
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Multimap;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.strobel.decompiler.languages.Region;
+import com.strobel.decompiler.languages.java.ast.AstNode;
 
 import cuchaz.enigma.mapping.Entry;
 
-public class SourceIndex implements Iterable<Map.Entry<Entry,Token>>
+public class SourceIndex
 {
-    private Multimap<Entry,Token> m_entryToTokens;
+    private String m_source;
+    private TreeMap<Token,Entry> m_tokens;
+    private List<Integer> m_lineOffsets;
 
-    public SourceIndex( )
+    public SourceIndex( String source )
     {
-        m_entryToTokens = HashMultimap.create();
+        m_source = source;
+        m_tokens = Maps.newTreeMap();
+        m_lineOffsets = Lists.newArrayList();
+
+        // count the lines
+        m_lineOffsets.add( 0 );
+        for( int i=0; i<source.length(); i++ )
+        {
+            if( source.charAt( i ) == '\n' )
+            {
+                m_lineOffsets.add( i + 1 );
+            }
+        }
     }
 
-    public void add( Entry entry, Token token )
+    public String getSource( )
     {
-        m_entryToTokens.put( entry, token );
+        return m_source;
     }
 
-    public Iterator<Map.Entry<Entry,Token>> iterator( )
+    public Token getToken( AstNode node )
     {
-        return m_entryToTokens.entries().iterator();
+        // get a token for this node's region
+        Region region = node.getRegion();
+        if( region.getBeginLine() == 0 || region.getEndLine() == 0 )
+        {
+            throw new IllegalArgumentException( "Invalid region: " + region );
+        }
+        return new Token(
+            toPos( region.getBeginLine(), region.getBeginColumn() ),
+            toPos( region.getEndLine(), region.getEndColumn() )
+        );
     }
 
-    public Collection<Token> tokens( )
+    public void add( AstNode node, Entry entry )
     {
-        return m_entryToTokens.values();
+        m_tokens.put( getToken( node ), entry );
     }
 
-    public Entry getEntry( Token token )
+    public void add( Token token, Entry entry )
+    {
+        m_tokens.put( token, entry );
+    }
+
+    public Token getToken( int pos )
     {
-        // linear search is fast enough for now
-        for( Map.Entry<Entry,Token> entry : this )
+        Map.Entry<Token,Entry> mapEntry = m_tokens.floorEntry( new Token( pos, pos ) );
+        if( mapEntry == null )
         {
-            if( entry.getValue().equals( token ) )
-            {
-                return entry.getKey();
-            }
+            return null;
+        }
+        Token token = mapEntry.getKey();
+        if( token.contains( pos ) )
+        {
+            return token;
         }
         return null;
     }
 
-    public Map.Entry<Entry,Token> getEntry( int pos )
+    public Entry getEntry( Token token )
     {
-        // linear search is fast enough for now
-        for( Map.Entry<Entry,Token> entry : this )
+        if( token == null )
         {
-            Token token = entry.getValue();
-            if( pos >= token.start && pos <= token.end() )
-            {
-                return entry;
-            }
+            return null;
         }
-        return null;
+        return m_tokens.get( token );
+    }
+
+    public Iterable<Token> tokens( )
+    {
+        return m_tokens.keySet();
     }
 
-    public Collection<Token> getTokens( Entry entry )
+    private int toPos( int line, int col )
     {
-        return m_entryToTokens.get( entry );
+        // line and col are 1-based
+        return m_lineOffsets.get( line - 1 ) + col - 1;
     }
 }
```
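
For orientation, here is a minimal usage sketch of the reworked class as it looks after this commit. The `SourceIndexSketch` class, the `entryAtCaret` method, and its parameters are hypothetical stand-ins invented for illustration; only the `SourceIndex` constructor and the `add`, `getToken`, and `getEntry` calls come from the diff above. `Token` is assumed to be the project's own class in the same package, since the `jsyntaxpane.Token` import is gone.

```java
package cuchaz.enigma.analysis;

import com.strobel.decompiler.languages.java.ast.AstNode;

import cuchaz.enigma.mapping.Entry;

public class SourceIndexSketch
{
    // decompiledSource: the text Procyon produced for one class
    // declarationNode: an AST node whose Region covers a name in that text
    // declarationEntry: the obfuscation-mapping entry for that declaration
    // caretPos: a character offset into decompiledSource (e.g. the editor caret)
    public static Entry entryAtCaret( String decompiledSource, AstNode declarationNode, Entry declarationEntry, int caretPos )
    {
        // the constructor pre-computes line-start offsets so the AST's 1-based
        // (line, column) regions can be converted to character offsets
        SourceIndex index = new SourceIndex( decompiledSource );

        // getToken( AstNode ) reads the node's Region and maps it through toPos();
        // add() records the resulting Token -> Entry pair in the sorted TreeMap
        index.add( declarationNode, declarationEntry );

        // getToken( int ) uses TreeMap.floorEntry() to find the nearest token that
        // starts at or before the caret, then checks that it actually contains it
        Token token = index.getToken( caretPos );

        // getEntry() is null-safe: returns null when the caret is not on a token
        return index.getEntry( token );
    }
}
```

The design change this reflects: the old class kept a `Multimap<Entry,Token>` and answered position queries with linear scans ("linear search is fast enough for now"), while the new one keys a `TreeMap` by `Token`, so a caret lookup becomes a single `floorEntry` probe plus a containment check. That presumably requires `Token` to be `Comparable` and ordered by start offset. Positions are plain character offsets, computed as `m_lineOffsets.get( line - 1 ) + col - 1` from the line starts counted in the constructor.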