SqlAnalyzer1.01 源码
源码下载:https://files.cnblogs.com/files/heyang78/SqlAnalyzer-20200529-2.rar
现有功能:不带函数允许嵌套的select ...from... where...group by...having...order by...的解析。
核心类:
Token:
/**
 * A lexical token produced by {@link Lexer}: a SQL keyword, an operator,
 * a punctuation mark, or a plain text fragment (identifier/literal).
 */
public class Token{
    public static final int TYPE_SELECT=1;
    public static final int TYPE_TEXT=2;
    public static final int TYPE_COMMA=3;
    public static final int TYPE_FROM=4;
    public static final int TYPE_WHERE=5;
    public static final int TYPE_AND=6;
    public static final int TYPE_EQUAL=7;
    public static final int TYPE_OR=8;
    public static final int TYPE_ORDER=9;
    public static final int TYPE_BY=10;
    public static final int TYPE_ASC=11;
    public static final int TYPE_DESC=12;
    public static final int TYPE_AS=13;
    public static final int TYPE_GROUP=14;
    public static final int TYPE_HAVING=15;
    public static final int TYPE_LESSTHAN=16;
    public static final int TYPE_GREATERTHAN=17;
    public static final int TYPE_OPEN_PARENTHESIS=18;
    public static final int TYPE_CLOSE_PARENTHESIS=19;
    public static final int TYPE_CONNECT=20;
    public static final int TYPE_LESSTHAN_OR_EQUAL=21;
    public static final int TYPE_GREATERTHAN_OR_EQUAL=22;
    public static final int TYPE_LESSTHAN_OR_GREATERTHAN=23;
    public static final int TYPE_CASE=24;
    public static final int TYPE_WHEN=25;
    public static final int TYPE_THEN=26;
    public static final int TYPE_ELSE=27;
    public static final int TYPE_END=28;
    public static final int TYPE_IS=29;
    public static final int TYPE_NULL=30;
    public static final int TYPE_TRUE=31;
    public static final int TYPE_FALSE=32;
    public static final int TYPE_PLUS=33;
    public static final int TYPE_MINUS=34;
    //public static final int TYPE_MULTI=35;
    public static final int TYPE_DEVIDE=36;
    public static final int TYPE_DISTINCT=37;
    public static final int TYPE_OVER=38;
    public static final int TYPE_STRING_CONCAT=39;
    public static final int TYPE_ON=40;
    public static final int TYPE_JOIN=41;
    public static final int TYPE_INNER=42;
    public static final int TYPE_LEFT=43;
    public static final int TYPE_RIGHT=44;
    public static final int TYPE_OUTER=45;
    public static final int TYPE_FULL=46;
    public static final int TYPE_WITHIN=47;
    public static final int TYPE_PARTITION=48;
    public static final int TYPE_LIKE=49;
    public static final int TYPE_LIMIT=50;

    private int type;     // one of the TYPE_* constants above
    private String text;  // raw token text as it appeared in the input
    private int index;    // position in the token stream (assigned by Lexer)

    /** Creates a token from a single character (operators/punctuation). */
    public Token(char c,int type) {
        this(String.valueOf(c),type);
    }

    /** Creates a token from a word or a multi-character operator. */
    public Token(String word,int type) {
        this.text=word;
        this.type=type;
    }

    @Override
    public String toString() {
        return String.format("token(text=%s,type=%s,index=%d)", text,getTypeStr(),index);
    }

    /**
     * Human-readable description of the token type; {@code null} when
     * the type code is not one of the known TYPE_* constants.
     */
    public String getTypeStr() {
        switch(type) {
            case TYPE_SELECT:                  return "KW:select";
            case TYPE_FROM:                    return "KW:from";
            case TYPE_COMMA:                   return "Comma";
            case TYPE_TEXT:                    return "Text";
            case TYPE_WHERE:                   return "KW:where";
            case TYPE_AND:                     return "KW:and";
            case TYPE_EQUAL:                   return "=";
            case TYPE_OR:                      return "KW:or";
            case TYPE_ORDER:                   return "KW:order";
            case TYPE_BY:                      return "KW:by";
            case TYPE_ASC:                     return "KW:asc";
            case TYPE_DESC:                    return "KW:desc";
            case TYPE_AS:                      return "KW:as";
            case TYPE_GROUP:                   return "KW:group";
            case TYPE_HAVING:                  return "KW:having";
            case TYPE_LESSTHAN:                return "<";
            case TYPE_GREATERTHAN:             return ">";
            case TYPE_OPEN_PARENTHESIS:        return "(";
            case TYPE_CLOSE_PARENTHESIS:       return ")";
            case TYPE_CONNECT:                 return "KW:connect";
            case TYPE_LESSTHAN_OR_EQUAL:       return "<=";
            case TYPE_GREATERTHAN_OR_EQUAL:    return ">=";
            case TYPE_LESSTHAN_OR_GREATERTHAN: return "<>";
            case TYPE_CASE:                    return "KW:case";
            case TYPE_WHEN:                    return "KW:when";
            case TYPE_THEN:                    return "KW:then";
            case TYPE_ELSE:                    return "KW:else";
            case TYPE_END:                     return "KW:end";
            case TYPE_IS:                      return "KW:is";
            case TYPE_NULL:                    return "KW:null";
            case TYPE_TRUE:                    return "KW:true";
            case TYPE_FALSE:                   return "KW:false";
            case TYPE_PLUS:                    return "+";
            case TYPE_MINUS:                   return "-";
            case TYPE_DEVIDE:                  return "/";
            case TYPE_DISTINCT:                return "KW:distinct";
            case TYPE_OVER:                    return "KW:over";
            case TYPE_STRING_CONCAT:           return "||";
            case TYPE_ON:                      return "KW:on";
            case TYPE_JOIN:                    return "KW:join";
            case TYPE_INNER:                   return "KW:inner";
            case TYPE_LEFT:                    return "KW:left";
            case TYPE_RIGHT:                   return "KW:right";
            case TYPE_OUTER:                   return "KW:outer";
            case TYPE_FULL:                    return "KW:full";
            case TYPE_WITHIN:                  return "KW:within";
            case TYPE_PARTITION:               return "KW:partition";
            case TYPE_LIKE:                    return "KW:like";
            case TYPE_LIMIT:                   return "KW:limit";
            default:                           return null;
        }
    }

    public int getType() {
        return type;
    }

    public String getText() {
        return text;
    }

    public void setText(String text) {
        this.text=text;
    }

    public int getIndex() {
        return index;
    }

    public void setIndex(int index) {
        this.index = index;
    }

    public void setType(int type) {
        this.type = type;
    }
}
分词器类:
package com.heyang;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

/**
 * Hand-written lexer: splits an SQL string into a flat {@link Token} list.
 * Whitespace terminates text tokens; one- and two-character operators are
 * recognised with a single character of lookahead.
 */
public class Lexer {

    /**
     * Keyword text (lower case) -&gt; token type, built once instead of
     * rebuilding an Object[][] on every lookup.
     * FIX: "desc" was previously mapped to TYPE_ASC (with an unreachable
     * duplicate "asc" entry mapped to TYPE_DESC), so "order by x desc"
     * was lexed as ascending.
     */
    private static final Map<String,Integer> KEYWORD_TYPES=new LinkedHashMap<String,Integer>();
    static {
        KEYWORD_TYPES.put("select", Token.TYPE_SELECT);
        KEYWORD_TYPES.put("from", Token.TYPE_FROM);
        KEYWORD_TYPES.put("where", Token.TYPE_WHERE);
        KEYWORD_TYPES.put("and", Token.TYPE_AND);
        KEYWORD_TYPES.put("or", Token.TYPE_OR);
        KEYWORD_TYPES.put("order", Token.TYPE_ORDER);
        KEYWORD_TYPES.put("by", Token.TYPE_BY);
        KEYWORD_TYPES.put("asc", Token.TYPE_ASC);
        KEYWORD_TYPES.put("desc", Token.TYPE_DESC);
        KEYWORD_TYPES.put("as", Token.TYPE_AS);
        KEYWORD_TYPES.put("group", Token.TYPE_GROUP);
        KEYWORD_TYPES.put("having", Token.TYPE_HAVING);
        KEYWORD_TYPES.put("connect", Token.TYPE_CONNECT);
        KEYWORD_TYPES.put("case", Token.TYPE_CASE);
        KEYWORD_TYPES.put("when", Token.TYPE_WHEN);
        KEYWORD_TYPES.put("then", Token.TYPE_THEN);
        KEYWORD_TYPES.put("else", Token.TYPE_ELSE);
        KEYWORD_TYPES.put("end", Token.TYPE_END);
        KEYWORD_TYPES.put("is", Token.TYPE_IS);
        KEYWORD_TYPES.put("null", Token.TYPE_NULL);
        KEYWORD_TYPES.put("true", Token.TYPE_TRUE);
        KEYWORD_TYPES.put("false", Token.TYPE_FALSE);
        KEYWORD_TYPES.put("distinct", Token.TYPE_DISTINCT);
        KEYWORD_TYPES.put("over", Token.TYPE_OVER);
        KEYWORD_TYPES.put("on", Token.TYPE_ON);
        KEYWORD_TYPES.put("join", Token.TYPE_JOIN);
        KEYWORD_TYPES.put("inner", Token.TYPE_INNER);
        KEYWORD_TYPES.put("left", Token.TYPE_LEFT);
        KEYWORD_TYPES.put("right", Token.TYPE_RIGHT);
        KEYWORD_TYPES.put("outer", Token.TYPE_OUTER);
        KEYWORD_TYPES.put("full", Token.TYPE_FULL);
        KEYWORD_TYPES.put("within", Token.TYPE_WITHIN);
        KEYWORD_TYPES.put("partition", Token.TYPE_PARTITION);
        KEYWORD_TYPES.put("like", Token.TYPE_LIKE);
        KEYWORD_TYPES.put("limit", Token.TYPE_LIMIT);
    }

    private List<Token> tokens;

    /** Lexes {@code inputSql} eagerly; retrieve the result via {@link #getTokens()}. */
    public Lexer(String inputSql) {
        String sql=pretreat(inputSql);
        String swallowed="";// text accumulated since the last delimiter
        tokens=new ArrayList<Token>();
        for(int i=0;i<sql.length();i++){
            char c=sql.charAt(i);
            if(Character.isWhitespace(c)){
                addTextToList(swallowed);
                swallowed="";
            }else if(c==','){
                addTextToList(swallowed);
                swallowed="";
                tokens.add(new Token(c,Token.TYPE_COMMA));
            }else if(c=='='){
                addTextToList(swallowed);
                swallowed="";
                tokens.add(new Token(c,Token.TYPE_EQUAL));
            }else if(c=='<'){
                // one char of lookahead distinguishes <, <= and <>
                int next=i+1;
                if(next<sql.length() && sql.charAt(next)=='=') {
                    addTextToList(swallowed);
                    swallowed="";
                    tokens.add(new Token("<=",Token.TYPE_LESSTHAN_OR_EQUAL));
                    i++;
                }else if(next<sql.length() && sql.charAt(next)=='>') {
                    addTextToList(swallowed);
                    swallowed="";
                    tokens.add(new Token("<>",Token.TYPE_LESSTHAN_OR_GREATERTHAN));
                    i++;
                }else {
                    addTextToList(swallowed);
                    swallowed="";
                    tokens.add(new Token(c,Token.TYPE_LESSTHAN));
                }
            }else if(c=='>'){
                int next=i+1;
                if(next<sql.length() && sql.charAt(next)=='=') {
                    addTextToList(swallowed);
                    swallowed="";
                    tokens.add(new Token(">=",Token.TYPE_GREATERTHAN_OR_EQUAL));
                    i++;
                }else {
                    addTextToList(swallowed);
                    swallowed="";
                    tokens.add(new Token(c,Token.TYPE_GREATERTHAN));
                }
            }else if(c=='|'){
                int next=i+1;
                if(next<sql.length() && sql.charAt(next)=='|') {
                    addTextToList(swallowed);
                    swallowed="";
                    tokens.add(new Token("||",Token.TYPE_STRING_CONCAT));
                    i++;
                }
                // NOTE(review): a lone '|' is silently dropped (kept for
                // compatibility with the original behaviour).
            }else if(c=='('){
                addTextToList(swallowed);
                swallowed="";
                tokens.add(new Token(c,Token.TYPE_OPEN_PARENTHESIS));
            }else if(c==')'){
                addTextToList(swallowed);
                swallowed="";
                tokens.add(new Token(c,Token.TYPE_CLOSE_PARENTHESIS));
            }else if(c=='+'){
                addTextToList(swallowed);
                swallowed="";
                tokens.add(new Token(c,Token.TYPE_PLUS));
            }else if(c=='-'){
                addTextToList(swallowed);
                swallowed="";
                tokens.add(new Token(c,Token.TYPE_MINUS));
            }else if(c=='/'){
                addTextToList(swallowed);
                swallowed="";
                tokens.add(new Token(c,Token.TYPE_DEVIDE));
            }else {
                swallowed+=c;
            }
        }
        setTokenIndexes();
    }

    /** Assigns every token its 1-based position in the stream. */
    public void setTokenIndexes() {
        int idx = 0;
        for (Token t : tokens) {
            idx++;
            t.setIndex(idx);
        }
    }

    /** Returns the keyword token type for {@code text}, or TYPE_TEXT if it is not a keyword. */
    private int findTypeByText(String text) {
        Integer type=KEYWORD_TYPES.get(text.toLowerCase(Locale.ROOT));
        return type==null ? Token.TYPE_TEXT : type.intValue();
    }

    /** Classifies {@code text} (keyword vs plain text) and appends it as a token. */
    private void addTextToList(String text) {
        int type=findTypeByText(text);
        addToken2List(text,type);
    }

    /** Appends a token unless {@code text} is blank (empty accumulator flushes are no-ops). */
    private void addToken2List(String text,int type) {
        if(text.trim().length()>0) {
            tokens.add(new Token(text,type));
        }
    }

    /** Dumps the token list to stdout as a fixed-width table (debug aid). */
    public void printTokens() {
        final String continuousStar = createRepeatedStr("-", 84);
        final String layout = "%-20s %-20s %-20s %-20s %s";
        StringBuilder sb = new StringBuilder();
        sb.append(String.format(layout, "Index", "Type No","Text","Type Desc","\n"));
        sb.append(continuousStar + "\n");
        for(Token token:tokens) {
            // FIX: print the token's own 1-based index (set by setTokenIndexes)
            // instead of a separate 0-based counter that disagreed with it.
            sb.append(String.format(layout, String.valueOf(token.getIndex()),String.valueOf(token.getType()), token.getText(),token.getTypeStr(),"\n"));
        }
        System.out.println(sb.toString());
    }

    public List<Token> getTokens() {
        return tokens;
    }

    private static String createRepeatedStr(String seed, int n) {
        return String.join("", Collections.nCopies(n, seed));
    }

    /** Trims the input and appends a trailing space so the final token is always flushed. */
    private String pretreat(String raw) {
        return raw.trim()+" ";
    }

    /** Ad-hoc manual test: lex a file and dump the tokens. */
    public static void main(String[] args) throws Exception{
        // Path may be supplied on the command line; the old hard-coded
        // default is kept for backward compatibility.
        String path=args.length>0 ? args[0] : "c:\\temp\\13.sql";
        String sql=removeExtraSpace(readSqlFromFile(path));
        System.out.println(sql);
        new Lexer(sql).printTokens();
    }

    /** Reads a UTF-8 text file into a single string, one '\n' per line. */
    private static String readSqlFromFile(String filePath) throws Exception{
        StringBuilder sb=new StringBuilder();
        // FIX: try-with-resources — the original leaked the reader when
        // readLine() threw.
        try(BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "UTF-8"))){
            String line;
            while( ( line = br.readLine() ) != null ) {
                // FIX: keep a separator between lines; the original appended
                // lines directly, gluing the last word of one line to the
                // first word of the next.
                sb.append(line).append('\n');
            }
        }
        return sb.toString();
    }

    /** Collapses runs of whitespace into a single space. */
    private static String removeExtraSpace(String raw) {
        return raw.replaceAll("\\s{2,}", " ");
    }
}
节点类,一条SQL就是一个节点:
package com.heyang;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * One SELECT statement in the parse tree. Subqueries appear as child
 * Nodes attached to a Table (from-list subquery) or a Field (select-list
 * subquery); {@link #toString()} pretty-prints the whole subtree, indented
 * by nesting depth.
 */
public class Node {
    private String kwSelect;            // "select" keyword as written
    private List<Field> fields;         // select list (lazily created)
    private String kwFrom;              // "from" keyword as written
    private List<Table> tables;         // from list (lazily created)
    private String kwWhere;             // "where" keyword, null when absent
    private List<Condition> conditions; // where conditions (lazily created)
    private String kwGroupby;           // "group by" as written, null when absent
    private List<GroupBy> groupbys;     // group-by items (lazily created)
    private String kwHaving;            // "having" keyword, null when absent
    private List<Condition> havings;    // having conditions (lazily created)
    private String kwOrderby;           // "order by" as written, null when absent
    private List<Order> orders;         // order-by items (lazily created)
    private int depth=0;                // nesting level; 0 for the outermost query

    public void addHavingCondition(Condition c) {
        if(havings==null) {
            havings=new ArrayList<Condition>();
        }
        havings.add(c);
    }

    public void addGroupby(GroupBy g) {
        if(groupbys==null) {
            groupbys=new ArrayList<GroupBy>();
        }
        groupbys.add(g);
    }

    public void addOrder(Order o) {
        if(orders==null) {
            orders=new ArrayList<Order>();
        }
        orders.add(o);
    }

    public void addCondition(Condition c) {
        if(conditions==null) {
            conditions=new ArrayList<Condition>();
        }
        conditions.add(c);
    }

    /** Pretty-prints this statement (and any nested subqueries) as indented SQL. */
    @Override
    public String toString() {
        String tabs=getIndentSpace();
        StringBuilder sb=new StringBuilder();
        // select
        sb.append(tabs+kwSelect+"\n");
        if(fields!=null) {
            List<String> ls=new ArrayList<String>();
            for(Field f:fields) {
                ls.add(tabs+" "+f.toString());
            }
            sb.append(String.join(",\n", ls));
        }
        // from
        sb.append("\n");
        sb.append(tabs+kwFrom+"\n");
        if(tables!=null) {
            List<String> ls=new ArrayList<String>();
            for(Table tb:tables) {
                ls.add(tabs+" "+tb.toString()+"");
            }
            sb.append(String.join(",\n", ls));
        }
        // where
        if(kwWhere!=null) {
            sb.append("\n");
            sb.append(tabs+kwWhere+"\n");
        }
        if(conditions!=null) {
            List<String> ls=new ArrayList<String>();
            for(Condition cd:conditions) {
                ls.add(tabs+" "+cd.toString()+"");
            }
            sb.append(String.join("\n", ls));
        }
        // group by
        if(kwGroupby!=null) {
            sb.append("\n");
            sb.append(tabs+kwGroupby+"\n");
        }
        if(groupbys!=null) {
            List<String> ls=new ArrayList<String>();
            for(GroupBy od:groupbys) {
                ls.add(tabs+" "+od.toString()+"");
            }
            sb.append(String.join(",\n", ls));
        }
        // having
        if(kwHaving!=null) {
            sb.append("\n");
            sb.append(tabs+kwHaving+"\n");
        }
        if(havings!=null) {
            List<String> ls=new ArrayList<String>();
            for(Condition cd:havings) {
                ls.add(tabs+" "+cd.toString()+"");
            }
            sb.append(String.join("\n", ls));
        }
        // order by
        if(kwOrderby!=null) {
            sb.append("\n");
            sb.append(tabs+kwOrderby+"\n");
        }
        if(orders!=null) {
            List<String> ls=new ArrayList<String>();
            for(Order od:orders) {
                ls.add(tabs+" "+od.toString()+"");
            }
            sb.append(String.join(",\n", ls));
        }
        return sb.toString();
    }

    /** Indentation prefix proportional to this node's nesting depth. */
    private String getIndentSpace() {
        return String.join("", Collections.nCopies(this.depth, " "));
    }

    /**
     * Recomputes the depth of all nested subquery Nodes so that toString()
     * indentation reflects nesting.
     * FIX: the original returned early when {@code tables} was null, which
     * skipped depth adjustment of select-list subqueries held by fields;
     * both loops now always run.
     */
    private void adjustDepth() {
        if(tables!=null) {
            for(Table tb:tables) {
                Node nd=tb.getTableNode();
                if(nd!=null) {
                    nd.depth=this.depth+1;
                    nd.adjustDepth();
                }
            }
        }
        if(fields!=null) {
            for(Field fd:fields) {
                Node nd=fd.getColumnNode();
                if(nd!=null) {
                    nd.depth=this.depth+1;
                    nd.adjustDepth();
                }
            }
        }
    }

    public void addTable(Table t) {
        if(tables==null) {
            tables=new ArrayList<Table>();
        }
        tables.add(t);
        adjustDepth();
    }

    public void addField(Field f) {
        if(fields==null) {
            fields=new ArrayList<Field>();
        }
        fields.add(f);
        adjustDepth();
    }

    public void setKwSelect(String s) {
        this.kwSelect=s;
    }

    public String getKwFrom() {
        return kwFrom;
    }

    public void setKwFrom(String kwFrom) {
        this.kwFrom = kwFrom;
    }

    public String getKwSelect() {
        return kwSelect;
    }

    public String getKwWhere() {
        return kwWhere;
    }

    public void setKwWhere(String kwWhere) {
        this.kwWhere = kwWhere;
    }

    public String getKwOrderby() {
        return kwOrderby;
    }

    public void setKwOrderby(String kwOrderby) {
        this.kwOrderby = kwOrderby;
    }

    public String getKwGroupby() {
        return kwGroupby;
    }

    public void setKwGroupby(String kwGroupby) {
        this.kwGroupby = kwGroupby;
    }

    public void setKwHaving(String kwHaving) {
        this.kwHaving = kwHaving;
    }

    public String getKwHaving() {
        return kwHaving;
    }
}
构建一棵树:
package com.heyang;

import java.util.List;

/**
 * Recursive-descent parser: turns the token list produced by Lexer into a
 * tree of Node objects (one Node per SELECT, nested Nodes for subqueries).
 *
 * Grammar handled (no functions):
 *   select ... from ... [where ...] [group by ...] [having ...] [order by ...]
 * with parenthesised subqueries allowed in the select list and the from list.
 */
public class TreeBuilder {
    private Node root;           // root of the parse tree
    private List<Token> tokens;  // token stream from the Lexer
    private int tokenIdx;        // cursor into tokens; moved by fetchToken/returnToken

    /** Parses the whole token list immediately; result available via getRoot(). */
    public TreeBuilder(List<Token> tokens) throws Exception{
        this.tokens=tokens;
        this.tokenIdx=0;
        root=new Node();
        parseNode(root);
    }

    /**
     * Parses one SELECT statement starting at the current cursor into
     * {@code parent}; recurses into itself for subqueries.
     * NOTE(review): several fetchToken() results below are dereferenced
     * without a null check, so a truncated statement can raise an NPE
     * instead of a clean parse error.
     */
    private void parseNode(Node parent) throws Exception{
        Token token;
        // select keyword (mandatory first token)
        token=fetchToken();
        if(token.getType()!=Token.TYPE_SELECT) {
            throw new Exception("Expected:'Select' actual:"+token.getText()+" "+token);
        }
        parent.setKwSelect(token.getText());
        // find fields: "col [alias]" or "(select ...) [alias]", comma separated
        for(;;) {
            token=fetchToken();
            Field field=new Field();
            if(token.getType()==Token.TYPE_TEXT) {
                String column=token.getText();
                field.setColumn(column);
            }else if(token.getType()==Token.TYPE_OPEN_PARENTHESIS) {
                // peek past '(' to decide between a subquery and a function call
                token=fetchToken();
                if(token.getType()==Token.TYPE_SELECT) {
                    // only '(' immediately followed by 'select' counts as a subquery
                    Node columnNode=new Node();
                    field.setColumnNode(columnNode);
                    returnToken(); // push 'select' back so the recursive call sees it
                    parseNode(columnNode);
                    token=fetchToken();
                    if(token.getType()!=Token.TYPE_CLOSE_PARENTHESIS) {
                        throw new Exception("Expected:')' actual:"+token.getText()+" "+token);
                    }
                }else {
                    // otherwise it would be a function call — not implemented;
                    // the abandoned draft is kept below for reference
                    /*
                    returnToken(); token=fetchToken();
                    String function="";
                    while(token.getType()!=Token.TYPE_CLOSE_PARENTHESIS) {
                    function+=token.getText();
                    } field.setColumn(function+")");*/
                }
            }else {
                throw new Exception("Expected:text/child query actual:"+token.getText()+" "+token);
            }
            // optional alias: "col alias" or "col as alias"
            String alias=null;
            token=fetchToken();
            if(token.getType()==Token.TYPE_TEXT) {
                alias=token.getText();
            }else if(token.getType()==Token.TYPE_COMMA) {
                returnToken();
            }else if(token.getType()==Token.TYPE_AS) {
                token=fetchToken();
                if(token.getType()!=Token.TYPE_TEXT) {
                    throw new Exception("Expected:text actual:"+token.getText()+" "+token);
                }
                alias=token.getText();
            }else {
                returnToken();
            }
            field.setAlias(alias);
            parent.addField(field);
            // another field only when a comma follows
            token=fetchToken();
            if(token.getType()!=Token.TYPE_COMMA) {
                returnToken();
                break;
            }
        }
        // from keyword (mandatory)
        token=fetchToken();
        if(token.getType()!=Token.TYPE_FROM) {
            throw new Exception("Expected:'from' actual:"+token.getText()+" "+token);
        }
        parent.setKwFrom(token.getText());
        // find tables: "name [alias]" or "(select ...) [alias]", comma separated
        for(;;) {
            Node tableNode=new Node();
            Table table=new Table();
            table.setTableNode(tableNode);
            parent.addTable(table);
            token=fetchToken();
            if(token.getType()==Token.TYPE_TEXT) {
                table.setTableName(token.getText());
            }else if(token.getType()==Token.TYPE_OPEN_PARENTHESIS) {
                // from-list subquery; must be closed by ')'
                parseNode(tableNode);
                token=fetchToken();
                if(token.getType()!=Token.TYPE_CLOSE_PARENTHESIS) {
                    throw new Exception("Expected:')' actual:"+token.getText()+" "+token);
                }
            }
            // optional table alias
            token=fetchToken();
            if(token==null) {
                return;// returning is only legal once the token stream is exhausted
            }else if(token.getType()==Token.TYPE_TEXT) {
                table.setAlias(token.getText());
            }else {
                returnToken();
            }
            token=fetchToken();
            if(token==null) {
                return;
            }else if(token.getType()!=Token.TYPE_COMMA) {
                returnToken();
                break;
            }
        }
        // where keyword (optional)
        token=fetchToken();
        if(token==null) {
            return;
        }else if(token.getType()==Token.TYPE_WHERE) {
            parent.setKwWhere(token.getText());
        }else {
            returnToken();
        }
        if(parent.getKwWhere()!=null) {
            // find conditions: "left op right" chained with and/or.
            // NOTE(review): the Condition is added before its tokens are
            // validated, so a malformed clause can leave an empty Condition
            // in the list.
            for(;;) {
                Condition cond=new Condition();
                parent.addCondition(cond);
                token=fetchToken();
                if(token.getType()==Token.TYPE_TEXT) {
                    cond.setLeft(token.getText());
                }else {
                    returnToken();
                    break;
                }
                token=fetchToken();
                if(token.getType()==Token.TYPE_EQUAL || token.getType()==Token.TYPE_LESSTHAN || token.getType()==Token.TYPE_GREATERTHAN) {
                    cond.setOprand(token.getText());
                }else {
                    throw new Exception("Expected:oprand actual:"+token.getText()+" "+token);
                }
                token=fetchToken();
                if(token.getType()==Token.TYPE_TEXT) {
                    cond.setRight(token.getText());
                }else {
                    throw new Exception("Expected:text actual:"+token.getText()+" "+token);
                }
                token=fetchToken();
                if(token==null) {
                    break;
                }else if(token.getType()==Token.TYPE_AND || token.getType()==Token.TYPE_OR) {
                    cond.setJoint(token.getText());
                }else {
                    returnToken();
                    break;
                }
            }
        }
        // group by keywords (optional; "group" must be followed by "by")
        token=fetchToken();
        if(token==null) {
            return;
        }else if(token.getType()==Token.TYPE_GROUP) {
            String group=token.getText();
            token=fetchToken();
            if(token.getType()==Token.TYPE_BY) {
                String by=token.getText();
                parent.setKwGroupby(group+" "+by);
            }
            else {
                throw new Exception("Expected:'by' actual:"+token.getText()+" "+token);
            }
        }else {
            returnToken();
        }
        // find groupbys: comma-separated text items
        if(parent.getKwGroupby()!=null) {
            for(;;) {
                GroupBy groupby=new GroupBy();
                parent.addGroupby(groupby);
                token=fetchToken();
                if(token.getType()==Token.TYPE_TEXT) {
                    groupby.setItem(token.getText());
                }else {
                    throw new Exception("Expected:text actual:"+token.getText()+" "+token);
                }
                token=fetchToken();
                if(token==null) {
                    break;
                }else if(token.getType()==Token.TYPE_COMMA) {
                    continue;
                }else {
                    returnToken();
                    break;
                }
            }
        }
        // having keyword (optional)
        token=fetchToken();
        if(token==null) {
            return;
        }else if(token.getType()==Token.TYPE_HAVING) {
            String having=token.getText();
            parent.setKwHaving(having);
        }else {
            returnToken();
        }
        if(parent.getKwHaving()!=null) {
            // find havings: same shape as the where-condition loop above
            for(;;) {
                Condition cond=new Condition();
                parent.addHavingCondition(cond);
                token=fetchToken();
                if(token.getType()==Token.TYPE_TEXT) {
                    cond.setLeft(token.getText());
                }else {
                    returnToken();
                    break;
                }
                token=fetchToken();
                if(token.getType()==Token.TYPE_EQUAL || token.getType()==Token.TYPE_LESSTHAN || token.getType()==Token.TYPE_GREATERTHAN) {
                    cond.setOprand(token.getText());
                }else {
                    throw new Exception("Expected:oprand actual:"+token.getText()+" "+token);
                }
                token=fetchToken();
                if(token.getType()==Token.TYPE_TEXT) {
                    cond.setRight(token.getText());
                }else {
                    throw new Exception("Expected:text actual:"+token.getText()+" "+token);
                }
                token=fetchToken();
                if(token==null) {
                    break;
                }else if(token.getType()==Token.TYPE_AND || token.getType()==Token.TYPE_OR) {
                    cond.setJoint(token.getText());
                }else {
                    returnToken();
                    break;
                }
            }
        }
        // order by keywords (optional; "order" must be followed by "by")
        token=fetchToken();
        if(token==null) {
            return;
        }else if(token.getType()==Token.TYPE_ORDER) {
            String order=token.getText();
            token=fetchToken();
            if(token.getType()==Token.TYPE_BY) {
                String by=token.getText();
                parent.setKwOrderby(order+" "+by);
            }
            else {
                throw new Exception("Expected:'by' actual:"+token.getText()+" "+token);
            }
        }else {
            returnToken();
        }
        if(parent.getKwOrderby()!=null) {
            // find orders: "col [asc|desc]", comma separated
            for(;;) {
                Order order=new Order();
                parent.addOrder(order);
                token=fetchToken();
                if(token.getType()==Token.TYPE_TEXT) {
                    order.setColumn(token.getText());
                }else {
                    throw new Exception("Expected:text actual:"+token.getText()+" "+token);
                }
                token=fetchToken();
                if(token==null) {
                    return;
                }else if(token.getType()==Token.TYPE_ASC || token.getType()==Token.TYPE_DESC ) {
                    order.setAscDesc(token.getText());
                }else if(token.getType()==Token.TYPE_COMMA) {
                    returnToken();
                }else {
                    returnToken();
                }
                token=fetchToken();
                if(token==null) {
                    return;
                }else if(token.getType()==Token.TYPE_COMMA) {
                    continue;
                }else {
                    returnToken();
                    break;
                }
            }
        }
    }

    /** Returns the next token and advances the cursor, or null when exhausted. */
    private Token fetchToken() {
        if(tokenIdx>=tokens.size()) {
            return null;
        }else {
            Token t=tokens.get(tokenIdx);
            tokenIdx++;
            return t;
        }
    }

    /** Pushes the most recently fetched token back (one-token lookahead). */
    private void returnToken() {
        if(tokenIdx>0) {
            tokenIdx--;
        }
    }

    public Node getRoot() {
        return root;
    }
}
整合:
package com.heyang;

import com.heyang.util.BracketChecker;
import com.heyang.util.CommonUtil;
import com.heyang.util.Renderer;

/**
 * Command-line entry point: reads an SQL file, checks bracket balance,
 * lexes it and prints the pretty-printed parse tree.
 */
public class EntryPoint {
    public static void main(String[] args) {
        try {
            // Load the SQL text from disk and echo it
            final String text=CommonUtil.readTextFromFile("C:\\hy\\files\\sql\\21.sql");
            System.out.println("原文="+text);

            // Refuse to parse when brackets are unbalanced
            final BracketChecker checker=new BracketChecker();
            if(!checker.isBalanced(text)) {
                System.out.println(Renderer.paintBrown(checker.getErrMsg()));
                return;
            }

            // Lex, then build and print the tree
            final Lexer lexer=new Lexer(text);
            //lexer.printTokens(); // debug aid
            final Node treeRoot=new TreeBuilder(lexer.getTokens()).getRoot();
            System.out.println("整理后文本=\n"+treeRoot);
        }catch(Exception ex) {
            System.out.println(Renderer.paintBrown(ex.getMessage()));
            ex.printStackTrace();
        }
    }
}
测试用例地址:https://www.cnblogs.com/heyang78/p/12985801.html
--2020年5月29日--
SqlAnalyzer1.01 源码的更多相关文章
- FatFsVersion0.01源码分析
目录 一.API的函数功能简述 二.FATFS主要数据结构 1.FAT32文件系统的结构 2.FATFS主要数据结构 ① FATFS ② DIR ③ FIL ④ FILINFO ⑤ wi ...
- VUE 源码学习01 源码入口
VUE[version:2.4.1] Vue项目做了不少,最近在学习设计模式与Vue源码,记录一下自己的脚印!共勉!注:此处源码学习方式为先了解其大模块,从宏观再去到微观学习,以免一开始就研究细节然后 ...
- Zepto v1.0-1源码注解
/* Zepto v1.0-1-ga3cab6c - polyfill zepto detect event ajax form fx - zeptojs.com/license */ ;(funct ...
- SqlAnalyzer1.00源码
SQL解析的夙愿今天终于完成,但限于SQL远超算术表达式,Json,XML等的复杂度,只是解析了一部分.形成普适性的SQL解析,仍需倾注精力. 代码下载:https://files.cnblogs.c ...
- 《java.util.concurrent 包源码阅读》 结束语
<java.util.concurrent 包源码阅读>系列文章已经全部写完了.开始的几篇文章是根据自己的读书笔记整理出来的(当时只阅读了部分的源代码),后面的大部分都是一边读源代码,一边 ...
- UCanCode发布升级E-Form++可视化源码组件库2015全新版 (V23.01)!
2015年4月. 成都 UCanCode发布升级E-Form++可视化源码组件库2015全新版 (V23.01)! --- 全面性能提升,UCanCode有史以来最强大的版本发布! E-Form++可 ...
- UCanCode发布升级E-Form++可视化源码组件库2014 全新版 (V20.01)!
UCanCode发布升级E-Form++可视化源码组件库2014 全新版 (V20.01)! --- UCanCode有史以来最强大的版本发布! E-Form++可视化源码组件库企业版本2014最新版 ...
- ok6410 u-boot-2012.04.01移植二修改源码支持单板
继ok6410 u-boot-2012.04.01移植一后修改代码,对ok6410单板初始化,主要包括时钟.串口.NAND.DDR等初始化.这些工作在以前的裸板程序都写了,直接拿来用.我觉得先写裸板程 ...
- 【iScroll源码学习01】准备阶段 - 叶小钗
[iScroll源码学习01]准备阶段 - 叶小钗 时间 2013-12-29 18:41:00 博客园-原创精华区 原文 http://www.cnblogs.com/yexiaochai/p/3 ...
随机推荐
- PR基础
Windows->Workspace->Reset to saved layout 恢复工作区 Edit->Perferences->Auto Save 设置自动保存时间 资源 ...
- OpenCV开发笔记(六十九):红胖子8分钟带你使用传统方法识别已知物体(图文并茂+浅显易懂+程序源码)
若该文为原创文章,未经允许不得转载原博主博客地址:https://blog.csdn.net/qq21497936原博主博客导航:https://blog.csdn.net/qq21497936/ar ...
- Spring Boot 集成 Elasticsearch 实战
最近有读者问我能不能写下如何使用 Spring Boot 开发 Elasticsearch(以下简称 ES) 相关应用,今天就讲解下如何使用 Spring Boot 结合 ES. 可以在 ES 官方文 ...
- Java对象与类—对象与类
1.类 类(class)是构造对象的模板,具体点说类就是对具有相同性质,相同行为的一群对象的抽象说明.由类构造(construst)对象的过程称为创建类的实例(instance). 2.对象 对象是类 ...
- Apache Hudi表自动同步至阿里云数据湖分析DLA
1. 引入 Hudi 0.6.0版本之前只支持将Hudi表同步到Hive或者兼容Hive的MetaStore中,对于云上其他使用与Hive不同SQL语法MetaStore则无法支持,为解决这个问题,近 ...
- LeetCode343 整数拆分详解
题目详情 给定一个正整数 n,将其拆分为至少两个正整数的和,并使这些整数的乘积最大化. 返回你可以获得的最大乘积. 示例 1: 输入: 2 输出: 1 解释: 2 = 1 + 1, 1 × 1 = 1 ...
- 【ASP.NET Core学习】使用JWT认证授权
概述 认证授权是很多系统的基本功能 , 在以前PC的时代 , 通常是基于cookies-session这样的方式实现认证授权 , 在那个时候通常系统的用户量都不会很大, 所以这种方式也一直很好运行, ...
- Mac中的垃圾文件的清理
一 前言 最近发现mac的存储空间不够了,看一下系统的存储空间如下图所示,这个其他占了160+G的存储空间,那么这个其他到底包含什么东西呢?在网上查了很久,找到一种比较认可的说法是这样的: 不同Mac ...
- 【luogu4137】 Rmq Problem / mex - 莫队
题目描述 有一个长度为n的数组{a1,a2,…,an}.m次询问,每次询问一个区间内最小没有出现过的自然数. 思路 莫队水过去了 233 #include <bits/stdc++.h> ...
- 浏览器自动化的一些体会6 增强的webBrowser控件
这里谈两点 1.支持代理服务器切换 一种方法是修改注册表,不是太好的做法,而且,只能改全局设置,不能改局部(比如只让当前的webBrowser控件使用代理,而其他应用不用代理) 另外一个较好的方法,示 ...