SqlAnalyzer1.00源码
SQL解析的夙愿今天终于完成。不过由于SQL的复杂度远超算术表达式、Json、XML等,目前只解析了其中一部分;要形成普适性的SQL解析,仍需倾注精力。
代码下载:https://files.cnblogs.com/files/heyang78/SqlAnalyzer-20200527-2.rar
测试用例:https://www.cnblogs.com/heyang78/p/12974097.html
核心诸类:
Token:
package com.heyang; public class Token{
public static final int TYPE_SELECT=1;
public static final int TYPE_TEXT=2;
public static final int TYPE_COMMA=3;
public static final int TYPE_FROM=4;
public static final int TYPE_WHERE=5;
public static final int TYPE_AND=6;
public static final int TYPE_EQUAL=7;
public static final int TYPE_OR=8;
public static final int TYPE_ORDER=9;
public static final int TYPE_BY=10;
public static final int TYPE_ASC=11;
public static final int TYPE_DESC=12;
public static final int TYPE_AS=13;
public static final int TYPE_GROUP=14;
public static final int TYPE_HAVING=15;
public static final int TYPE_LESSTHAN=16;
public static final int TYPE_GREATERTHAN=17;
public static final int TYPE_OPEN_PARENTHESIS=18;
public static final int TYPE_CLOSE_PARENTHESIS=19;
public static final int TYPE_CONNECT=20;
public static final int TYPE_LESSTHAN_OR_EQUAL=21;
public static final int TYPE_GREATERTHAN_OR_EQUAL=22;
public static final int TYPE_LESSTHAN_OR_GREATERTHAN=23;
public static final int TYPE_CASE=24;
public static final int TYPE_WHEN=25;
public static final int TYPE_THEN=26;
public static final int TYPE_ELSE=27;
public static final int TYPE_END=28;
public static final int TYPE_IS=29;
public static final int TYPE_NULL=30;
public static final int TYPE_TRUE=31;
public static final int TYPE_FALSE=32;
public static final int TYPE_PLUS=33;
public static final int TYPE_MINUS=34;
//public static final int TYPE_MULTI=35;
public static final int TYPE_DEVIDE=36;
public static final int TYPE_DISTINCT=37;
public static final int TYPE_OVER=38;
public static final int TYPE_STRING_CONCAT=39;
public static final int TYPE_ON=40;
public static final int TYPE_JOIN=41;
public static final int TYPE_INNER=42;
public static final int TYPE_LEFT=43;
public static final int TYPE_RIGHT=44;
public static final int TYPE_OUTER=45;
public static final int TYPE_FULL=46;
public static final int TYPE_WITHIN=47;
public static final int TYPE_PARTITION=48;
public static final int TYPE_LIKE=49;
public static final int TYPE_LIMIT=50; private int type;
private String text;
private int index;// Used to remember location public Token(char c,int type) {
this.text=String.valueOf(c);
this.type=type;
} public Token(String word,int type) {
this.text=word;
this.type=type;
} public String toString() {
return String.format("token(text=%s,type=%s,index=%d)", text,getTypeStr(),index);
} public String getTypeStr() {
if(type==TYPE_SELECT) {
return "KW:select";
}else if(type==TYPE_FROM) {
return "KW:from";
}else if(type==TYPE_COMMA) {
return "Comma";
}else if(type==TYPE_TEXT) {
return "Text";
}else if(type==TYPE_WHERE) {
return "KW:where";
}else if(type==TYPE_AND) {
return "KW:and";
}else if(type==TYPE_EQUAL) {
return "=";
}else if(type==TYPE_OR) {
return "KW:or";
}else if(type==TYPE_ORDER) {
return "KW:order";
}else if(type==TYPE_BY) {
return "KW:by";
}else if(type==TYPE_ASC) {
return "KW:asc";
}else if(type==TYPE_DESC) {
return "KW:desc";
}else if(type==TYPE_AS) {
return "KW:as";
}else if(type==TYPE_GROUP) {
return "KW:group";
}else if(type==TYPE_HAVING) {
return "KW:having";
}else if(type==TYPE_LESSTHAN) {
return "<";
}else if(type==TYPE_GREATERTHAN) {
return ">";
}else if(type==TYPE_OPEN_PARENTHESIS) {
return "(";
}else if(type==TYPE_CLOSE_PARENTHESIS) {
return ")";
}else if(type==TYPE_CONNECT) {
return "KW:connect";
}else if(type==TYPE_LESSTHAN_OR_EQUAL) {
return "<=";
}else if(type==TYPE_GREATERTHAN_OR_EQUAL) {
return ">=";
}else if(type==TYPE_LESSTHAN_OR_GREATERTHAN) {
return "<>";
}else if(type==TYPE_CASE) {
return "KW:case";
}else if(type==TYPE_WHEN) {
return "KW:when";
}else if(type==TYPE_THEN) {
return "KW:then";
}else if(type==TYPE_ELSE) {
return "KW:else";
}else if(type==TYPE_END) {
return "KW:end";
}else if(type==TYPE_IS) {
return "KW:is";
}else if(type==TYPE_NULL) {
return "KW:null";
}else if(type==TYPE_TRUE) {
return "KW:true";
}else if(type==TYPE_FALSE) {
return "KW:false";
}else if(type==TYPE_PLUS) {
return "+";
}else if(type==TYPE_MINUS) {
return "-";
}else if(type==TYPE_DEVIDE) {
return "/";
}else if(type==TYPE_DISTINCT) {
return "KW:distinct";
}else if(type==TYPE_OVER) {
return "KW:over";
}else if(type==TYPE_STRING_CONCAT) {
return "||";
}else if(type==TYPE_ON) {
return "KW:on";
}else if(type==TYPE_JOIN) {
return "KW:join";
}else if(type==TYPE_INNER) {
return "KW:inner";
}else if(type==TYPE_LEFT) {
return "KW:left";
}else if(type==TYPE_RIGHT) {
return "KW:right";
}else if(type==TYPE_OUTER) {
return "KW:outer";
}else if(type==TYPE_FULL) {
return "KW:full";
}else if(type==TYPE_WITHIN) {
return "KW:within";
}else if(type==TYPE_PARTITION) {
return "KW:partition";
}else if(type==TYPE_LIKE) {
return "KW:like";
}else if(type==TYPE_LIMIT) {
return "KW:limit";
} return null;
} public int getType() {
return type;
} public String getText() {
return text;
} public void setText(String text) {
this.text=text;
} public int getIndex() {
return index;
} public void setIndex(int index) {
this.index = index;
} public void setType(int type) {
this.type = type;
}
}
Lexer:
package com.heyang; import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List; // v1.00
public class Lexer {
private List<Token> tokens; public Lexer(String inputSql) {
String sql=pretreat(inputSql);
String swallowed=""; tokens=new ArrayList<Token>();
for(int i=0;i<sql.length();i++){
char c=sql.charAt(i); if(Character.isWhitespace(c)){
addTextToList(swallowed);
swallowed="";
}else if(c==','){
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(c,Token.TYPE_COMMA));
}else if(c=='='){
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(c,Token.TYPE_EQUAL));
}else if(c=='<'){
int next=i+1;
if(next<sql.length() && sql.charAt(next)=='=') {
addTextToList(swallowed);
swallowed="";
tokens.add(new Token("<=",Token.TYPE_LESSTHAN_OR_EQUAL));
i++;
}else if(next<sql.length() && sql.charAt(next)=='>') {
addTextToList(swallowed);
swallowed="";
tokens.add(new Token("<>",Token.TYPE_LESSTHAN_OR_GREATERTHAN));
i++;
}else {
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(c,Token.TYPE_LESSTHAN));
}
}else if(c=='>'){
int next=i+1;
if(next<sql.length() && sql.charAt(next)=='=') {
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(">=",Token.TYPE_GREATERTHAN_OR_EQUAL));
i++;
}else {
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(c,Token.TYPE_GREATERTHAN));
}
}else if(c=='|'){
int next=i+1;
if(next<sql.length() && sql.charAt(next)=='|') {
addTextToList(swallowed);
swallowed="";
tokens.add(new Token("||",Token.TYPE_STRING_CONCAT));
i++;
}
}else if(c=='('){
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(c,Token.TYPE_OPEN_PARENTHESIS));
}else if(c==')'){
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(c,Token.TYPE_CLOSE_PARENTHESIS));
}else if(c=='+'){
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(c,Token.TYPE_PLUS));
}else if(c=='-'){
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(c,Token.TYPE_MINUS));
}else if(c=='/'){
addTextToList(swallowed);
swallowed="";
tokens.add(new Token(c,Token.TYPE_DEVIDE));
}else {
swallowed+=c;
}
} setTokenIndexes();
} public void setTokenIndexes() {
int idx = 0;
for (Token t : tokens) {
idx++;
t.setIndex(idx);
}
} private int findTypeByText(String text) {
Object[][] arr= {
{"select", Token.TYPE_SELECT},
{"from", Token.TYPE_FROM},
{"where", Token.TYPE_WHERE},
{"and", Token.TYPE_AND},
{"or", Token.TYPE_OR},
{"order", Token.TYPE_ORDER},
{"by", Token.TYPE_BY},
{"asc", Token.TYPE_ASC},
{"desc", Token.TYPE_ASC},
{"asc", Token.TYPE_DESC},
{"as", Token.TYPE_AS},
{"group", Token.TYPE_GROUP},
{"having", Token.TYPE_HAVING},
{"connect", Token.TYPE_CONNECT},
{"case", Token.TYPE_CASE},
{"when", Token.TYPE_WHEN},
{"then", Token.TYPE_THEN},
{"else", Token.TYPE_ELSE},
{"end", Token.TYPE_END},
{"is", Token.TYPE_IS},
{"null", Token.TYPE_NULL},
{"true", Token.TYPE_TRUE},
{"false", Token.TYPE_FALSE},
{"distinct", Token.TYPE_DISTINCT},
{"over", Token.TYPE_OVER},
{"on", Token.TYPE_ON},
{"join", Token.TYPE_JOIN},
{"inner", Token.TYPE_INNER},
{"left", Token.TYPE_LEFT},
{"right", Token.TYPE_RIGHT},
{"outer", Token.TYPE_OUTER},
{"full", Token.TYPE_FULL},
{"within", Token.TYPE_WITHIN},
{"partition", Token.TYPE_PARTITION},
{"like", Token.TYPE_LIKE},
{"limit", Token.TYPE_LIMIT},
}; for(Object[] arrInner:arr) {
String keyword=String.valueOf(arrInner[0]);
if(keyword.equalsIgnoreCase(text)) {
return Integer.parseInt(arrInner[1].toString());
}
} return Token.TYPE_TEXT;
} private void addTextToList(String text) {
int type=findTypeByText(text);
addToken2List(text,type);
} private void addToken2List(String text,int type) {
if(text.trim().length()>0) {
tokens.add(new Token(text,type));
}
} public void printTokens() {
final String continuousStar = createRepeatedStr("-", 84);
final String layout = "%-20s %-20s %-20s %-20s %s";
StringBuilder sb = new StringBuilder(); sb.append(String.format(layout, "Index", "Type No","Text","Type Desc","\n"));
sb.append(continuousStar + "\n");
int index=0;
for(Token token:tokens) {
sb.append(String.format(layout, String.valueOf(index),String.valueOf(token.getType()), token.getText(),token.getTypeStr(),"\n"));
index++;
} System.out.println(sb.toString());
} public List<Token> getTokens() {
return tokens;
} private static String createRepeatedStr(String seed, int n) {
return String.join("", Collections.nCopies(n, seed));
} private String pretreat(String raw) {
return raw.trim()+" ";
} public static void main(String[] args) throws Exception{
String sql=removeExtraSpace(readSqlFromFile("c:\\temp\\13.sql"));
System.out.println(sql);
new Lexer(sql).printTokens();
} private static String readSqlFromFile(String filePath) throws Exception{
StringBuilder sb=new StringBuilder();
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "UTF-8"));
String line = null;
while( ( line = br.readLine() ) != null ) {
sb.append(line);
}
br.close();
return sb.toString();
} private static String removeExtraSpace(String raw) {
return raw.replaceAll("\\s{2,}", " ");
}
}
Node:
package com.heyang; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; public class Node {
private String kwSelect;
private List<Field> fields;
private String kwFrom;
private List<Table> tables;
private String kwWhere;
private List<Condition> conditions;
private String kwOrderby;
private List<Order> orders;
private int depth=0; public void addOrder(Order o) {
if(orders==null) {
orders=new ArrayList<Order>();
} orders.add(o);
} public void addCondition(Condition c) {
if(conditions==null) {
conditions=new ArrayList<Condition>();
} conditions.add(c);
} public String toString() {
String tabs=getIndentSpace(); StringBuilder sb=new StringBuilder();
sb.append(tabs+kwSelect+"\n"); if(fields!=null) {
List<String> ls=new ArrayList<String>();
for(Field f:fields) {
ls.add(tabs+" "+f.toString());
} sb.append(String.join(",\n", ls));
}
sb.append("\n");
sb.append(tabs+kwFrom+"\n"); if(tables!=null) {
List<String> ls=new ArrayList<String>();
for(Table tb:tables) {
ls.add(tabs+" "+tb.toString()+"");
} sb.append(String.join(",\n", ls));
} if(kwWhere!=null) {
sb.append("\n");
sb.append(tabs+kwWhere+"\n");
}
if(conditions!=null) {
List<String> ls=new ArrayList<String>();
for(Condition cd:conditions) {
ls.add(tabs+" "+cd.toString()+"");
} sb.append(String.join("\n", ls));
} if(kwOrderby!=null) {
sb.append("\n");
sb.append(tabs+kwOrderby+"\n");
}
if(orders!=null) {
List<String> ls=new ArrayList<String>();
for(Order od:orders) {
ls.add(tabs+" "+od.toString()+"");
} sb.append(String.join(",\n", ls));
} return sb.toString();
} private String getIndentSpace() {
return String.join("", Collections.nCopies(this.depth, " "));
} private void adjustDepth() {
if(tables==null) {
return;
}
for(Table tb:tables) {
Node nd=tb.getTableNode();
if(nd!=null) {
nd.depth=this.depth+1;
nd.adjustDepth();
}
} if(fields==null) {
return;
}
for(Field fd:fields) {
Node nd=fd.getColumnNode();
if(nd!=null) {
nd.depth=this.depth+1;
nd.adjustDepth();
}
}
} public void addTable(Table t) {
if(tables==null) {
tables=new ArrayList<Table>();
} tables.add(t);
adjustDepth();
} public void addField(Field f) {
if(fields==null) {
fields=new ArrayList<Field>();
} fields.add(f);
adjustDepth();;
} public void setKwSelect(String s) {
this.kwSelect=s;
} public String getKwFrom() {
return kwFrom;
} public void setKwFrom(String kwFrom) {
this.kwFrom = kwFrom;
} public String getKwSelect() {
return kwSelect;
} public String getKwWhere() {
return kwWhere;
} public void setKwWhere(String kwWhere) {
this.kwWhere = kwWhere;
} public String getKwOrderby() {
return kwOrderby;
} public void setKwOrderby(String kwOrderby) {
this.kwOrderby = kwOrderby;
}
}
TreeBuilder:
package com.heyang; import java.util.List; public class TreeBuilder {
private Node root;
private List<Token> tokens;
private int tokenIdx; public TreeBuilder(List<Token> tokens) throws Exception{
this.tokens=tokens;
this.tokenIdx=0; root=new Node();
parseNode(root); } private void parseNode(Node parent) throws Exception{
Token token; // select
token=fetchToken();
if(token.getType()!=Token.TYPE_SELECT) {
throw new Exception("Expected:'Select' actual:"+token.getText()+" "+token);
}
parent.setKwSelect(token.getText()); // find fields
for(;;) {
token=fetchToken(); Field field=new Field();
if(token.getType()==Token.TYPE_TEXT) {
String column=token.getText();
field.setColumn(column);
}else if(token.getType()==Token.TYPE_OPEN_PARENTHESIS) {
Node columnNode=new Node();
field.setColumnNode(columnNode); parseNode(columnNode); token=fetchToken();
if(token.getType()!=Token.TYPE_CLOSE_PARENTHESIS) {
throw new Exception("Expected:')' actual:"+token.getText()+" "+token);
}
}else {
throw new Exception("Expected:text/child query actual:"+token.getText()+" "+token);
} String alias=null;
token=fetchToken();
if(token.getType()==Token.TYPE_TEXT) {
alias=token.getText();
}else if(token.getType()==Token.TYPE_COMMA) {
returnToken();
}else if(token.getType()==Token.TYPE_AS) {
token=fetchToken();
if(token.getType()!=Token.TYPE_TEXT) {
throw new Exception("Expected:text actual:"+token.getText()+" "+token);
}
alias=token.getText();
}else {
returnToken();
}
field.setAlias(alias);
parent.addField(field); token=fetchToken();
if(token.getType()!=Token.TYPE_COMMA) {
returnToken();
break;
}
} // from
token=fetchToken();
if(token.getType()!=Token.TYPE_FROM) {
throw new Exception("Expected:'from' actual:"+token.getText()+" "+token);
}
parent.setKwFrom(token.getText()); // find tables
for(;;) {
Node tableNode=new Node();
Table table=new Table();
table.setTableNode(tableNode);
parent.addTable(table); token=fetchToken();
if(token.getType()==Token.TYPE_TEXT) {
table.setTableName(token.getText());
}else if(token.getType()==Token.TYPE_OPEN_PARENTHESIS) {
parseNode(tableNode); token=fetchToken();
if(token.getType()!=Token.TYPE_CLOSE_PARENTHESIS) {
throw new Exception("Expected:')' actual:"+token.getText()+" "+token);
}
} token=fetchToken();
if(token==null) {
return;
}else if(token.getType()==Token.TYPE_TEXT) {
table.setAlias(token.getText());
}else {
returnToken();
} token=fetchToken();
if(token==null) {
return;
}else if(token.getType()!=Token.TYPE_COMMA) {
returnToken();
break;
}
} // where
token=fetchToken();
if(token==null) {
return;
}else if(token.getType()==Token.TYPE_WHERE) {
parent.setKwWhere(token.getText());
}else {
returnToken();
return;
} // find conditions
for(;;) {
Condition cond=new Condition();
parent.addCondition(cond); token=fetchToken();
if(token.getType()==Token.TYPE_TEXT) {
cond.setLeft(token.getText());
}else {
returnToken();
break;
} token=fetchToken();
if(token.getType()==Token.TYPE_EQUAL || token.getType()==Token.TYPE_LESSTHAN || token.getType()==Token.TYPE_GREATERTHAN) {
cond.setOprand(token.getText());
}else {
throw new Exception("Expected:oprand actual:"+token.getText()+" "+token);
} token=fetchToken();
if(token.getType()==Token.TYPE_TEXT) {
cond.setRight(token.getText());
}else {
throw new Exception("Expected:text actual:"+token.getText()+" "+token);
} token=fetchToken();
if(token==null) {
break;
}else if(token.getType()==Token.TYPE_AND || token.getType()==Token.TYPE_OR) {
cond.setJoint(token.getText());
}else {
returnToken();
break;
}
} // order by
token=fetchToken();
if(token==null) {
return;
}else if(token.getType()==Token.TYPE_ORDER) {
String order=token.getText(); token=fetchToken();
if(token.getType()==Token.TYPE_BY) {
String by=token.getText();
parent.setKwOrderby(order+" "+by);
}
else {
throw new Exception("Expected:'by' actual:"+token.getText()+" "+token);
} }else {
returnToken();
return;
} // find orders
for(;;) {
Order order=new Order();
parent.addOrder(order); token=fetchToken();
if(token.getType()==Token.TYPE_TEXT) {
order.setColumn(token.getText());
}else {
throw new Exception("Expected:text actual:"+token.getText()+" "+token);
} token=fetchToken();
if(token==null) {
return;
}else if(token.getType()==Token.TYPE_ASC || token.getType()==Token.TYPE_DESC ) {
order.setAscDesc(token.getText());
}else if(token.getType()==Token.TYPE_COMMA) {
returnToken();
}else {
throw new Exception("Expected:'asc/desc/,' actual:"+token.getText()+" "+token);
} token=fetchToken();
if(token==null) {
return;
}else if(token.getType()==Token.TYPE_COMMA) {
continue;
}
}
} private Token fetchToken() {
if(tokenIdx>=tokens.size()) {
return null;
}else {
Token t=tokens.get(tokenIdx);
tokenIdx++;
return t;
}
} private void returnToken() {
if(tokenIdx>0) {
tokenIdx--;
}
} public Node getRoot() {
return root;
}
}
统合类:
package com.heyang; import com.heyang.util.BracketChecker;
import com.heyang.util.CommonUtil;
import com.heyang.util.Renderer; public class EntryPoint {
public static void main(String[] args) {
try {
// Read context from file
String text=CommonUtil.readTextFromFile("C:\\hy\\files\\sql\\10.sql");
System.out.println("原文="+text); // Is brackets balanced
BracketChecker checker=new BracketChecker();
boolean isBalanced=checker.isBalanced(text);
if(isBalanced==false) {
System.out.println(Renderer.paintBrown(checker.getErrMsg()));
return;
} // lex text to tokens
Lexer lex=new Lexer(text);
//lex.printTokens(); // Build tree
TreeBuilder tb=new TreeBuilder(lex.getTokens());
Node root=tb.getRoot();
System.out.println("整理后文本=\n"+root); }catch(Exception ex) {
System.out.println(Renderer.paintBrown(ex.getMessage()));
ex.printStackTrace();
}
}
}
执行效果:
原文=select name as name,birthday as b,column c from tc c,tb b,(select a1,a2,a3 from (select * from tbc) tbc) ta
整理后文本=
select
name name,
birthday b,
column c
from
tc c,
tb b,
(
select
a1,
a2,
a3
from
(
select
*
from
tbc) tbc) ta
--2020.05.27--
SqlAnalyzer1.00源码的更多相关文章
- XmlAnalyzer1.00 源码
此工程用途:将xml同级属性/子节点按字母序排列重新输出. 源码下载: https://files.cnblogs.com/files/heyang78/XmlAnalyzer-20200526-1. ...
- SqlAnalyzer1.01 源码
源码下载:https://files.cnblogs.com/files/heyang78/SqlAnalyzer-20200529-2.rar 现有功能:不带函数允许嵌套的select ...fro ...
- 【iScroll源码学习00】模拟iScroll
前言 相信对移动端有了解的朋友对iScroll这个库非常熟悉吧,今天我们就来说下我们移动页面的iScroll化 iScroll是我们必学框架之一,我们这次先根据iScroll功能自己实现其功能,然后再 ...
- C# Excel导入、导出【源码下载】
本篇主要介绍C#的Excel导入.导出. 目录 1. 介绍:描述第三方类库NPOI以及Excel结构 2. Excel导入:介绍C#如何调用NPOI进行Excel导入,包含:流程图.NOPI以及C#代 ...
- 一篇文章看懂TPCx-BB(大数据基准测试工具)源码
TPCx-BB是大数据基准测试工具,它通过模拟零售商的30个应用场景,执行30个查询来衡量基于Hadoop的大数据系统的包括硬件和软件的性能.其中一些场景还用到了机器学习算法(聚类.线性回归等).为了 ...
- AFNetworking 3.0 源码解读 总结(干货)(下)
承接上一篇AFNetworking 3.0 源码解读 总结(干货)(上) 21.网络服务类型NSURLRequestNetworkServiceType 示例代码: typedef NS_ENUM(N ...
- Web应用之LAMP源码环境部署
一.LAMP环境的介绍 1.LAMP环境的重要性 思索许久,最终还是决定写一篇详细的LAMP的源码编译安装的实验文档,一来是为了给自己一个交代,把技术进行系统的归纳,将技术以极致的形式呈现出来,做为一 ...
- 【原】SDWebImage源码阅读(五)
[原]SDWebImage源码阅读(五) 本文转载请注明出处 —— polobymulberry-博客园 1. 前言 前面的代码并没有特意去讲SDWebImage的缓存机制,主要是想单独开一章节专门讲 ...
- AFNetworking 3.0 源码解读(三)之 AFURLRequestSerialization
这篇就讲到了跟请求相关的类了 关于AFNetworking 3.0 源码解读 的文章篇幅都会很长,因为不仅仅要把代码进行详细的的解释,还会大概讲解和代码相关的知识点. 上半篇: URI编码的知识 关于 ...
随机推荐
- spring data jpa 代码生成!!(精华帖)
通过数据库动态生成自己想要生成的各种模板,需要了解grovery. view -> Tool Windows -> Database + -> Data source -> M ...
- springboot配置字符编码
这边主要有两种方式 方式一.使用传统的Spring提供的字符编码过滤器(Filter的方式) 因为,字符编码过滤器在框架中已经有了,所以我们不需要自己写了.直接进行配置类的实现: @Configura ...
- .NET Core 微服务—API网关(Ocelot) 教程 [四]
前言: 上一篇 介绍了Ocelot网关和认证服务的结合使用,本篇继续介绍Ocelot相关请求聚合和Ocelot限流 一.请求聚合 Ocelot允许声明聚合路由,这样可以把多个正常的Routes打包并映 ...
- Django-model查询[为空、由某字符串开头、由某字符串结尾、包含某字符串],__isnull、__starswith、__endswith、__contains
使用属性+__isnull就可以判断此字段为空 a = DatasClass.objects.filter(name__isnull=True) 使用属性+__startswith可以判断属性由某字符 ...
- P1616疯狂的采药 完全背包
题目背景 此题为纪念 LiYuxiang 而生. 题目描述 LiYuxiang 是个天资聪颖的孩子,他的梦想是成为世界上最伟大的医师.为此,他想拜附近最有威望的医师为师.医师为了判断他的资质,给他出了 ...
- C#LeetCode刷题之#203-删除链表中的节点(Remove Linked List Elements)
问题 该文章的最新版本已迁移至个人博客[比特飞],单击链接 https://www.byteflying.com/archives/3826 访问. 删除链表中等于给定值 val 的所有节点. 输入: ...
- 《T-GCN: A Temporal Graph Convolutional Network for Traffic Prediction》 代码解读
论文链接:https://arxiv.org/abs/1811.05320 博客原作者Missouter,博客链接https://www.cnblogs.com/missouter/,欢迎交流. 解读 ...
- Mac中的垃圾文件的清理
一 前言 最近发现mac的存储空间不够了,看一下系统的存储空间如下图所示,这个其他占了160+G的存储空间,那么这个其他到底包含什么东西呢?在网上查了很久,找到一种比较认可的说法是这样的: 不同Mac ...
- SQLserver 查询某个表的字段及字段属性
SELECT C.name as [字段名],T.name as [字段类型] ,convert(bit,C.IsNullable) as [可否为空] ,convert(bit,case when ...
- 使用Kali中的Metasploit生成木马控制Windows系统
一.概念:Kali基于debin的数字取证系统,上面集成了很多渗透测试工具,其前身是BT5 R3(BtackTrack). 其中Metasploit是一个综合利用工具,极大的提高了攻击者的渗透效率,使 ...