Hadoop Source Code Analysis: The $HADOOP_HOME/bin/hadoop Script
1. $HADOOP_HOME/bin/hadoop
#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# The Hadoop command script
#
# Environment Variables
#
#   JAVA_HOME            The java implementation to use.  Overrides JAVA_HOME.
#
#   HADOOP_CLASSPATH     Extra Java CLASSPATH entries.
#
#   HADOOP_USER_CLASSPATH_FIRST   When defined, the HADOOP_CLASSPATH is
#                                 added in the beginning of the global
#                                 classpath. Can be defined, for example,
#                                 by doing
#                                 export HADOOP_USER_CLASSPATH_FIRST=true
#
#   HADOOP_HEAPSIZE      The maximum amount of heap to use, in MB.
#                        Default is 1000.
#
#   HADOOP_OPTS          Extra Java runtime options.
#
#   HADOOP_NAMENODE_OPTS       These options are added to HADOOP_OPTS
#   HADOOP_CLIENT_OPTS         when the respective command is run.
#   HADOOP_{COMMAND}_OPTS etc  HADOOP_JT_OPTS applies to JobTracker
#                              for e.g.  HADOOP_CLIENT_OPTS applies to
#                              more than one command (fs, dfs, fsck,
#                              dfsadmin etc)
#
#   HADOOP_CONF_DIR      Alternate conf dir. Default is ${HADOOP_HOME}/conf.
#
#   HADOOP_ROOT_LOGGER   The root appender. Default is INFO,console
#

bin=`dirname "$0"`             # (1) Get the directory this script lives in ($HADOOP_HOME/bin)
bin=`cd "$bin"; pwd`           # (2) Resolve it to an absolute path

if [ -e "$bin"/../libexec/hadoop-config.sh ]; then
# (3) Source hadoop-config.sh to load the configuration
. "$bin"/../libexec/hadoop-config.sh
else
. "$bin"/hadoop-config.sh
fi

cygwin=false
case "`uname`" in
CYGWIN*) cygwin=true;;
esac

# if no args specified, show usage
if [ $# = 0 ]; then
echo "Usage: hadoop [--config confdir] COMMAND"
echo "where COMMAND is one of:"
echo "  namenode -format     format the DFS filesystem"
echo "  secondarynamenode    run the DFS secondary namenode"
echo "  namenode             run the DFS namenode"
echo "  datanode             run a DFS datanode"
echo "  dfsadmin             run a DFS admin client"
echo "  mradmin              run a Map-Reduce admin client"
echo "  fsck                 run a DFS filesystem checking utility"
echo "  fs                   run a generic filesystem user client"
echo "  balancer             run a cluster balancing utility"
echo "  fetchdt              fetch a delegation token from the NameNode"
echo "  jobtracker           run the MapReduce job Tracker node"
echo "  pipes                run a Pipes job"
echo "  tasktracker          run a MapReduce task Tracker node"
echo "  historyserver        run job history servers as a standalone daemon"
echo "  job                  manipulate MapReduce jobs"
echo "  queue                get information regarding JobQueues"
echo "  version              print the version"
echo "  jar <jar>            run a jar file"
echo "  distcp <srcurl> <desturl> copy file or directories recursively"
echo "  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
echo "  classpath            prints the class path needed to get the"
echo "                       Hadoop jar and the required libraries"
echo "  daemonlog            get/set the log level for each daemon"
echo " or"
echo "  CLASSNAME            run the class named CLASSNAME"
echo "Most commands print help when invoked w/o parameters."
exit 1
fi

# get arguments
COMMAND=$1
shift

# Determine if we're starting a secure datanode, and if so, redefine appropriate variables
if [ "$COMMAND" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER
starting_secure_dn="true"
fi

# some Java parameters
if [ "$JAVA_HOME" != "" ]; then
#echo "run java in $JAVA_HOME"
JAVA_HOME=$JAVA_HOME
fi

if [ "$JAVA_HOME" = "" ]; then
echo "Error: JAVA_HOME is not set."
exit 1
fi

JAVA=$JAVA_HOME/bin/java
JAVA_HEAP_MAX=-Xmx1000m

# check envvars which might override default args
if [ "$HADOOP_HEAPSIZE" != "" ]; then
#echo "run with heapsize $HADOOP_HEAPSIZE"
JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
#echo $JAVA_HEAP_MAX
fi

# (4) Set up the CLASSPATH
# CLASSPATH initially contains $HADOOP_CONF_DIR
CLASSPATH="${HADOOP_CONF_DIR}"

if [ "$HADOOP_USER_CLASSPATH_FIRST" != "" ] && [ "$HADOOP_CLASSPATH" != "" ]; then
CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
fi

CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar

# for developers, add Hadoop classes to CLASSPATH
if [ -d "$HADOOP_HOME/build/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
fi
if [ -d "$HADOOP_HOME/build/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
fi
if [ -d "$HADOOP_HOME/build/test/classes" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
fi
if [ -d "$HADOOP_HOME/build/tools" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/tools
fi

# so that filenames w/ spaces are handled correctly in loops below
IFS=

# for releases, add core hadoop jar & webapps to CLASSPATH
if [ -e $HADOOP_PREFIX/share/hadoop/hadoop-core-* ]; then
# binary layout
if [ -d "$HADOOP_PREFIX/share/hadoop/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop
fi
for f in $HADOOP_PREFIX/share/hadoop/hadoop-core-*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done

# add libs to CLASSPATH
for f in $HADOOP_PREFIX/share/hadoop/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done

for f in $HADOOP_PREFIX/share/hadoop/lib/jsp-2.1/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done

for f in $HADOOP_PREFIX/share/hadoop/hadoop-tools-*.jar; do
TOOL_PATH=${TOOL_PATH}:$f;
done
else
# tarball layout
if [ -d "$HADOOP_HOME/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME
fi
for f in $HADOOP_HOME/hadoop-core-*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done

# add libs to CLASSPATH
for f in $HADOOP_HOME/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done

if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done

for f in $HADOOP_HOME/hadoop-tools-*.jar; do
TOOL_PATH=${TOOL_PATH}:$f;
done
for f in $HADOOP_HOME/build/hadoop-tools-*.jar; do
TOOL_PATH=${TOOL_PATH}:$f;
done
fi

# add user-specified CLASSPATH last
if [ "$HADOOP_USER_CLASSPATH_FIRST" = "" ] && [ "$HADOOP_CLASSPATH" != "" ]; then
CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
fi

# default log directory & file
if [ "$HADOOP_LOG_DIR" = "" ]; then
HADOOP_LOG_DIR="$HADOOP_HOME/logs"
fi
if [ "$HADOOP_LOGFILE" = "" ]; then
HADOOP_LOGFILE='hadoop.log'
fi

# default policy file for service-level authorization
if [ "$HADOOP_POLICYFILE" = "" ]; then
HADOOP_POLICYFILE="hadoop-policy.xml"
fi

# restore ordinary behaviour
unset IFS

# (5) Determine CLASS according to the given command
# figure out which class to run
if [ "$COMMAND" = "classpath" ] ; then
if $cygwin; then
CLASSPATH=`cygpath -p -w "$CLASSPATH"`
fi
echo $CLASSPATH
exit
elif [ "$COMMAND" = "namenode" ] ; then
CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
elif [ "$COMMAND" = "secondarynamenode" ] ; then
CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
elif [ "$COMMAND" = "datanode" ] ; then
CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
if [ "$starting_secure_dn" = "true" ]; then
HADOOP_OPTS="$HADOOP_OPTS -jvm server $HADOOP_DATANODE_OPTS"
else
HADOOP_OPTS="$HADOOP_OPTS -server $HADOOP_DATANODE_OPTS"
fi
elif [ "$COMMAND" = "fs" ] ; then
CLASS=org.apache.hadoop.fs.FsShell
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "dfs" ] ; then
CLASS=org.apache.hadoop.fs.FsShell
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "dfsadmin" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "mradmin" ] ; then
CLASS=org.apache.hadoop.mapred.tools.MRAdmin
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "fsck" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.DFSck
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "balancer" ] ; then
CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
elif [ "$COMMAND" = "fetchdt" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.DelegationTokenFetcher
elif [ "$COMMAND" = "jobtracker" ] ; then
CLASS=org.apache.hadoop.mapred.JobTracker
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
elif [ "$COMMAND" = "historyserver" ] ; then
CLASS=org.apache.hadoop.mapred.JobHistoryServer
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOB_HISTORYSERVER_OPTS"
elif [ "$COMMAND" = "tasktracker" ] ; then
CLASS=org.apache.hadoop.mapred.TaskTracker
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"
elif [ "$COMMAND" = "job" ] ; then
CLASS=org.apache.hadoop.mapred.JobClient
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "queue" ] ; then
CLASS=org.apache.hadoop.mapred.JobQueueClient
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "pipes" ] ; then
CLASS=org.apache.hadoop.mapred.pipes.Submitter
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "version" ] ; then
CLASS=org.apache.hadoop.util.VersionInfo
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "jar" ] ; then
CLASS=org.apache.hadoop.util.RunJar
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "distcp" ] ; then
CLASS=org.apache.hadoop.tools.DistCp
CLASSPATH=${CLASSPATH}:${TOOL_PATH}
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "daemonlog" ] ; then
CLASS=org.apache.hadoop.log.LogLevel
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "archive" ] ; then
CLASS=org.apache.hadoop.tools.HadoopArchives
CLASSPATH=${CLASSPATH}:${TOOL_PATH}
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "sampler" ] ; then
CLASS=org.apache.hadoop.mapred.lib.InputSampler
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
else
CLASS=$COMMAND
fi

# cygwin path translation
if $cygwin; then
CLASSPATH=`cygpath -p -w "$CLASSPATH"`
HADOOP_HOME=`cygpath -w "$HADOOP_HOME"`
HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
fi

# (6) Determine the JAVA_PLATFORM the JVM reports for this system
JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m ${HADOOP_JAVA_PLATFORM_OPTS} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`

if [ "$JAVA_PLATFORM" = "Linux-amd64-64" ]; then
JSVC_ARCH="amd64"
else
JSVC_ARCH="i386"
fi

# setup 'java.library.path' for native-hadoop code if necessary
JAVA_LIBRARY_PATH=''
if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" -o -e "${HADOOP_PREFIX}/lib/libhadoop.a" ]; then
if [ -d "$HADOOP_HOME/build/native" ]; then
JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
fi

if [ -d "${HADOOP_HOME}/lib/native" ]; then
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
else
JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
fi
fi

if [ -e "${HADOOP_PREFIX}/lib/libhadoop.a" ]; then
JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/lib
fi
fi

# cygwin path translation
if $cygwin; then
JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
fi

HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"

# turn security logger on the namenode and jobtracker only
if [ $COMMAND = "namenode" ] || [ $COMMAND = "jobtracker" ]; then
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,DRFAS}"
else
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
fi

if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
# (7) Point java.library.path at the native Hadoop libraries
HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
fi
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"

# Check to see if we should start a secure datanode
if [ "$starting_secure_dn" = "true" ]; then
if [ "$HADOOP_PID_DIR" = "" ]; then
HADOOP_SECURE_DN_PID="/tmp/hadoop_secure_dn.pid"
else
HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
fi

exec "$HADOOP_HOME/libexec/jsvc.${JSVC_ARCH}" -Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
-errfile "$HADOOP_LOG_DIR/jsvc.err" \
-pidfile "$HADOOP_SECURE_DN_PID" \
-nodetach \
-user "$HADOOP_SECURE_DN_USER" \
-cp "$CLASSPATH" \
$JAVA_HEAP_MAX $HADOOP_OPTS \
org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
else
# run it
exec "$JAVA" -Dproc_$COMMAND $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"   # launch the selected Java class
fi
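Before moving on to hadoop-config.sh, it helps to see what the final exec line actually runs. The expansion below is only a sketch: the heap size, log paths, and logger settings are the script's defaults, and CLASSPATH stands for the long list assembled above. Roughly, `hadoop fs -ls /` turns into:

# Rough expansion of the final exec for `hadoop fs -ls /` (illustrative values only)
$JAVA_HOME/bin/java -Dproc_fs -Xmx1000m \
  -Dhadoop.log.dir=$HADOOP_HOME/logs -Dhadoop.log.file=hadoop.log \
  -Dhadoop.home.dir=$HADOOP_HOME \
  -Dhadoop.root.logger=INFO,console \
  -Dhadoop.security.logger=INFO,NullAppender \
  -Dhadoop.policy.file=hadoop-policy.xml \
  -classpath "$CLASSPATH" \
  org.apache.hadoop.fs.FsShell -ls /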
2. $HADOOP_HOME/bin/hadoop-config.sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# included in all the hadoop scripts with source command
# should not be executable directly
# also should not be passed any arguments, since we need original $*

# resolve links - $0 may be a softlink
this="${BASH_SOURCE-$0}"
common_bin=$(cd -P -- "$(dirname -- "$this")" && pwd -P)
script="$(basename -- "$this")"
this="$common_bin/$script"

# convert relative path to absolute path
config_bin=`dirname "$this"`
script=`basename "$this"`
config_bin=`cd "$config_bin"; pwd`
this="$config_bin/$script"

# the root of the Hadoop installation
export HADOOP_PREFIX=`dirname "$this"`/..

#check to see if the conf dir is given as an optional argument
if [ $# -gt 1 ]
then
if [ "--config" = "$1" ]
then
shift
confdir=$1
shift
HADOOP_CONF_DIR=$confdir
fi
fi

# Allow alternate conf dir location.
if [ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]; then
DEFAULT_CONF_DIR="conf"
else
DEFAULT_CONF_DIR="etc/hadoop"
fi
HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_PREFIX/$DEFAULT_CONF_DIR}"

#check to see it is specified whether to use the slaves or the
# masters file
if [ $# -gt 1 ]
then
if [ "--hosts" = "$1" ]
then
shift
slavesfile=$1
shift
export HADOOP_SLAVES="${HADOOP_CONF_DIR}/$slavesfile"
fi
fi

if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
# Source hadoop-env.sh to apply the user's configuration settings
. "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi

if [ "$HADOOP_HOME_WARN_SUPPRESS" = "" ] && [ "$HADOOP_HOME" != "" ]; then
echo "Warning: \$HADOOP_HOME is deprecated." 1>&2
echo 1>&2
fi

# Newer versions of glibc use an arena memory allocator that causes virtual
# memory usage to explode. This interacts badly with the many threads that
# we use in Hadoop. Tune the variable down to prevent vmem explosion.
export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}

export HADOOP_HOME=${HADOOP_PREFIX}
export HADOOP_HOME_WARN_SUPPRESS=1
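Because hadoop-config.sh is sourced without arguments, it sees and shifts the caller's own positional parameters; that is how --config is stripped off before bin/hadoop reads COMMAND. A small illustration (the conf directory path is hypothetical):

hadoop --config /opt/hadoop/conf.cluster1 fs -ls /
# hadoop-config.sh shifts away "--config /opt/hadoop/conf.cluster1" and sets
# HADOOP_CONF_DIR=/opt/hadoop/conf.cluster1; bin/hadoop then sees COMMAND=fs
# and passes the remaining "-ls /" to org.apache.hadoop.fs.FsShell.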
3. $HADOOP_HOME/conf/hadoop-env.sh
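This file is just another shell fragment sourced by hadoop-config.sh, and its contents vary by installation. A minimal sketch of the kind of variables it typically exports (the paths and sizes below are placeholders, not recommended values):

# The java implementation to use (placeholder path)
export JAVA_HOME=/usr/lib/jvm/java-6-sun

# The maximum amount of heap to use, in MB. Default is 1000.
export HADOOP_HEAPSIZE=2000

# Extra Java runtime options appended for every command
export HADOOP_OPTS=-server

# Command-specific options picked up by bin/hadoop for the matching command
export HADOOP_NAMENODE_OPTS="-Dcom.sun.management.jmxremote $HADOOP_NAMENODE_OPTS"
export HADOOP_CLIENT_OPTS=-Xmx512m

# Where log files are stored. $HADOOP_HOME/logs by default.
# export HADOOP_LOG_DIR=${HADOOP_HOME}/logs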
In summary, when the hadoop command runs it sources hadoop-config.sh and then hadoop-env.sh in turn to set up its configuration and parameters. Those settings are finally handed to a Java class for execution, and the command-line argument determines which class is run and therefore what the command does.
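One practical consequence of the fall-through branch (CLASS=$COMMAND) is that any class already on the assembled CLASSPATH can be launched with the full Hadoop environment. For example (the user jar and class in the second invocation are hypothetical):

# A class already on the classpath can be named directly
hadoop org.apache.hadoop.util.VersionInfo

# User code: put the jar on HADOOP_CLASSPATH first, then name the main class
HADOOP_CLASSPATH=/path/to/myapp.jar hadoop com.example.MyTool arg1 arg2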