Preparation:
Make sure the hadoop2.2.0 cluster is up and running.
1. Create a Maven project in Eclipse and edit the pom file as follows:
<dependencies>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>0.96.-hadoop2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.2.0</version>
    </dependency>
    <dependency>
        <groupId>jdk.tools</groupId>
        <artifactId>jdk.tools</artifactId>
        <version>1.7</version>
        <scope>system</scope>
        <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
    </dependency>
</dependencies>
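hbase-client normally pulls in hadoop-common 2.2.0 transitively; if Hadoop classes such as org.apache.hadoop.conf.Configuration still fail to resolve, the dependency can also be declared explicitly. A sketch, pinned to the same version as the cluster:

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.2.0</version>
</dependency>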

2. Copy log4j.properties into the root of src/main/resources so that detailed logs can be viewed through log4j:

log4j.rootLogger=debug, stdout, R
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%5p - %m%n
log4j.appender.R=org.apache.log4j.RollingFileAppender
log4j.appender.R.File=firestorm.log
log4j.appender.R.MaxFileSize=100KB
log4j.appender.R.MaxBackupIndex=
log4j.appender.R.layout=org.apache.log4j.PatternLayout
log4j.appender.R.layout.ConversionPattern=%p %t %c - %m%n
log4j.logger.com.codefutures=DEBUG

3. Drop in a runnable Hadoop program. I use an HdfsDAO class here, so I can first verify that plain HDFS operations work:

package com.bigdata.hdfs;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.JobConf;

public class HdfsDAO {

    private static final String HDFS = "hdfs://192.168.11.37:9000/";

    private String hdfsPath;
    private Configuration conf;

    public HdfsDAO(Configuration conf) {
        this(HDFS, conf);
    }

    public HdfsDAO(String hdfs, Configuration conf) {
        this.hdfsPath = hdfs;
        this.conf = conf;
    }

    public static void main(String[] args) throws IOException {
        JobConf conf = config();
        HdfsDAO hdfs = new HdfsDAO(conf);
        // hdfs.copyFile("datafile/item.csv", "/tmp/new");
        // hdfs.ls("/tmp/new");
        hdfs.ls("/");
    }

    public static JobConf config() {
        JobConf conf = new JobConf(HdfsDAO.class);
        conf.setJobName("HdfsDAO");
        conf.addResource("classpath:/hadoop/core-site.xml");
        conf.addResource("classpath:/hadoop/hdfs-site.xml");
        conf.addResource("classpath:/hadoop/mapred-site.xml");
        return conf;
    }

    public void mkdirs(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        if (!fs.exists(path)) {
            fs.mkdirs(path);
            System.out.println("Create: " + folder);
        }
        fs.close();
    }

    public void rmr(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.deleteOnExit(path);
        System.out.println("Delete: " + folder);
        fs.close();
    }

    public void ls(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        System.out.println("ls: " + folder);
        System.out.println("==========================================================");
        for (FileStatus f : list) {
            System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDir(), f.getLen());
        }
        System.out.println("==========================================================");
        fs.close();
    }

    public void createFile(String file, String content) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        byte[] buff = content.getBytes();
        FSDataOutputStream os = null;
        try {
            os = fs.create(new Path(file));
            os.write(buff, 0, buff.length);
            System.out.println("Create: " + file);
        } finally {
            if (os != null)
                os.close();
        }
        fs.close();
    }

    public void copyFile(String local, String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyFromLocalFile(new Path(local), new Path(remote));
        System.out.println("copy from: " + local + " to " + remote);
        fs.close();
    }

    public void download(String remote, String local) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyToLocalFile(path, new Path(local));
        System.out.println("download: from " + remote + " to " + local);
        fs.close();
    }

    public void cat(String remoteFile) throws IOException {
        Path path = new Path(remoteFile);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FSDataInputStream fsdis = null;
        System.out.println("cat: " + remoteFile);
        try {
            fsdis = fs.open(path);
            IOUtils.copyBytes(fsdis, System.out, 4096, false); // buffer size lost in the original; 4096 is a typical value
        } finally {
            IOUtils.closeStream(fsdis);
            fs.close();
        }
    }

    public void location() throws IOException {
        // String folder = hdfsPath + "create/";
        // String file = "t2.txt";
        // FileSystem fs = FileSystem.get(URI.create(hdfsPath), new Configuration());
        // FileStatus f = fs.getFileStatus(new Path(folder + file));
        // BlockLocation[] list = fs.getFileBlockLocations(f, 0, f.getLen());
        //
        // System.out.println("File Location: " + folder + file);
        // for (BlockLocation bl : list) {
        //     String[] hosts = bl.getHosts();
        //     for (String host : hosts) {
        //         System.out.println("host:" + host);
        //     }
        // }
        // fs.close();
    }
}
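Once ls("/") works, the remaining helpers can be exercised the same way. A possible variant of main for a quick smoke test; the /tmp/new path and the file content are only examples:

    public static void main(String[] args) throws IOException {
        HdfsDAO hdfs = new HdfsDAO(config());
        hdfs.mkdirs("/tmp/new");                             // create a directory
        hdfs.createFile("/tmp/new/hello.txt", "hello hdfs"); // write a small file
        hdfs.cat("/tmp/new/hello.txt");                      // print its content
        hdfs.ls("/tmp/new");                                 // list the directory
        hdfs.rmr("/tmp/new");                                // clean up
    }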

4. Run HdfsDAO.

It fails with the following errors:
java.io.IOException: HADOOP_HOME or hadoop.home.dir are not set.
at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:225)
at org.apache.hadoop.util.Shell.<clinit>(Shell.java:250)
at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:76)
at org.apache.hadoop.conf.Configuration.getTrimmedStrings(Configuration.java:1546)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:519)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:453)
at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:136)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2433)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
at HdfsDAO.copyFile(HdfsDAO.java:94)
at HdfsDAO.main(HdfsDAO.java:34)
ERROR - Failed to locate the winutils binary in the hadoop binary path
java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.
at org.apache.hadoop.util.Shell.getQualifiedBinPath(Shell.java:278)
at org.apache.hadoop.util.Shell.getWinUtilsPath(Shell.java:300)
at org.apache.hadoop.util.Shell.<clinit>(Shell.java:293)
at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:76)
at org.apache.hadoop.conf.Configuration.getTrimmedStrings(Configuration.java:1546)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:519)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:453)
at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:136)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2433)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
at HdfsDAO.copyFile(HdfsDAO.java:94)
at HdfsDAO.main(HdfsDAO.java:34)

  

Fix:
First, set the HADOOP_HOME environment variable on the Win7 machine, pointing at the local hadoop2.2.0 root directory.
Then download the hadoop2.2.0 bin directory from https://github.com/srccodes/hadoop-common-2.2.0-bin, which contains winutils.exe,
and copy it into $HADOOP_HOME/bin.
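If you would rather not touch the system environment variables (or Eclipse was started before HADOOP_HOME was set), the hadoop.home.dir property named in the exception above can be set in code instead. A minimal sketch; the path D:\hadoop-2.2.0 is only a placeholder for the local unpacked Hadoop directory:

import com.bigdata.hdfs.HdfsDAO;

public class HadoopHomeSetup {
    public static void main(String[] args) throws Exception {
        // Equivalent to setting HADOOP_HOME: Hadoop's Shell class also accepts the
        // hadoop.home.dir system property, as long as it is set before the first
        // FileSystem/Configuration call.
        System.setProperty("hadoop.home.dir", "D:\\hadoop-2.2.0");
        HdfsDAO.main(args); // then run the HDFS test as before
    }
}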
5. Restart and run it again; this time it executes cleanly:
DEBUG - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Rate of successful kerberos logins and latency (milliseconds)], always=false, type=DEFAULT, sampleName=Ops)
DEBUG - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Rate of failed kerberos logins and latency (milliseconds)], always=false, type=DEFAULT, sampleName=Ops)
DEBUG - UgiMetrics, User and group related metrics
DEBUG - Kerberos krb5 configuration not found, setting default realm to empty
DEBUG - Creating new Groups object
DEBUG - Trying to load the custom-built native-hadoop library...
DEBUG - Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path
DEBUG - java.library.path=D:\Program Files\Java\jre7\bin;C:\Windows\Sun\Java\bin;C:\Windows\system32;C:\Windows;C:\Program Files (x86)\NVIDIA Corporation\PhysX\Common;C:\Program Files (x86)\Intel\iCLS Client\;C:\Program Files\Intel\iCLS Client\;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.\;C:\Program Files\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files (x86)\Intel\OpenCL SDK\3.0\bin\x86;C:\Program Files (x86)\Intel\OpenCL SDK\3.0\bin\x64;D:\Program Files\Java\jdk1..0_40\bin;D:\Program Files\Java\jdk1..0_40\jre\bin;D:\Program Files\TortoiseSVN\bin;D:\Program Files (x86)\ant\bin;D:\Program Files\maven3\bin;.
WARN - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
DEBUG - Falling back to shell based
DEBUG - Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
DEBUG - Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=
DEBUG - hadoop login
DEBUG - hadoop login commit
DEBUG - using local user:NTUserPrincipal: Administrator
DEBUG - UGI loginUser:Administrator (auth:SIMPLE)
DEBUG - dfs.client.use.legacy.blockreader.local = false
DEBUG - dfs.client.read.shortcircuit = false
DEBUG - dfs.client.domain.socket.data.traffic = false
DEBUG - dfs.domain.socket.path =
DEBUG - StartupProgress, NameNode startup progress
DEBUG - multipleLinearRandomRetry = null
DEBUG - rpcKind=RPC_PROTOCOL_BUFFER, rpcRequestWrapperClass=class org.apache.hadoop.ipc.ProtobufRpcEngine$RpcRequestWrapper, rpcInvoker=org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker@1afde4a3
DEBUG - Both short-circuit local reads and UNIX domain socket are disabled.
DEBUG - The ping interval is ms.
DEBUG - Connecting to /192.168.0.160:
DEBUG - IPC Client () connection to /192.168.0.160: from Administrator: starting, having connections
DEBUG - IPC Client () connection to /192.168.0.160: from Administrator sending #
DEBUG - IPC Client () connection to /192.168.0.160: from Administrator got value #
DEBUG - Call: getListing took 136ms
ls: /
==========================================================
name: hdfs://192.168.0.160:8020/data, folder: true, size: 0
name: hdfs://192.168.0.160:8020/fulong, folder: true, size: 0
name: hdfs://192.168.0.160:8020/test, folder: true, size: 0
name: hdfs://192.168.0.160:8020/tmp, folder: true, size: 0
name: hdfs://192.168.0.160:8020/user, folder: true, size: 0
name: hdfs://192.168.0.160:8020/workspace, folder: true, size: 0
==========================================================
DEBUG - Stopping client
DEBUG - IPC Client () connection to /192.168.0.160: from Administrator: closed
DEBUG - IPC Client () connection to /192.168.0.160: from Administrator: stopped, remaining connections

6. Test the HBase code:

package com.rockontrol.tryhbase;

import static org.junit.Assert.*;

import java.io.IOException;
import java.io.InputStream;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.log4j.Logger;
import org.junit.Test;

public class TestUseHbase {

    private String table = "Tenant";
    private String cfs[] = {"i"};
    private final int availableProcessors =
            Runtime.getRuntime().availableProcessors();
    private ExecutorService exec =
            Executors.newFixedThreadPool(availableProcessors * 2); // multiplier lost in the original; 2 is an example
    private Random rnd = new Random();
    private final int ROW_KEY_LEN = Bytes.SIZEOF_LONG + Bytes.SIZEOF_BYTE;
    private final String colId = "id";
    private final String colStat = "stat";
    private final String colCert = "cert";

    private Configuration conf;
    private HTablePool pool;

    private static final Logger logger =
            Logger.getLogger(TestUseHbase.class);

    public TestUseHbase() throws Exception {
        conf = new Configuration();
        conf.addResource(getHbaseConfStream());
        pool = new HTablePool(conf, 10); // pool size lost in the original; 10 is an example
    }

    @Test
    public void testSetupTable() throws Exception {
        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
            if (admin.tableExists(table)) {
                logger.info("table already exists!");
            } else {
                HTableDescriptor tableDesc = new HTableDescriptor(table);
                for (String cf : cfs) {
                    tableDesc.addFamily(new HColumnDescriptor(cf));
                }
                admin.createTable(tableDesc);
                logger.info("table created!");
            }
        } finally {
            admin.close();
        }
    }

    @Test
    public void testPuts() throws Exception {
        final HTable htable = (HTable) pool.getTable(table);
        // put random ids
        for (int i = 0; i < 100; i++) { // loop count lost in the original; 100 is an example
            exec.execute(new Runnable() {
                @Override
                public void run() {
                    long authId = getAuthId();
                    byte[] rowkey = createRowKey(authId, (byte) 0); // stat value lost in the original; 0 is an example
                    htable.setAutoFlush(false);
                    Put put = new Put(rowkey);
                    put.add(cfs[0].getBytes(), colId.getBytes(), String.valueOf(authId)
                            .getBytes());
                    put.add(cfs[0].getBytes(), colStat.getBytes(), String.valueOf(0)
                            .getBytes());
                    try {
                        synchronized (htable) {
                            htable.put(put);
                            htable.flushCommits();
                        }
                    } catch (IOException e) {
                        logger.error("ERROR: insert authId=" + authId, e);
                    }
                }
            });
        }
        exec.shutdown();

        int count = 0;
        while (!exec.awaitTermination(10, TimeUnit.SECONDS)) { // timeout lost in the original; 10s is an example
            logger.warn("thread pool is still running");
            if (count++ > 3) { // retry limit lost in the original; 3 is an example
                logger.warn("force to exit anyway...");
                break;
            }
        }
        htable.flushCommits();
        pool.putTable(htable);
    }

    @Test
    public void testFullScan() throws Exception {
        HTable htable = (HTable) pool.getTable(table);
        long last = Long.MIN_VALUE;

        ResultScanner rs = htable.getScanner(new Scan());
        long authId = 0;
        byte stat = 0;
        String strAuthId;
        String strStat;
        for (Result r : rs) {
            KeyValue kvId = r.getColumnLatest(cfs[0].getBytes(), colId.getBytes());
            KeyValue kvStat = r.getColumnLatest(cfs[0].getBytes(), colStat.getBytes());
            if (kvId != null && kvStat != null) {
                strAuthId = new String(kvId.getValue());
                strStat = new String(kvStat.getValue());
                authId = getIdByRowKey(kvId.getKey());
                stat = getStatByRowKey(kvId.getKey());
                assertTrue("last=" + last +
                        ", current=" + authId, authId >= last); // incremental sorted
                last = authId;
                logger.info("authId=" + authId + ", stat=" + stat + ", value=[" + strAuthId
                        + ", " + strStat + "]");
            } else {
                for (KeyValue kv : r.raw()) {
                    authId = getIdByRowKey(kv.getKey());
                    stat = getStatByRowKey(kv.getKey());
                    assertTrue("last=" + last +
                            ", current=" + authId, authId >= last); // incremental sorted
                    last = authId;
                    logger.info("authId=" + authId + ", stat=" + stat);
                    logger.info(new String(kv.getValue()));
                }
            }
        }
    }

    @Test
    public void testSpecScan() throws Exception {
        HTable htable = (HTable) pool.getTable(table);
        long specId = getAuthId();
        byte[] rowkey = createRowKey(specId, (byte) 0); // stat value lost in the original; 0 is an example

        // PUT
        Put put = new Put(rowkey);
        put.add(cfs[0].getBytes(), colId.getBytes(), String.valueOf(specId)
                .getBytes());
        put.add(cfs[0].getBytes(), colStat.getBytes(), String.valueOf(0)
                .getBytes());
        htable.put(put);

        // Get with rowkey
        Get scan = new Get(rowkey);
        Result r = htable.get(scan);
        assertTrue(!r.isEmpty());
        long id = 0;
        for (KeyValue kv : r.raw()) {
            id = getIdByRowKey(kv.getKey());
            assertEquals(specId, id);
            logger.info("authId=" + id +
                    ", cf=" + new String(kv.getFamily()) +
                    ", key=" + new String(kv.getQualifier()) +
                    ", value=" + new String(kv.getValue()));
        }

        // Put with the same specId but a different stat and a different column
        rowkey = createRowKey(specId, (byte) 1); // stat value lost in the original; 1 is an example
        put = new Put(rowkey);
        put.add(cfs[0].getBytes(), colCert.getBytes(), "xyz".getBytes());
        htable.put(put);

        // Get with rowkey prefix
        Scan s = new Scan();
        s.setFilter(new PrefixFilter(createRowKeyPrefix(specId)));
        ResultScanner rs = htable.getScanner(s);
        for (Result ret : rs) {
            String strk = new String(ret.getRow());
            logger.info("ret=" + strk);
            for (KeyValue kv : ret.raw()) {
                id = getIdByRowKey(kv.getKey());
                assertEquals(specId, id);
                logger.info("authId=" + id +
                        ", stat=" + getStatByRowKey(kv.getKey()) +
                        ", cf=" + new String(kv.getFamily()) +
                        ", key=" + new String(kv.getQualifier()) +
                        ", value=" + new String(kv.getValue()));
            }
        }

        // Get with start and end row
        s = new Scan();
        s.setStartRow(createRowKeyPrefix(specId));
        s.setStopRow(createRowKeyPrefix(specId + 1));
        rs = htable.getScanner(s);
        for (Result ret : rs) {
            String strk = new String(ret.getRow());
            logger.info("ret=" + strk);
            for (KeyValue kv : ret.raw()) {
                id = getIdByRowKey(kv.getKey());
                assertEquals(specId, id);
                logger.info("authId=" + id +
                        ", stat=" + getStatByRowKey(kv.getKey()) +
                        ", cf=" + new String(kv.getFamily()) +
                        ", key=" + new String(kv.getQualifier()) +
                        ", value=" + new String(kv.getValue()));
            }
        }
    }

    @Test
    public void testBytesConv() throws Exception {
        long a = 1000L; // test values lost in the original; these are examples
        byte s = 1;
        byte[] data = new byte[ROW_KEY_LEN];
        int off = Bytes.putLong(data, 0, a);
        Bytes.putByte(data, off, s);
        long b = Bytes.toLong(data);
        byte t = data[Bytes.SIZEOF_LONG];
        assertEquals(a, b);
        assertEquals(s, t);
    }

    private byte[] createRowKey(long authId, byte stat) {
        byte[] rowkey = new byte[ROW_KEY_LEN];
        int off = Bytes.putLong(rowkey, 0, authId);
        Bytes.putByte(rowkey, off, stat);
        return rowkey;
    }

    private byte[] createRowKeyPrefix(long authId) {
        byte[] prefix = new byte[Bytes.SIZEOF_LONG];
        Bytes.putLong(prefix, 0, authId);
        return prefix;
    }

    private long getIdByRowKey(byte[] rowkey) {
        // HACK: kv.getKey() starts with a 2-byte row length, so skip SIZEOF_SHORT
        return Bytes.toLong(rowkey, Bytes.SIZEOF_SHORT);
    }

    private byte getStatByRowKey(byte[] rowkey) {
        // HACK: the stat byte is the last byte of the row portion of the key
        return rowkey[Bytes.SIZEOF_SHORT + ROW_KEY_LEN - 1];
    }

    private long getAuthId() {
        long authId = rnd.nextLong();
        authId = authId > 0 ? authId : -authId;
        return authId;
    }

    private static InputStream getHbaseConfStream() throws Exception {
        return TestUseHbase.class.getClassLoader().getResourceAsStream("hbase-site.xml");
    }
}
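The TestUseHbase constructor loads hbase-site.xml from the classpath via getHbaseConfStream(), so a copy of the cluster's hbase-site.xml must sit in src/main/resources. A minimal sketch of the client-side file, assuming the compute1 quorum used by this cluster and ZooKeeper's default client port 2181:

<configuration>
    <property>
        <name>hbase.zookeeper.quorum</name>
        <value>compute1</value>
    </property>
    <property>
        <name>hbase.zookeeper.property.clientPort</name>
        <value>2181</value>
    </property>
</configuration>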

7. It runs successfully:

-- ::  [ main: ] - [ DEBUG ]  field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Rate of successful kerberos logins and latency (milliseconds)], always=false, type=DEFAULT, sampleName=Ops)
-- :: [ main: ] - [ DEBUG ] field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Rate of failed kerberos logins and latency (milliseconds)], always=false, type=DEFAULT, sampleName=Ops)
-- :: [ main: ] - [ DEBUG ] UgiMetrics, User and group related metrics
-- :: [ main: ] - [ DEBUG ] Kerberos krb5 configuration not found, setting default realm to empty
-- :: [ main: ] - [ DEBUG ] Creating new Groups object
-- :: [ main: ] - [ DEBUG ] Trying to load the custom-built native-hadoop library...
-- :: [ main: ] - [ DEBUG ] Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path
-- :: [ main: ] - [ DEBUG ] java.library.path=D:\Program Files\Java\jdk1..0_45\bin;C:\Windows\Sun\Java\bin;C:\Windows\system32;C:\Windows;D:\Perl64\bin;D:\Perl64\site\bin;C:\Program Files (x86)\Common Files\NetSarang;C:\Program Files (x86)\Intel\iCLS Client\;C:\Program Files\Intel\iCLS Client\;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.\;C:\Program Files (x86)\Intel\OpenCL SDK\2.0\bin\x86;C:\Program Files (x86)\Intel\OpenCL SDK\2.0\bin\x64;C:\Program Files\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT;D:\java\maven/bin;D:\Program Files\Java\jdk1.8.0/bin;d:\Program Files (x86)\YYXT\AudioEditorOCX;D:\Program Files\MySQL\MySQL Server 5.5\bin;D:\hadoop\apache-ant-1.9.\bin;D:\Program Files\nodejs\;D:\Program Files\TortoiseSVN\bin;D:\Perl64\bin;D:\Perl64\site\bin;C:\Users\lenovo\AppData\Roaming\npm;.
-- :: [ main: ] - [ WARN ] Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
-- :: [ main: ] - [ DEBUG ] Falling back to shell based
-- :: [ main: ] - [ DEBUG ] Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
-- :: [ main: ] - [ DEBUG ] Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=
-- :: [ main: ] - [ DEBUG ] hadoop login
-- :: [ main: ] - [ DEBUG ] hadoop login commit
-- :: [ main: ] - [ DEBUG ] using local user:NTUserPrincipal: lenovo
-- :: [ main: ] - [ DEBUG ] UGI loginUser:lenovo (auth:SIMPLE)
-- :: [ main: ] - [ INFO ] Client environment:zookeeper.version=3.4.-, built on // : GMT
-- :: [ main: ] - [ INFO ] Client environment:host.name=qiaokai-PC
-- :: [ main: ] - [ INFO ] Client environment:java.version=1.7.0_45
-- :: [ main: ] - [ INFO ] Client environment:java.vendor=Oracle Corporation
-- :: [ main: ] - [ INFO ] Client environment:java.home=D:\Program Files\Java\jdk1..0_45\jre
-- :: [ main: ] - [ INFO ] Client environment:java.class.path=D:\Users\lenovo\koalaSP\dbhbase\target\classes;D:\java\mavenRepo\org\apache\hbase\hbase-client\0.96.-hadoop2\hbase-client-0.96.-hadoop2.jar;D:\java\mavenRepo\org\apache\hbase\hbase-common\0.96.-hadoop2\hbase-common-0.96.-hadoop2.jar;D:\java\mavenRepo\commons-collections\commons-collections\3.2.\commons-collections-3.2..jar;D:\java\mavenRepo\org\apache\hbase\hbase-protocol\0.96.-hadoop2\hbase-protocol-0.96.-hadoop2.jar;D:\java\mavenRepo\commons-codec\commons-codec\1.7\commons-codec-1.7.jar;D:\java\mavenRepo\commons-io\commons-io\2.4\commons-io-2.4.jar;D:\java\mavenRepo\commons-lang\commons-lang\2.6\commons-lang-2.6.jar;D:\java\mavenRepo\commons-logging\commons-logging\1.1.\commons-logging-1.1..jar;D:\java\mavenRepo\com\google\guava\guava\12.0.\guava-12.0..jar;D:\java\mavenRepo\com\google\code\findbugs\jsr305\1.3.\jsr305-1.3..jar;D:\java\mavenRepo\com\google\protobuf\protobuf-java\2.5.\protobuf-java-2.5..jar;D:\java\mavenRepo\io\netty\netty\3.6..Final\netty-3.6..Final.jar;D:\java\mavenRepo\org\apache\zookeeper\zookeeper\3.4.\zookeeper-3.4..jar;D:\java\mavenRepo\org\slf4j\slf4j-api\1.6.\slf4j-api-1.6..jar;D:\java\mavenRepo\org\slf4j\slf4j-log4j12\1.6.\slf4j-log4j12-1.6..jar;D:\java\mavenRepo\org\cloudera\htrace\htrace-core\2.04\htrace-core-2.04.jar;D:\java\mavenRepo\org\codehaus\jackson\jackson-mapper-asl\1.8.\jackson-mapper-asl-1.8..jar;D:\java\mavenRepo\org\apache\hadoop\hadoop-common\2.2.\hadoop-common-2.2..jar;D:\java\mavenRepo\org\apache\commons\commons-math\2.1\commons-math-2.1.jar;D:\java\mavenRepo\commons-httpclient\commons-httpclient\3.1\commons-httpclient-3.1.jar;D:\java\mavenRepo\commons-net\commons-net\3.1\commons-net-3.1.jar;D:\java\mavenRepo\com\sun\jersey\jersey-json\1.9\jersey-json-1.9.jar;D:\java\mavenRepo\org\codehaus\jettison\jettison\1.1\jettison-1.1.jar;D:\java\mavenRepo\stax\stax-api\1.0.\stax-api-1.0..jar;D:\java\mavenRepo\com\sun\xml\bind\jaxb-impl\2.2.-\jaxb-impl-2.2.-.jar;D:\java\mavenRepo\javax\xml\bind\jaxb-api\2.2.\jaxb-api-2.2..jar;D:\java\mavenRepo\javax\activation\activation\1.1\activation-1.1.jar;D:\java\mavenRepo\org\codehaus\jackson\jackson-jaxrs\1.8.\jackson-jaxrs-1.8..jar;D:\java\mavenRepo\org\codehaus\jackson\jackson-xc\1.8.\jackson-xc-1.8..jar;D:\java\mavenRepo\commons-el\commons-el\1.0\commons-el-1.0.jar;D:\java\mavenRepo\net\java\dev\jets3t\jets3t\0.6.\jets3t-0.6..jar;D:\java\mavenRepo\commons-configuration\commons-configuration\1.6\commons-configuration-1.6.jar;D:\java\mavenRepo\commons-digester\commons-digester\1.8\commons-digester-1.8.jar;D:\java\mavenRepo\commons-beanutils\commons-beanutils\1.7.\commons-beanutils-1.7..jar;D:\java\mavenRepo\commons-beanutils\commons-beanutils-core\1.8.\commons-beanutils-core-1.8..jar;D:\java\mavenRepo\org\apache\avro\avro\1.7.\avro-1.7..jar;D:\java\mavenRepo\com\thoughtworks\paranamer\paranamer\2.3\paranamer-2.3.jar;D:\java\mavenRepo\org\xerial\snappy\snappy-java\1.0.4.1\snappy-java-1.0.4.1.jar;D:\java\mavenRepo\com\jcraft\jsch\0.1.\jsch-0.1..jar;D:\java\mavenRepo\org\apache\commons\commons-compress\1.4.\commons-compress-1.4..jar;D:\java\mavenRepo\org\tukaani\xz\1.0\xz-1.0.jar;D:\java\mavenRepo\org\apache\hadoop\hadoop-auth\2.2.\hadoop-auth-2.2..jar;D:\java\mavenRepo\org\apache\hadoop\hadoop-mapreduce-client-core\2.2.\hadoop-mapreduce-client-core-2.2..jar;D:\java\mavenRepo\org\apache\hadoop\hadoop-yarn-common\2.2.\hadoop-yarn-common-2.2..jar;D:\java\mavenRepo\org\apache\hadoop\hadoop-yarn-api\2.2.\hadoop-yarn-api-2.2..jar;D:\java\mavenRepo\com\google\injec
t\guice\3.0\guice-3.0.jar;D:\java\mavenRepo\javax\inject\javax.inject\\javax.inject-.jar;D:\java\mavenRepo\aopalliance\aopalliance\1.0\aopalliance-1.0.jar;D:\java\mavenRepo\com\sun\jersey\contribs\jersey-guice\1.9\jersey-guice-1.9.jar;D:\java\mavenRepo\com\google\inject\extensions\guice-servlet\3.0\guice-servlet-3.0.jar;D:\java\mavenRepo\org\apache\hadoop\hadoop-annotations\2.2.\hadoop-annotations-2.2..jar;D:\java\mavenRepo\com\github\stephenc\findbugs\findbugs-annotations\1.3.-\findbugs-annotations-1.3.-.jar;D:\java\mavenRepo\junit\junit\4.11\junit-4.11.jar;D:\java\mavenRepo\org\hamcrest\hamcrest-core\1.3\hamcrest-core-1.3.jar;D:\java\mavenRepo\org\apache\hadoop\hadoop-hdfs\2.2.\hadoop-hdfs-2.2..jar;D:\java\mavenRepo\org\mortbay\jetty\jetty\6.1.\jetty-6.1..jar;D:\java\mavenRepo\org\mortbay\jetty\jetty-util\6.1.\jetty-util-6.1..jar;D:\java\mavenRepo\com\sun\jersey\jersey-core\1.9\jersey-core-1.9.jar;D:\java\mavenRepo\com\sun\jersey\jersey-server\1.9\jersey-server-1.9.jar;D:\java\mavenRepo\asm\asm\3.1\asm-3.1.jar;D:\java\mavenRepo\commons-cli\commons-cli\1.2\commons-cli-1.2.jar;D:\java\mavenRepo\commons-daemon\commons-daemon\1.0.\commons-daemon-1.0..jar;D:\java\mavenRepo\javax\servlet\jsp\jsp-api\2.1\jsp-api-2.1.jar;D:\java\mavenRepo\log4j\log4j\1.2.\log4j-1.2..jar;D:\java\mavenRepo\javax\servlet\servlet-api\2.5\servlet-api-2.5.jar;D:\java\mavenRepo\org\codehaus\jackson\jackson-core-asl\1.8.\jackson-core-asl-1.8..jar;D:\java\mavenRepo\tomcat\jasper-runtime\5.5.\jasper-runtime-5.5..jar;D:\java\mavenRepo\xmlenc\xmlenc\0.52\xmlenc-0.52.jar;D:\Program Files\Java\jdk1.8.0\lib\tools.jar
-- :: [ main: ] - [ INFO ] Client environment:java.library.path=D:\Program Files\Java\jdk1..0_45\bin;C:\Windows\Sun\Java\bin;C:\Windows\system32;C:\Windows;D:\Perl64\bin;D:\Perl64\site\bin;C:\Program Files (x86)\Common Files\NetSarang;C:\Program Files (x86)\Intel\iCLS Client\;C:\Program Files\Intel\iCLS Client\;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.\;C:\Program Files (x86)\Intel\OpenCL SDK\2.0\bin\x86;C:\Program Files (x86)\Intel\OpenCL SDK\2.0\bin\x64;C:\Program Files\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT;D:\java\maven/bin;D:\Program Files\Java\jdk1.8.0/bin;d:\Program Files (x86)\YYXT\AudioEditorOCX;D:\Program Files\MySQL\MySQL Server 5.5\bin;D:\hadoop\apache-ant-1.9.\bin;D:\Program Files\nodejs\;D:\Program Files\TortoiseSVN\bin;D:\Perl64\bin;D:\Perl64\site\bin;C:\Users\lenovo\AppData\Roaming\npm;.
-- :: [ main: ] - [ INFO ] Client environment:java.io.tmpdir=C:\Users\lenovo\AppData\Local\Temp\
-- :: [ main: ] - [ INFO ] Client environment:java.compiler=<NA>
-- :: [ main: ] - [ INFO ] Client environment:os.name=Windows
-- :: [ main: ] - [ INFO ] Client environment:os.arch=amd64
-- :: [ main: ] - [ INFO ] Client environment:os.version=6.1
-- :: [ main: ] - [ INFO ] Client environment:user.name=lenovo
-- :: [ main: ] - [ INFO ] Client environment:user.home=C:\Users\lenovo
-- :: [ main: ] - [ INFO ] Client environment:user.dir=D:\Users\lenovo\koalaSP\dbhbase
-- :: [ main: ] - [ INFO ] Initiating client connection, connectString=compute1: sessionTimeout= watcher=hconnection-0xda5a705, quorum=compute1:, baseZNode=/hbase
-- :: [ main: ] - [ DEBUG ] zookeeper.disableAutoWatchReset is false
-- :: [ main: ] - [ INFO ] Process identifier=hconnection-0xda5a705 connecting to ZooKeeper ensemble=compute1:
-- :: [ main-SendThread(compute1:): ] - [ INFO ] Opening socket connection to server compute1/192.168.11.39:. Will not attempt to authenticate using SASL (unknown error)
-- :: [ main-SendThread(compute1:): ] - [ INFO ] Socket connection established to compute1/192.168.11.39:, initiating session
-- :: [ main-SendThread(compute1:): ] - [ DEBUG ] Session establishment request sent on compute1/192.168.11.39:
-- :: [ main-SendThread(compute1:): ] - [ INFO ] Session establishment complete on server compute1/192.168.11.39:, sessionid = 0x2483a55a18c0013, negotiated timeout =
-- :: [ main-EventThread: ] - [ DEBUG ] hconnection-0xda5a705, quorum=compute1:, baseZNode=/hbase Received ZooKeeper Event, type=None, state=SyncConnected, path=null
-- :: [ main-EventThread: ] - [ DEBUG ] hconnection-0xda5a705-0x2483a55a18c0013 connected
-- :: [ main-SendThread(compute1:): ] - [ DEBUG ] Reading reply sessionid:0x2483a55a18c0013, packet:: clientPath:null serverPath:null finished:false header:: , replyHeader:: ,, request:: '/hbase/hbaseid,F response:: s{4294967310,4294967310,1409728069737,1409728069737,0,0,0,0,67,0,4294967310}
-- :: [ main-SendThread(compute1:): ] - [ DEBUG ] Reading reply sessionid:0x2483a55a18c0013, packet:: clientPath:null serverPath:null finished:false header:: , replyHeader:: ,, request:: '/hbase/hbaseid,F response:: #ffffffff000146d61737465723a363030303033ffffff8036ffffff94ffffffabcfffffffd6750425546a2430643537303664662d653431622d343332382d383833342d356533643531363362393736,s{4294967310,4294967310,1409728069737,1409728069737,0,0,0,0,67,0,4294967310}
-- :: [ main: ] - [ DEBUG ] Codec=org.apache.hadoop.hbase.codec.KeyValueCodec@453d9468, compressor=null, tcpKeepAlive=true, tcpNoDelay=true, maxIdleTime=, maxRetries=, fallbackAllowed=false, ping interval=60000ms, bind address=null
-- :: [ main-SendThread(compute1:): ] - [ DEBUG ] Reading reply sessionid:0x2483a55a18c0013, packet:: clientPath:null serverPath:null finished:false header:: , replyHeader:: ,, request:: '/hbase/meta-region-server,F response:: #ffffffff0001a726567696f6e7365727665723a3630303230ffffffff133cffffff88341c1effffffef50425546a15a8636f6d707574653110fffffff4ffffffd4318ffffffe4ffffffefffffffdcffffffd2ffffff8329100,s{4294967339,4294967339,1409728076023,1409728076023,0,0,0,0,60,0,4294967339}
-- :: [ main-SendThread(compute1:): ] - [ DEBUG ] Reading reply sessionid:0x2483a55a18c0013, packet:: clientPath:null serverPath:null finished:false header:: , replyHeader:: ,, request:: '/hbase/meta-region-server,F response:: #ffffffff0001a726567696f6e7365727665723a3630303230ffffffff133cffffff88341c1effffffef50425546a15a8636f6d707574653110fffffff4ffffffd4318ffffffe4ffffffefffffffdcffffffd2ffffff8329100,s{4294967339,4294967339,1409728076023,1409728076023,0,0,0,0,60,0,4294967339}
-- :: [ main-SendThread(compute1:): ] - [ DEBUG ] Reading reply sessionid:0x2483a55a18c0013, packet:: clientPath:null serverPath:null finished:false header:: , replyHeader:: ,, request:: '/hbase/meta-region-server,F response:: #ffffffff0001a726567696f6e7365727665723a3630303230ffffffff133cffffff88341c1effffffef50425546a15a8636f6d707574653110fffffff4ffffffd4318ffffffe4ffffffefffffffdcffffffd2ffffff8329100,s{4294967339,4294967339,1409728076023,1409728076023,0,0,0,0,60,0,4294967339}
-- :: [ main: ] - [ DEBUG ] Use SIMPLE authentication for service ClientService, sasl=false
-- :: [ main: ] - [ DEBUG ] Connecting to compute1/192.168.11.39:
-- :: [ IPC Client () connection to compute1/192.168.11.39: from lenovo: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: starting, connections
-- :: [ main: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: wrote request header call_id: method_name: "Get" request_param: true
-- :: [ IPC Client () connection to compute1/192.168.11.39: from lenovo: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: got response header call_id: , totalSize: bytes
-- :: [ main-SendThread(compute1:): ] - [ DEBUG ] Reading reply sessionid:0x2483a55a18c0013, packet:: clientPath:null serverPath:null finished:false header:: , replyHeader:: ,, request:: '/hbase/meta-region-server,F response:: #ffffffff0001a726567696f6e7365727665723a3630303230ffffffff133cffffff88341c1effffffef50425546a15a8636f6d707574653110fffffff4ffffffd4318ffffffe4ffffffefffffffdcffffffd2ffffff8329100,s{4294967339,4294967339,1409728076023,1409728076023,0,0,0,0,60,0,4294967339}
-- :: [ main: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: wrote request header call_id: method_name: "Scan" request_param: true
-- :: [ IPC Client () connection to compute1/192.168.11.39: from lenovo: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: got response header call_id: , totalSize: bytes
-- :: [ main: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: wrote request header call_id: method_name: "Scan" request_param: true priority:
-- :: [ IPC Client () connection to compute1/192.168.11.39: from lenovo: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: got response header call_id: cell_block_meta { length: }, totalSize: bytes
-- :: [ main: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: wrote request header call_id: method_name: "Scan" request_param: true
-- :: [ IPC Client () connection to compute1/192.168.11.39: from lenovo: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: got response header call_id: , totalSize: bytes
-- :: [ main: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: wrote request header call_id: method_name: "Scan" request_param: true
-- :: [ IPC Client () connection to compute1/192.168.11.39: from lenovo: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: got response header call_id: , totalSize: bytes
-- :: [ main: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: wrote request header call_id: method_name: "Scan" request_param: true
-- :: [ IPC Client () connection to compute1/192.168.11.39: from lenovo: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: got response header call_id: , totalSize: bytes
-- :: [ main: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: wrote request header call_id: method_name: "Scan" request_param: true
-- :: [ IPC Client () connection to compute1/192.168.11.39: from lenovo: ] - [ DEBUG ] IPC Client () connection to compute1/192.168.11.39: from lenovo: got response header call_id: , totalSize: bytes
Summary:
1. Unpack a copy of hadoop-2.2.0.tar.gz into a program directory on Win7; the Hadoop version must match the cluster's version exactly. Then copy the following configuration files from the cluster over the corresponding files in the local Win7 copy:
core-site.xml
hdfs-site.xml
mapred-site.xml
yarn-site.xml
 
2. After creating the Java project in Eclipse, it is simplest to import all of the hadoop2.2.0-related jars directly, i.e. the jars under the following directories:
share\hadoop\common
share\hadoop\hdfs
share\hadoop\mapreduce
share\hadoop\yarn
 
Note: this step is unnecessary if you use the Hadoop Eclipse plugin, but the 2.2.0 plugin has to be built yourself; the build process is covered in another post of mine:
 
3. Set the %HADOOP_HOME% environment variable on Win7 and add %HADOOP_HOME%\bin to the PATH environment variable.
 
4. Download https://github.com/srccodes/hadoop-common-2.2.0-bin and, after unpacking, overwrite %HADOOP_HOME%\bin with the downloaded bin directory.
 
5. Follow the Hadoop cluster's configuration: the "hadoop address:port" used in the Eclipse program must match the cluster configuration, for example:
<property>
    <name>fs.default.name</name>
    <value>hdfs://singlehadoop:8020</value>
</property>
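If the cluster's core-site.xml is not on the client classpath, the same address can also be set directly in code. A small sketch using the hostname and port from the property above (fs.defaultFS is the 2.x name for fs.default.name):

Configuration conf = new Configuration();
conf.set("fs.defaultFS", "hdfs://singlehadoop:8020"); // must match the cluster's fs.default.name
FileSystem fs = FileSystem.get(conf);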
 
6. Add the following property to hdfs-site.xml on the Hadoop cluster to disable permission checking:
<property>     
    <name>dfs.permissions</name>    
    <value>false</value>
</property>
 
7. HBase configuration:

<property>
<name>hbase.zookeeper.quorum</name>
<value>compute1</value>
</property>

Be sure to set the quorum value to hostnames; the quorum members must be slave nodes, and there must be an odd number of them.
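The same value can also be set programmatically on the client Configuration. A small sketch; the hostname comes from the property above and 2181 is ZooKeeper's default client port:

Configuration conf = HBaseConfiguration.create();
conf.set("hbase.zookeeper.quorum", "compute1");          // must match the cluster's hbase-site.xml
conf.set("hbase.zookeeper.property.clientPort", "2181"); // ZooKeeper default; adjust if changed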

On Windows, edit the hosts file under C:\Windows\System32\drivers\etc so that it matches the host mappings used by the cluster servers:

192.168.14.20 CS020
192.168.14.16 CS016
192.168.11.37 master
192.168.11.39 compute1
192.168.11.40 thinkit-4

 
