<?xml version='1.0' encoding='utf-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
<!-- Start of the Hibernate configuration -->
<hibernate-configuration>
  <!-- Everything below configures the session factory. SessionFactory is the Hibernate class that holds the configuration and hands out Session instances. -->
  <session-factory>
    <!-- hibernate.dialect is the database dialect Hibernate should use, i.e. which kind of database server it is talking to. -->
    <property name="hibernate.dialect">org.hibernate.dialect.MySQLDialect</property>
    <!-- The JDBC driver class; Hibernate needs it to open connections to the database. -->
    <property name="hibernate.connection.driver_class">com.mysql.jdbc.Driver</property>
    <!-- The JDBC connection URL: jdbc:mysql://localhost:3306/log, where localhost is the MySQL server (the local machine here) and log is the database name. -->
    <property name="hibernate.connection.url">jdbc:mysql://localhost:3306/log</property>
    <!-- User name for the database connection -->
    <property name="hibernate.connection.username">root</property>
    <!-- Password for the database connection -->
    <property name="hibernate.connection.password">root</property>
    <!-- Size of Hibernate's built-in connection pool -->
    <property name="hibernate.connection.pool_size">1000</property>
    <!-- Whether to print the SQL Hibernate generates. Set it to true during development so the statements show up in the Eclipse console for debugging; set it to false once the project is deployed to improve performance. -->
    <property name="hibernate.show_sql">false</property>
    <!-- jdbc.fetch_size is the number of rows Hibernate fetches from the database into the JDBC Statement at a time. A larger fetch size means fewer round trips and faster reads; a smaller one means more round trips and slower reads. -->
    <property name="jdbc.fetch_size">50</property>
    <!-- jdbc.batch_size is the number of records per batch for bulk inserts, updates and deletes. A larger batch size means fewer SQL round trips and faster execution, at the cost of more memory. -->
    <property name="jdbc.batch_size">50</property>
    <!-- jdbc.use_scrollable_resultset controls whether Hibernate may use JDBC scrollable result sets, which is very helpful for paging. -->
    <property name="jdbc.use_scrollable_resultset">false</property>
    <!-- connection.useUnicode: whether to use Unicode when talking to the database -->
    <property name="connection.useUnicode">true</property>
    <!-- connection.characterEncoding: the character encoding used when transferring data to and from the database -->
    <property name="connection.characterEncoding">UTF-8</property>
    <!-- The mapping file for the LogInfo entity -->
    <mapping resource="com/hjc/backup/entity/LogInfo.hbm.xml" />
  </session-factory>
</hibernate-configuration>
  This is the most basic configuration, without any extra components. In a while I will also post a configuration that uses a connection pool component.
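  For reference, Hibernate finds this file on its own: new Configuration().configure(), called with no arguments, loads hibernate.cfg.xml from the root of the classpath, and the SessionFactory is then built from it. A minimal sketch (the package and class name here are hypothetical, purely for illustration):

package com.hjc.backup.util; // hypothetical package, for illustration only

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class ConfigSmokeTest {

    public static void main(String[] args) {
        // configure() with no arguments reads hibernate.cfg.xml from the classpath root
        Configuration configuration = new Configuration().configure();
        // Building a SessionFactory is expensive, so it is normally built once and shared
        SessionFactory sessionFactory = configuration.buildSessionFactory();
        System.out.println("SessionFactory built: " + sessionFactory);
        sessionFactory.close();
    }
}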
  To make it easier to review later, here is the entity class LogInfo.java:
package com.hjc.backup.entity;

import java.io.Serializable;

import com.hjc.backup.bean.Area;

public class LogInfo implements Serializable {

    private static final long serialVersionUID = 1L;

    private Long id;

    private String srcIp;

    private String srcPort;

    private String desIp;

    private String desPort;

    private String mac;

    private String date;

    private String content;

    private Area area;

    public String getSrcIp() {
        return srcIp;
    }

    public void setSrcIp(String srcIp) {
        this.srcIp = srcIp;
    }

    public String getDesIp() {
        return desIp;
    }

    public void setDesIp(String desIp) {
        this.desIp = desIp;
    }

    public String getMac() {
        return mac;
    }

    public void setMac(String mac) {
        this.mac = mac;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public String getDate() {
        return date;
    }

    public void setDate(String date) {
        this.date = date;
    }

    public Area getArea() {
        return area;
    }

    public void setArea(Area area) {
        this.area = area;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getSrcPort() {
        return srcPort;
    }

    public void setSrcPort(String srcPort) {
        this.srcPort = srcPort;
    }

    public String getDesPort() {
        return desPort;
    }

    public void setDesPort(String desPort) {
        this.desPort = desPort;
    }
}

  The associated enum class, Area.java:

package com.hjc.backup.bean;

import java.io.Serializable;
import java.util.NoSuchElementException;

public enum Area implements Serializable {

    XIAOSHAN("xiaoshan"), HUZHOU("huzhou");

    private String key;

    private Area(String key) {
        this.key = key;
    }

    public static Area getArea(String key) {
        for (Area area : Area.values()) {
            if (key.equals(area.getKey())) {
                return area;
            }
        }
        throw new NoSuchElementException(key);
    }

    public String getKey() {
        return key;
    }
}

  To map the enum type to a varchar column in the database, I added the following class, HibernateVarCharEnum.java:

package com.hjc.backup.bean;

import java.sql.Types;
import java.util.Properties;

import org.hibernate.type.EnumType;

public class HibernateVarCharEnum extends EnumType {

    private static final long serialVersionUID = 1L;

    public void setParameterValues(Properties parameters) {
        // Force the underlying SQL type to VARCHAR before the standard EnumType setup runs
        parameters.setProperty(TYPE, "" + Types.VARCHAR);
        super.setParameterValues(parameters);
    }
}
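  One detail worth keeping in mind about this approach (this is my understanding of how EnumType behaves with a VARCHAR type code, so verify it against your Hibernate version): the column ends up holding the enum constant's name(), not the key field, so the two strings below are not the same thing:

package com.hjc.backup.bean; // hypothetical snippet, for illustration only

public class AreaColumnDemo {

    public static void main(String[] args) {
        // The constant's name() - what a name-based EnumType writes to the varchar column
        System.out.println(Area.XIAOSHAN.name());   // prints "XIAOSHAN"
        // The key field - what Area.getArea(...) and the DAO's query parameter use
        System.out.println(Area.XIAOSHAN.getKey()); // prints "xiaoshan"
    }
}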

  Finally, the entity's mapping file, LogInfo.hbm.xml:

<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE hibernate-mapping PUBLIC
"-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd"> <hibernate-mapping package="com.hjc.backup.entity"><!-- 这里的package记得要写对啊。要不然运行也会出错的 -->
<class name="LogInfo" table="LogInfo"><!-- 如果表名跟类名一个样的话,那么table属性可以不写 -->
<id name="id" type="java.lang.Long">
<column name="id"/>
<generator class="native">
</generator>
</id>
<property name="srcIp"></property>
<property name="srcPort"></property>
<property name="desIp"></property>
<property name="desPort"></property>
<property name="mac"></property>
<property name="date"></property>
<property name="content"></property>
<property name="area">
<type name="com.hjc.backup.bean.HibernateVarCharEnum">
<param name="enumClass">com.hjc.backup.bean.Area</param>
</type>
</property>
</class>
</hibernate-mapping>

  OK, that is all there is to the basic configuration. The test code follows.

  The DAO-layer class: HibernateDao.java

package com.hjc.backup.dao.hibernate;

import java.sql.SQLException;
import java.util.List;

import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cfg.Configuration;

import com.hjc.backup.bean.Area;
import com.hjc.backup.entity.LogInfo;

public class HibernateDao {

    private static Configuration configuration = new Configuration().configure();

    public SessionFactory getSessionFactory() {
        // Note: this builds a brand-new SessionFactory on every call
        return configuration.buildSessionFactory();
    }

    public void closeSessionFactory(SessionFactory sessionFactory) {
        if (sessionFactory != null) {
            sessionFactory.close();
        }
    }

    public boolean getByLogInfo(String srcIp, String desIp, long startTimestamp, long endTimestamp, Area area) throws SQLException {
        String sql = "from LogInfo l where l.srcIp = ? and l.desIp = ? and unix_timestamp(l.date) >= ? and unix_timestamp(l.date) < ? and l.area = ?";
        SessionFactory sessionFactory = getSessionFactory();
        Session session = sessionFactory.openSession();
        Transaction tr = session.beginTransaction();
        Query query = session.createQuery(sql)
                .setString(0, srcIp)
                .setString(1, desIp)
                .setLong(2, startTimestamp)
                .setLong(3, endTimestamp)
                .setString(4, area.getKey());
        @SuppressWarnings("unchecked")
        List<LogInfo> logInfos = query.list();
        tr.commit();
        session.close();
        closeSessionFactory(sessionFactory);
        // true means no matching record exists yet
        return logInfos.isEmpty();
    }

    public boolean saveLogoInfo(LogInfo logInfo) throws SQLException {
        SessionFactory sessionFactory = getSessionFactory();
        Session session = sessionFactory.openSession();
        Transaction tr = session.beginTransaction();
        Long result = (Long) session.save(logInfo);
        tr.commit();
        session.close();
        closeSessionFactory(sessionFactory);
        return result != 0;
    }
}
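  One thing to note about the class above: getSessionFactory() builds a brand-new SessionFactory for every call, and every build opens its own JDBC connections. A variant worth considering — only a sketch, not the code this post actually runs — keeps a single SessionFactory for the lifetime of the application and only opens and closes Sessions per operation:

package com.hjc.backup.dao.hibernate; // hypothetical variant, for illustration only

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

import com.hjc.backup.entity.LogInfo;

public class SingleFactoryDao {

    // Built once when the class is loaded and shared by every call
    private static final SessionFactory SESSION_FACTORY =
            new Configuration().configure().buildSessionFactory();

    public Long save(LogInfo logInfo) {
        Session session = SESSION_FACTORY.openSession();
        try {
            session.beginTransaction();
            Long id = (Long) session.save(logInfo);
            session.getTransaction().commit();
            return id;
        } finally {
            session.close();
        }
    }

    public static void shutdown() {
        // Call once when the application exits
        SESSION_FACTORY.close();
    }
}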

  The test class: LogService.java

package com.hjc.backup.service;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.List;

import com.hjc.backup.bean.Area;
import com.hjc.backup.dao.hibernate.HibernateDao;
import com.hjc.backup.entity.LogInfo;

public class LogService {

    private static HibernateDao hibernateDao = new HibernateDao();

    public void saveLogFiles() throws ClassNotFoundException, SQLException, IOException {
        Long count = 0L;
        BufferedReader br = null;
        String syslogPath = "D:\\syslog";
        File syslog = new File(syslogPath);
        for (String dayFileName : syslog.list()) {
            String dayFilePath = syslogPath + "\\" + dayFileName;
            File dayFile = new File(dayFilePath);
            if (dayFile.isFile()) {
                continue;
            }
            for (String logFileName : dayFile.list()) {
                String logFilePath = syslogPath + "\\" + dayFileName + "\\" + logFileName;
                System.out.println("Analyzing log file: " + logFilePath);
                File logFile = new File(logFilePath);
                try {
                    // The raw log files are GBK-encoded
                    br = new BufferedReader(new InputStreamReader(new FileInputStream(logFile), "GBK"));
                    String line = "";
                    while ((line = br.readLine()) != null) {
                        line = new String(line.getBytes(), "UTF-8");
                        int indexOfLen = line.indexOf("len");
                        int indexOfFrom = line.indexOf("from");
                        if (indexOfLen == -1 || indexOfFrom == -1) {
                            continue;
                        }
                        String srcIpAndPort = getSrcIpAndPortFromLog(line);
                        String srcIp = srcIpAndPort.split(":")[0];
                        String srcPort = srcIpAndPort.split(":")[1];
                        String desIpAndPort = getDesIpAndPortFromLog(line);
                        String desIp = desIpAndPort.split(":")[0];
                        String desPort = desIpAndPort.split(":")[1];
                        String date = getDateFromLog(line, indexOfLen, indexOfFrom);
                        String startDate = getStartDateFromLog(line, indexOfLen, indexOfFrom);
                        // Midnight of the log's day
                        // e.g. 11.21: 1384963200
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
                        long startTimeInMillis = sdf.parse(startDate).getTime();
                        long startTime = sdf.parse(startDate).getTime() / 1000;
                        // Midnight of the following day
                        // e.g. 11.22: 1385049600
                        Calendar cal = Calendar.getInstance();
                        cal.setTimeInMillis(startTimeInMillis);
                        cal.set(Calendar.DATE, cal.get(Calendar.DATE) + 1);
                        long endTime = cal.getTimeInMillis() / 1000;
                        String mac = getMacFromLog(line);
                        // getByLogInfo returns true when no matching record exists for that day yet
                        boolean result = hibernateDao.getByLogInfo(srcIp, desIp, startTime, endTime, Area.XIAOSHAN);
                        if (!result) {
                            System.out.println("repeat log count: " + ++count + "; the content is " + line);
                            continue;
                        }
                        LogInfo logInfo = new LogInfo();
                        logInfo.setMac(mac);
                        logInfo.setSrcIp(srcIp);
                        logInfo.setSrcPort(srcPort);
                        logInfo.setDesIp(desIp);
                        logInfo.setDesPort(desPort);
                        logInfo.setDate(date);
                        logInfo.setContent(line);
                        logInfo.setArea(Area.XIAOSHAN);
                        hibernateDao.saveLogoInfo(logInfo);
                    }
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } catch (ParseException e) {
                    e.printStackTrace();
                } finally {
                    // Close the reader before moving on to the next file
                    if (br != null) {
                        br.close();
                    }
                }
            }
        }
    }

    /**
     * Write the log records to a text file
     */
    public void writeLogInfoToTxt(List<LogInfo> logInfoList) throws IOException {
        File logInfoFile = null;
        FileWriter fw = null;
        BufferedWriter bw = null;
        try {
            logInfoFile = new File("D:\\syslog\\logInfo.txt");
            if (!logInfoFile.exists()) {
                logInfoFile.createNewFile();
            }
            fw = new FileWriter(logInfoFile);
            bw = new BufferedWriter(fw);
            for (LogInfo logInfo : logInfoList) {
                bw.write("srcIP: " + logInfo.getSrcIp() + "\t\tdestIP: " + logInfo.getDesIp() + "\t\tMAC: " + logInfo.getMac() + "\t\tdate: " + logInfo.getDate());
                bw.newLine();
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (bw != null) {
                bw.close();
            }
            if (fw != null) {
                fw.close();
            }
        }
    }

    /**
     * Extract the source IP and port from a log line
     */
    private String getSrcIpAndPortFromLog(String line) {
        String[] preLog = line.split("->")[0].split(",");
        String ipAndPort = preLog[preLog.length - 1];
        String srcIpAndPort = ipAndPort.substring(1, ipAndPort.length() - 1);
        return srcIpAndPort;
    }

    /**
     * Extract the destination IP and port from a log line
     */
    private String getDesIpAndPortFromLog(String line) {
        String[] preLog = line.split("->")[1].split(",");
        String destIpAndPort = preLog[0];
        return destIpAndPort;
    }

    /**
     * Extract the MAC address from a log line
     */
    private String getMacFromLog(String line) {
        int indexOfMac = line.indexOf("src-mac");
        String mac = line.substring(indexOfMac + 8, indexOfMac + 25);
        return mac;
    }

    /**
     * Extract the date and time from a log line
     */
    public String getDateFromLog(String line, int indexOfLen, int indexOfFrom) throws ParseException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        String dateOfLog = line.substring(indexOfLen, indexOfFrom);
        int year = 2013;
        int month = parseMonthType(dateOfLog.split(" ")[3].split("-")[1]);
        int day = Integer.parseInt(dateOfLog.split(" ")[3].split("-")[0]);
        String date = year + "-" + month + "-" + day + " " + dateOfLog.split(" ")[4];
        return sdf.format(sdf.parse(date));
    }

    /**
     * Get midnight (start of day) of the log line's date
     */
    public String getStartDateFromLog(String line, int indexOfLen, int indexOfFrom) throws ParseException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        String dateOfLog = line.substring(indexOfLen, indexOfFrom);
        int year = 2013;
        int month = parseMonthType(dateOfLog.split(" ")[3].split("-")[1]);
        int day = Integer.parseInt(dateOfLog.split(" ")[3].split("-")[0]);
        String date = year + "-" + month + "-" + day;
        return sdf.format(sdf.parse(date));
    }

    /**
     * Convert a month abbreviation from the log into its number
     */
    public int parseMonthType(String month) {
        int monthOfNum = 0;
        if ("Jan".equalsIgnoreCase(month)) {
            monthOfNum = 1;
        } else if ("Feb".equalsIgnoreCase(month)) {
            monthOfNum = 2;
        } else if ("Mar".equalsIgnoreCase(month)) {
            monthOfNum = 3;
        } else if ("Apr".equalsIgnoreCase(month)) {
            monthOfNum = 4;
        } else if ("May".equalsIgnoreCase(month)) {
            monthOfNum = 5;
        } else if ("Jun".equalsIgnoreCase(month)) {
            monthOfNum = 6;
        } else if ("Jul".equalsIgnoreCase(month)) {
            monthOfNum = 7;
        } else if ("Aug".equalsIgnoreCase(month)) {
            monthOfNum = 8;
        } else if ("Sep".equalsIgnoreCase(month)) {
            monthOfNum = 9;
        } else if ("Oct".equalsIgnoreCase(month)) {
            monthOfNum = 10;
        } else if ("Nov".equalsIgnoreCase(month)) {
            monthOfNum = 11;
        } else if ("Dec".equalsIgnoreCase(month)) {
            monthOfNum = 12;
        }
        return monthOfNum;
    }

    /**
     * Test entry point
     */
    public static void main(String[] args) {
        LogService filesUtils = new LogService();
        try {
            System.out.println("Backup started");
            filesUtils.saveLogFiles();
            System.out.println("Backup finished");
        } catch (IOException e) {
            e.printStackTrace();
        } catch (SQLException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }
}

  This code is still rough around the edges. The main problem is that as soon as it starts running it hits the database very frequently (it gets through roughly 4000 records) and then fails with the following exception:

Exception in thread "main" org.hibernate.exception.JDBCConnectionException: Cannot open connection
at org.hibernate.exception.SQLStateConverter.convert(SQLStateConverter.java:99)
at org.hibernate.exception.JDBCExceptionHelper.convert(JDBCExceptionHelper.java:66)
at org.hibernate.exception.JDBCExceptionHelper.convert(JDBCExceptionHelper.java:52)
at org.hibernate.jdbc.ConnectionManager.openConnection(ConnectionManager.java:449)
at org.hibernate.jdbc.ConnectionManager.getConnection(ConnectionManager.java:167)
at org.hibernate.jdbc.JDBCContext.connection(JDBCContext.java:160)
at org.hibernate.transaction.JDBCTransaction.begin(JDBCTransaction.java:81)
at org.hibernate.impl.SessionImpl.beginTransaction(SessionImpl.java:1473)
at com.hjc.backup.dao.hibernate.HibernateDao.getByLogInfo(HibernateDao.java:38)
at com.hjc.backup.service.LogService.saveLogFiles(LogService.java:85)
at com.hjc.backup.service.LogService.main(LogService.java:277)
Caused by: com.mysql.jdbc.CommunicationsException: The driver was unable to create a connection due to an inability to establish the client portion of a socket. This is usually caused by a limit on the number of sockets imposed by the operating system. This limit is usually configurable. For Unix-based platforms, see the manual page for the 'ulimit' command. Kernel or system reconfiguration may also be required. For Windows-based platforms, see Microsoft Knowledge Base Article 196271 (Q196271).
at com.mysql.jdbc.Connection.createNewIO(Connection.java:2847)
at com.mysql.jdbc.Connection.<init>(Connection.java:1555)
at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:285)
at java.sql.DriverManager.getConnection(Unknown Source)
at java.sql.DriverManager.getConnection(Unknown Source)
at org.hibernate.connection.DriverManagerConnectionProvider.getConnection(DriverManagerConnectionProvider.java:133)
at org.hibernate.jdbc.ConnectionManager.openConnection(ConnectionManager.java:446)
... 7 more

  I am honestly a bit stuck on this exception; one of these days I will configure a connection pool and see whether that fixes it.
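  For anyone who wants to try that before the follow-up post, here is a rough sketch of the direction I have in mind, assuming the c3p0 jar (and Hibernate's c3p0 integration) is on the classpath; the hibernate.c3p0.* names are the standard settings, but the values are only placeholders:

package com.hjc.backup.util; // hypothetical helper, for illustration only

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class PooledBootstrap {

    public static SessionFactory buildPooledSessionFactory() {
        Configuration configuration = new Configuration().configure();
        // Hand connection management over to c3p0 instead of Hibernate's built-in pool
        configuration.setProperty("hibernate.c3p0.min_size", "5");
        configuration.setProperty("hibernate.c3p0.max_size", "20");
        configuration.setProperty("hibernate.c3p0.timeout", "300");
        configuration.setProperty("hibernate.c3p0.max_statements", "50");
        configuration.setProperty("hibernate.c3p0.idle_test_period", "3000");
        return configuration.buildSessionFactory();
    }
}

  The same properties could just as well go into hibernate.cfg.xml as <property> elements; as far as I remember, once any hibernate.c3p0.* setting is present and c3p0 is available, Hibernate switches from its built-in pool to c3p0.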

  All right, that is it for now.

  2013-12-28 17:56:20

												
