// HubbleDotNet usage class (HubbleDotNet 使用类)
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Hubble.SQLClient;
using System.Configuration;
using RJ.Entity.UI_A;
using System.Data;
using Hubble.Core.Analysis.HighLight;
using Hubble.Analyzer;
using RJ.DBConnection;
using RJ.DALS.UI_A;
using RJ.Common;
using RJ.Entity.UI_A.AboutRJ;
namespace RJ.DAL.UI_A
{
public class DHubble : UI_A_DALBaseClass
{
private static string _TitleAnalyzerName = null;
private static string _ContentAnalyzerName = null;
private const int CacheTimeout = 0; //In seconds
/// <summary>
/// Looks up and caches the analyzer names configured for the "Title" and
/// "Content" columns of the given Hubble table. The values are cached in
/// static fields for the lifetime of the AppDomain; once both are resolved
/// the method returns immediately without querying again.
/// NOTE(review): the cache is not synchronized — concurrent first calls may
/// each run the query, which is harmless since they write identical values.
/// </summary>
/// <param name="conn">Open Hubble connection to query metadata through.</param>
/// <param name="tableName">Hubble table whose column metadata is inspected.</param>
public static void GetAnalyzerName(HubbleAsyncConnection conn, string tableName)
{
    if (_TitleAnalyzerName != null && _ContentAnalyzerName != null)
    {
        return;
    }
    // Double embedded single quotes so the table name cannot break the literal.
    string sql = string.Format("exec SP_Columns '{0}'", tableName.Replace("'", "''"));
    HubbleCommand cmd = new HubbleCommand(sql, conn);
    foreach (System.Data.DataRow row in cmd.Query().Tables[0].Rows)
    {
        // Column names are identifiers, not user-facing text: compare ordinally
        // (culture-sensitive comparison can misbehave, e.g. under tr-TR).
        if (row["FieldName"].ToString().Equals("Title", StringComparison.OrdinalIgnoreCase))
        {
            _TitleAnalyzerName = row["Analyzer"].ToString();
        }
        if (row["FieldName"].ToString().Equals("Content", StringComparison.OrdinalIgnoreCase))
        {
            _ContentAnalyzerName = row["Analyzer"].ToString();
        }
    }
}
/// <summary>
/// 搜索列表
/// </summary>
/// <param name="pageindex"></param>
/// <param name="pagesize"></param>
/// <param name="key"></param>
/// <param name="Count"></param>
/// <returns></returns>
public List<EArticle> SearchKnowList(int pageindex, int pagesize, string key, out int Count)
{
    // "key" carries the search words before the first '_'; the part after the
    // underscore was a legacy sort-mode flag (see the removed commented code).
    string searchWords = key.Split('_')[0];
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "KnowSearch");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        // Let the Hubble server tokenize the keywords with the column's analyzer.
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("KnowSearch",
            "documentname", searchWords, int.MaxValue, out wordssplitbyspace);
        HubbleCommand selectCmd = new HubbleCommand("select between @begin to @end ID,UNIID,DocumentName,publishTime from KnowSearch where ( documentname Contains @matchString or documentname match @matchString ) order by score desc,publishTime desc",
            conn);
        // BUGFIX: the first row of page N is (pageindex - 1) * pagesize. The old
        // expression (pagesize - 1) * pageindex produced a wrong window whenever
        // pageindex != pagesize (the @end formula below was already correct).
        selectCmd.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        selectCmd.Parameters.Add("@end", pageindex * pagesize - 1);
        selectCmd.Parameters.Add("@matchString", matchString);
        selectCmd.CacheTimeout = CacheTimeout;
        ds = selectCmd.Query(CacheTimeout);
    }
    // Hubble reports the total hit count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // One formatter/highlighter pair serves every row (loop-invariant).
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // No publish time recorded: use the far-future sentinel date.
            document.PublishTime = Convert.ToDateTime("9999-12-31");
        }
        // Highlight matched words in the title; fall back to the plain name
        // when the highlighter finds no fragment.
        string lightname = titleHighlighter.GetBestFragment(searchWords, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;
        result.Add(document);
    }
    return result;
}
/// <summary>
/// 文档搜索列表
/// </summary>
/// <param name="pageindex"></param>
/// <param name="pagesize"></param>
/// <param name="key"></param>
/// <param name="Count"></param>
/// <returns></returns>
public List<EArticle> SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "DocumentSearch");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        // Server-side tokenization of the keywords with the column's analyzer.
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",
            "documentname", key, int.MaxValue, out wordssplitbyspace);
        StringBuilder strSql = new StringBuilder();
        // isLevel is a comma-separated list of visit levels the caller may see;
        // empty means "public rows only".
        string[] levelPara = string.IsNullOrEmpty(isLevel) ? null : isLevel.Split(',');
        if (levelPara == null)
        {
            strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and VisitLevel ='' and release='true' order by score desc");
        }
        else
        {
            strSql.Append("select between @begin to @end id,UNIID,documentname,htmlurl,publishtime,addtime,SEO_D,PDF from DocumentSearch where ( documentname Contains @matchString or Content Contains @matchString or documentname match @matchString or Content match @matchString ) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the dangling "or" left by the last loop iteration.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') and release='true' order by score desc");
        }
        HubbleCommand selectCmd = new HubbleCommand(strSql.ToString(), conn);
        // BUGFIX: page window starts at (pageindex - 1) * pagesize; the old
        // (pagesize - 1) * pageindex was wrong whenever pageindex != pagesize.
        selectCmd.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        selectCmd.Parameters.Add("@end", pageindex * pagesize - 1);
        selectCmd.Parameters.Add("@matchString", matchString);
        if (levelPara != null)
        {
            for (int i = 0; i < levelPara.Length; i++)
            {
                // Strip quotes so a level value can never escape the parameter.
                selectCmd.Parameters.Add("@VisitLevel" + i, levelPara[i].Replace("'", ""));
            }
        }
        selectCmd.CacheTimeout = CacheTimeout;
        ds = selectCmd.Query(CacheTimeout);
    }
    // Hubble reports the total hit count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // One formatter/highlighter pair serves every row (loop-invariant).
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        document.htmlURL = row["htmlURL"].ToString();
        document.SEO_D = row["SEO_D"].ToString();
        document.PDF = row["PDF"].ToString();
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // No publish time: fall back to the record's creation time.
            document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());
        }
        string lightname = titleHighlighter.GetBestFragment(key, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;
        result.Add(document);
    }
    return result;
}
/// <summary>
/// 软件搜索列表
/// </summary>
/// <param name="pageindex"></param>
/// <param name="pagesize"></param>
/// <param name="key"></param>
/// <param name="Count"></param>
/// <returns></returns>
public List<EArticle> SearchSoftWareList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "SoftwareSearch");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        // Server-side tokenization of the keywords with the column's analyzer.
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("SoftwareSearch",
            "DocumentName", key, int.MaxValue, out wordssplitbyspace);
        StringBuilder strSql = new StringBuilder();
        // isLevel: comma-separated visit levels the caller may see; empty means
        // public rows plus the special "JingBiaoLevel" rows only.
        string[] levelPara = string.IsNullOrEmpty(isLevel) ? null : isLevel.Split(',');
        if (levelPara == null)
        {
            strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (VisitLevel ='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
        }
        else
        {
            strSql.Append("select between @begin to @end id,documentname,publishtime,addtime,UNIID,IsCheckPartner from SoftwareSearch where (DocumentName Contains @matchString or DocumentName match @matchString) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the dangling "or" left by the last loop iteration.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') and release='true' order by score desc");
        }
        HubbleCommand selectCmd = new HubbleCommand(strSql.ToString(), conn);
        // BUGFIX: page window starts at (pageindex - 1) * pagesize; the old
        // (pagesize - 1) * pageindex was wrong whenever pageindex != pagesize.
        selectCmd.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        selectCmd.Parameters.Add("@end", pageindex * pagesize - 1);
        selectCmd.Parameters.Add("@matchString", matchString);
        if (levelPara != null)
        {
            for (int i = 0; i < levelPara.Length; i++)
            {
                // Strip quotes so a level value can never escape the parameter.
                selectCmd.Parameters.Add("@VisitLevel" + i, levelPara[i].Replace("'", ""));
            }
        }
        selectCmd.CacheTimeout = CacheTimeout;
        ds = selectCmd.Query(CacheTimeout);
    }
    // Hubble reports the total hit count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // One formatter/highlighter pair serves every row (loop-invariant).
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle software = new EArticle();
        software.ID = int.Parse(row["ID"].ToString());
        software.Name = row["DocumentName"].ToString();
        software.UpdateTime = Convert.ToDateTime(row["AddTime"].ToString());
        software.UNIID = row["UNIID"].ToString();
        software.IsCheckPartner = Convert.ToBoolean(row["IsCheckPartner"]);
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            software.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // No publish time: fall back to the record's creation time.
            software.PublishTime = Convert.ToDateTime(row["Addtime"].ToString());
        }
        software.TitleHighLighter = titleHighlighter.GetBestFragment(key, software.Name);
        if (string.IsNullOrEmpty(software.TitleHighLighter))
        {
            software.TitleHighLighter = software.Name;
        }
        result.Add(software);
    }
    return result;
}
/// <summary>
///全站搜索
/// </summary>
/// <param name="pageindex"></param>
/// <param name="pagesize"></param>
/// <param name="key"></param>
/// <param name="Count"></param>
/// <returns></returns>
public List<ESearch> SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count, out DataTable dtcount)
{
    // NOTE: unlike the other search methods, this one takes (pagesize, pageindex)
    // in that order; pagesize == 0 means "no paging clause".
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "WholeSearch_CN");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        // Server-side tokenization of the keywords with the column's analyzer.
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_CN",
            "Name", key, int.MaxValue, out wordssplitbyspace);
        StringBuilder strSql = new StringBuilder();
        strSql.Append(" select ");
        if (pagesize != 0)
        {
            strSql.Append(" between @begin to @end ID,Name,TypeID,SEO_D,TypeName,strURL,addTime from ");
        }
        strSql.Append(" WholeSearch_CN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) ");
        // isLevel: comma-separated visit levels the caller may see; empty means
        // public rows plus the special "JingBiaoLevel" rows only.
        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append(" and (VisitLevel ='' or visitlevel='JingBiaoLevel')");
        }
        else
        {
            string[] levelPara = isLevel.Split(',');
            strSql.Append(" and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the dangling "or" left by the last loop iteration.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='' or visitlevel='JingBiaoLevel') ");
        }
        if (typeid > 0)
        {
            // Restrict to a single content type.
            strSql.Append(" and typeid=@typeid");
        }
        else
        {
            // No type filter: ask Hubble for per-TypeID hit counts in a second
            // result table (returned to the caller through dtcount).
            strSql.Insert(0, " [GroupBy('Count', 'ID', 'TypeID', 10)] ");
        }
        strSql.Append(" order by score desc, lorder");
        HubbleCommand selectCmd = new HubbleCommand(strSql.ToString(), conn);
        selectCmd.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        selectCmd.Parameters.Add("@end", pageindex * pagesize - 1);
        selectCmd.Parameters.Add("@matchString", matchString);
        selectCmd.Parameters.Add("@typeid", typeid);
        if (!string.IsNullOrEmpty(isLevel))
        {
            for (int i = 0; i < isLevel.Split(',').Length; i++)
            {
                // Strip quotes so a level value can never escape the parameter.
                selectCmd.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
            }
        }
        selectCmd.CacheTimeout = CacheTimeout;
        ds = selectCmd.Query(CacheTimeout);
    }
    // Hubble reports the total hit count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    // Second table (when present) carries the GroupBy per-type counts.
    dtcount = ds.Tables.Count > 1 ? ds.Tables[1] : null;
    List<ESearch> result = new List<ESearch>();
    // One formatter and one highlighter pair serve every row (loop-invariant).
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    Highlighter contentHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    contentHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        ESearch search = new ESearch();
        search.ID = int.Parse(row["ID"].ToString());
        search.Name = Strings.NoHTML(row["Name"].ToString());
        search.SEO_D = row["SEO_D"].ToString();
        search.strURL = row["strURL"].ToString();
        // TypeID can be empty on some rows; only parse when present.
        if (row["TypeID"].ToString() != "")
        {
            search.TypeID = int.Parse(row["TypeID"].ToString());
            search.TypeName = row["TypeName"].ToString();
        }
        if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
        {
            search.addTime = Convert.ToDateTime(row["addTime"].ToString());
        }
        else
        {
            // No add time recorded: use the far-future sentinel date.
            search.addTime = Convert.ToDateTime("9999-12-31");
        }
        search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
        if (string.IsNullOrEmpty(search.Abstract))
        {
            search.Abstract = search.SEO_D;
        }
        search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
        if (string.IsNullOrEmpty(search.TitleHighLighter))
        {
            search.TitleHighLighter = search.Name;
        }
        result.Add(search);
    }
    return result;
}
/// <summary>
/// 英文文档搜索列表
/// </summary>
/// <param name="pageindex"></param>
/// <param name="pagesize"></param>
/// <param name="key"></param>
/// <param name="Count"></param>
/// <returns></returns>
public List<EArticle> EN_SearchDocList(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "DocumentSearch_EN");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        // Server-side tokenization of the keywords with the column's analyzer.
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch_EN",
            "documentname", key, int.MaxValue, out wordssplitbyspace);
        StringBuilder strSql = new StringBuilder();
        // isLevel: comma-separated visit levels the caller may see; empty means
        // public rows only. NOTE(review): only the level-restricted branch also
        // searches SEO_K — presumably intentional, but worth confirming.
        string[] levelPara = string.IsNullOrEmpty(isLevel) ? null : isLevel.Split(',');
        if (levelPara == null)
        {
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc");
        }
        else
        {
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,PDF,SEO_D,publishTime,addTime from DocumentSearch_EN where ( documentname Contains @matchString or documentname match @matchString or SEO_K Contains @matchString or SEO_K match @matchString ) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the dangling "or" left by the last loop iteration.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') and release='true' order by score desc");
        }
        HubbleCommand selectCmd = new HubbleCommand(strSql.ToString(), conn);
        // BUGFIX: page window starts at (pageindex - 1) * pagesize; the old
        // (pagesize - 1) * pageindex was wrong whenever pageindex != pagesize.
        selectCmd.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        selectCmd.Parameters.Add("@end", pageindex * pagesize - 1);
        selectCmd.Parameters.Add("@matchString", matchString);
        if (levelPara != null)
        {
            for (int i = 0; i < levelPara.Length; i++)
            {
                // Strip quotes so a level value can never escape the parameter.
                selectCmd.Parameters.Add("@VisitLevel" + i, levelPara[i].Replace("'", ""));
            }
        }
        selectCmd.CacheTimeout = CacheTimeout;
        ds = selectCmd.Query(CacheTimeout);
    }
    // Hubble reports the total hit count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // One formatter/highlighter pair serves every row (loop-invariant).
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        document.SEO_D = row["SEO_D"].ToString();
        document.PDF = row["PDF"].ToString();
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // No publish time: fall back to the record's creation time.
            document.PublishTime = Convert.ToDateTime(row["addTime"].ToString());
        }
        string lightname = titleHighlighter.GetBestFragment(key, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;
        result.Add(document);
    }
    return result;
}
/// <summary>
///英文全站搜索
/// </summary>
/// <param name="pageindex"></param>
/// <param name="pagesize"></param>
/// <param name="key"></param>
/// <param name="Count"></param>
/// <returns></returns>
public List<ESearch> EN_SearchAllList(int pagesize, int pageindex, string key, string isLevel, int typeid, out int Count)
{
    // NOTE: like SearchAllList, parameter order is (pagesize, pageindex);
    // pagesize == 0 means "no paging clause".
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "WholeSearch_EN");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        // Server-side tokenization of the keywords with the column's analyzer.
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("WholeSearch_EN",
            "Name", key, int.MaxValue, out wordssplitbyspace);
        StringBuilder strSql = new StringBuilder();
        if (pagesize == 0)
        {
            strSql.Append("select ID,Name,TypeID,SEO_D,TypeName,addTime from ");
        }
        else
        {
            strSql.Append("select between @begin to @end ID,Name,TypeID,SEO_D,TypeName,addTime from ");
        }
        // isLevel: comma-separated visit levels the caller may see; empty means
        // public rows only.
        if (string.IsNullOrEmpty(isLevel))
        {
            strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and VisitLevel =''");
        }
        else
        {
            string[] levelPara = isLevel.Split(',');
            strSql.Append(" WholeSearch_EN where ( Name Contains @matchString or Name Match @matchString or SEO_D Contains @matchString or SEO_D Match @matchString) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the dangling "or" left by the last loop iteration.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') ");
        }
        if (typeid > 0)
        {
            // Restrict to a single content type.
            strSql.Append(" and typeid=@typeid");
        }
        strSql.Append(" order by score desc, lorder");
        HubbleCommand selectCmd = new HubbleCommand(strSql.ToString(), conn);
        selectCmd.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        selectCmd.Parameters.Add("@end", pageindex * pagesize - 1);
        selectCmd.Parameters.Add("@matchString", matchString);
        selectCmd.Parameters.Add("@typeid", typeid);
        if (!string.IsNullOrEmpty(isLevel))
        {
            for (int i = 0; i < isLevel.Split(',').Length; i++)
            {
                // Strip quotes so a level value can never escape the parameter.
                selectCmd.Parameters.Add("@VisitLevel" + i, isLevel.Split(',')[i].Replace("'", ""));
            }
        }
        selectCmd.CacheTimeout = CacheTimeout;
        ds = selectCmd.Query(CacheTimeout);
    }
    // Hubble reports the total hit count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<ESearch> result = new List<ESearch>();
    // One formatter and one highlighter pair serve every row (loop-invariant).
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    Highlighter contentHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    contentHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        ESearch search = new ESearch();
        search.ID = int.Parse(row["ID"].ToString());
        search.Name = Strings.NoHTML(row["Name"].ToString());
        search.SEO_D = row["SEO_D"].ToString();
        // ROBUSTNESS: guard against an empty TypeID (int.Parse on "" throws a
        // FormatException) — same guard the CN version (SearchAllList) uses.
        if (row["TypeID"].ToString() != "")
        {
            search.TypeID = int.Parse(row["TypeID"].ToString());
            search.TypeName = row["TypeName"].ToString();
        }
        if (!string.IsNullOrEmpty(row["addTime"].ToString().Trim()))
        {
            search.addTime = Convert.ToDateTime(row["addTime"].ToString());
        }
        else
        {
            // No add time recorded: use the far-future sentinel date.
            search.addTime = Convert.ToDateTime("9999-12-31");
        }
        search.Abstract = contentHighlighter.GetBestFragment(key, search.SEO_D);
        if (string.IsNullOrEmpty(search.Abstract))
        {
            search.Abstract = search.SEO_D;
        }
        search.TitleHighLighter = titleHighlighter.GetBestFragment(key, search.Name);
        if (string.IsNullOrEmpty(search.TitleHighLighter))
        {
            search.TitleHighLighter = search.Name;
        }
        result.Add(search);
    }
    return result;
}
/// <summary>
/// 中文文档搜索列表
/// </summary>
/// <param name="pageindex"></param>
/// <param name="pagesize"></param>
/// <param name="key"></param>
/// <param name="Count"></param>
/// <returns></returns>
public List<EArticle> SearchDocListNew(int pageindex, int pagesize, string key, string isLevel, out int Count)
{
    // "key" carries the search words before the first '_'; the part after the
    // underscore was a legacy sort-mode flag whose branches have been removed —
    // all modes now sort by score desc, publishTime desc.
    string searchWords = key.Split('_')[0];
    string strConn = ConfigurationManager.ConnectionStrings["Search"].ToString();
    DataSet ds;
    using (HubbleAsyncConnection conn = new HubbleAsyncConnection(strConn))
    {
        conn.Open();
        GetAnalyzerName(conn, "DocumentSearch");
        string wordssplitbyspace;
        HubbleCommand matchCmd = new HubbleCommand(conn);
        // Server-side tokenization of the keywords with the column's analyzer.
        string matchString = matchCmd.GetKeywordAnalyzerStringFromServer("DocumentSearch",
            "Documentname", searchWords, int.MaxValue, out wordssplitbyspace);
        StringBuilder strSql = new StringBuilder();
        // isLevel: comma-separated visit levels the caller may see; empty means
        // public rows only.
        string[] levelPara = string.IsNullOrEmpty(isLevel) ? null : isLevel.Split(',');
        if (levelPara == null)
        {
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and VisitLevel ='' and release='true' order by score desc, publishTime desc");
        }
        else
        {
            strSql.Append("select between @begin to @end ID,UNIID,DocumentName,SEO_D,PDF,htmlURL,gid_collection,publishTime,addTime from DocumentSearch where ( documentname Contains @matchString or documentname match @matchString ) and (");
            for (int i = 0; i < levelPara.Length; i++)
            {
                strSql.Append(" VisitLevel=@VisitLevel" + i + " or ");
            }
            // Drop the dangling "or" left by the last loop iteration.
            strSql.Remove(strSql.Length - 3, 2);
            strSql.Append(" or VisitLevel='') and release='true' order by score desc, publishTime desc ");
        }
        HubbleCommand selectCmd = new HubbleCommand(strSql.ToString(), conn);
        // BUGFIX: page window starts at (pageindex - 1) * pagesize; the old
        // (pagesize - 1) * pageindex was wrong whenever pageindex != pagesize.
        selectCmd.Parameters.Add("@begin", (pageindex - 1) * pagesize);
        selectCmd.Parameters.Add("@end", pageindex * pagesize - 1);
        selectCmd.Parameters.Add("@matchString", matchString);
        if (levelPara != null)
        {
            for (int i = 0; i < levelPara.Length; i++)
            {
                // Strip quotes so a level value can never escape the parameter.
                selectCmd.Parameters.Add("@VisitLevel" + i, levelPara[i].Replace("'", ""));
            }
        }
        selectCmd.CacheTimeout = CacheTimeout;
        ds = selectCmd.Query(CacheTimeout);
    }
    // Hubble reports the total hit count through DataTable.MinimumCapacity.
    Count = ds.Tables[0].MinimumCapacity;
    List<EArticle> result = new List<EArticle>();
    // One formatter/highlighter pair serves every row (loop-invariant).
    SimpleHTMLFormatter simpleHTMLFormatter =
        new SimpleHTMLFormatter("<font color=\"red\">", "</font>");
    Highlighter titleHighlighter = new Highlighter(simpleHTMLFormatter, new PanGuAnalyzer());
    titleHighlighter.FragmentSize = 100;
    foreach (System.Data.DataRow row in ds.Tables[0].Rows)
    {
        EArticle document = new EArticle();
        document.ID = int.Parse(row["ID"].ToString());
        document.UNIID = row["UNIID"].ToString();
        document.Name = row["DocumentName"].ToString();
        document.SEO_D = row["SEO_D"].ToString();
        document.PDF = row["PDF"].ToString();
        document.htmlURL = row["htmlURL"].ToString();
        document.gid_collection = row["gid_collection"].ToString();
        if (!string.IsNullOrEmpty(row["publishTime"].ToString().Trim()))
        {
            document.PublishTime = Convert.ToDateTime(row["publishTime"].ToString());
        }
        else
        {
            // No publish time: fall back to the record's creation time.
            document.PublishTime = Convert.ToDateTime(row["AddTime"].ToString());
        }
        // Highlight matched words in the title; fall back to the plain name.
        string lightname = titleHighlighter.GetBestFragment(searchWords, document.Name);
        document.TitleHighLighter = string.IsNullOrEmpty(lightname) ? document.Name : lightname;
        result.Add(document);
    }
    return result;
}
}
}
HubbleDotNet 使用类的更多相关文章
- Java类的继承与多态特性-入门笔记
相信对于继承和多态的概念性我就不再怎么解释啦!不管你是.Net还是Java面向对象编程都是必不可少的一堂课~~Net如此Java亦有同样的思想成分包含其中. 继承,多态,封装是Java面向对象的3大特 ...
- HubbleDotNet 开源全文搜索数据库项目--为数据库现有表或视图建立全文索引(三) 多表关联全文索引模式
关系型数据库中,多表关联是很常见的事情,HubbleDotNet 可以对部分情况的多表关联形式建立关联的全文索引,这样用户就不需要专门建一个大表 来解决多表关联时的全文索引问题. 下面以 为数据库现有 ...
- C++ 可配置的类工厂
项目中常用到工厂模式,工厂模式可以把创建对象的具体细节封装到Create函数中,减少重复代码,增强可读和可维护性.传统的工厂实现如下: class Widget { public: virtual i ...
- Android请求网络共通类——Hi_博客 Android App 开发笔记
今天 ,来分享一下 ,一个博客App的开发过程,以前也没开发过这种类型App 的经验,求大神们轻点喷. 首先我们要创建一个Andriod 项目 因为要从网络请求数据所以我们先来一个请求网络的共通类. ...
- ASP.NET MVC with Entity Framework and CSS一书翻译系列文章之第二章:利用模型类创建视图、控制器和数据库
在这一章中,我们将直接进入项目,并且为产品和分类添加一些基本的模型类.我们将在Entity Framework的代码优先模式下,利用这些模型类创建一个数据库.我们还将学习如何在代码中创建数据库上下文类 ...
- ASP.NET Core 折腾笔记二:自己写个完整的Cache缓存类来支持.NET Core
背景: 1:.NET Core 已经没System.Web,也木有了HttpRuntime.Cache,因此,该空间下Cache也木有了. 2:.NET Core 有新的Memory Cache提供, ...
- .NET Core中间件的注册和管道的构建(2)---- 用UseMiddleware扩展方法注册中间件类
.NET Core中间件的注册和管道的构建(2)---- 用UseMiddleware扩展方法注册中间件类 0x00 为什么要引入扩展方法 有的中间件功能比较简单,有的则比较复杂,并且依赖其它组件.除 ...
- Java基础Map接口+Collections工具类
1.Map中我们主要讲两个接口 HashMap 与 LinkedHashMap (1)其中LinkedHashMap是有序的 怎么存怎么取出来 我们讲一下Map的增删改查功能: /* * Ma ...
- PHP-解析验证码类--学习笔记
1.开始 在 网上看到使用PHP写的ValidateCode生成验证码码类,感觉不错,特拿来分析学习一下. 2.类图 3.验证码类部分代码 3.1 定义变量 //随机因子 private $char ...
随机推荐
- MySQL-with rollup函数运用 _20160930
在博客里http://www.cnblogs.com/Mr-Cxy/p/5898839.html提到了行转列, 如果想在下面这个表下面添加一行 总计 数据行SQL代码怎么实现 并且根据9月金额进行城市 ...
- nginx 反向代理配置
转载一篇特别好的nginx配置博文:http://www.cnblogs.com/hunttown/p/5759959.html
- Vue项目屏幕自适应方案
安装lib-flexible cnpm i lib-flexible -D Vue项目引入 lib-flexible. main.js: import 'lib-flexible/flexible' ...
- ceph应用情况分析
1.概述 ceph是分布式的开源存储系统,同时支持块存储.对象存储和文件系统,ceph可以满足高性能.高可靠性和高扩展等特性. 目前ceph作为开源分布式存储已经被大量使用,尤其是在云环境下的应用,下 ...
- [hdu4738]求桥模板
oj问题,待修改,存档. #include<stdio.h> #include<iostream> #include<cstdio> #include<sta ...
- iis部署错误:HTTP 错误 500.21 - Internal Server Error
将网站发布到IIS,访问发生如下错误: HTTP 错误 500.21 - Internal Server Error处理程序“PageHandlerFactory-Integr”在其模块列表中有一个错 ...
- Ubuntu 14.04中修复默认启用HDMI后没有声音的问题
声音问题在Ubuntu中是老生常谈了.先前我已经在修复Ubuntu中的“无声”问题一文中写到了多种方法,但是我在此正要谈及的声音问题跟在另外一篇文章中提到的有所不同. 因此,我安装了Ubuntu 14 ...
- HDU - 5875 Function(预处理)
Function The shorter, the simpler. With this problem, you should be convinced of this truth. Yo ...
- MATLAB进行假设检验
4.8.1 已知,单个正态总体的均值μ的假设检验(U检验法) 函数 ztest 格式 h = ztest(x,m,sigma) % x为正态总体的样本,m为均值μ0,sigma为标准差,显著 ...
- unity打包选项编辑器扩展
using UnityEngine; using UnityEditor; using UnityEditor.Callbacks; using System.IO; public class Pos ...