MYBLOG

欢迎来到小马哥的个人博客~

[原创]Lucene.net+盘古分词(一)

2020-03-08学海无涯

近期用到Lucene.net+盘古分词,特此在这里记录一下。原本自己做了一套DEMO,由于时间关系,今天就不在此上传源码了,等日后有时间了,再分享给大家。好了,不多说了,直接放实例代码。


using Lucene.Net.Analysis;
using Lucene.Net.Analysis.PanGu;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Store;
using PanGu;
using System;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Web;

namespace MyApp.AppFunction
{
    public class LucenePangu
    {
        /// <summary>
        /// Describes how one DataTable column maps onto a Lucene Field:
        /// its name/value plus the store, index and term-vector options.
        /// NOTE: the "Filed" spelling is kept for compatibility with existing callers.
        /// </summary>
        public class IndexField
        {
            /// <summary>Lucene field name (must match the DataTable column name when indexing).</summary>
            public string FiledName { get; set; }

            /// <summary>Field value; used when updating an existing document.</summary>
            public string FiledValue { get; set; }

            /// <summary>Whether (and how) the raw value is stored in the index.</summary>
            public Field.Store FiledStore { get; set; }

            /// <summary>Tokenization / indexing mode for the field.</summary>
            public Field.Index FiledIndex { get; set; }

            /// <summary>Term-vector storage option for the field.</summary>
            public Field.TermVector FiledTermVector { get; set; }
        }

        /// <summary>
        /// Builds (or appends to) the index folder from the rows of a DataTable;
        /// each row becomes one Document whose fields are described by <paramref name="Fields"/>.
        ///
        /// Field.Store.YES: store the original (pre-tokenization) value.
        /// Field.Store.NO: do not store; storage is independent of indexing.
        /// Field.Store.COMPRESS: compressed storage for long text/binary (slower).
        ///
        /// Field.Index.ANALYZED: tokenize and index.
        /// Field.Index.ANALYZED_NO_NORMS: tokenize and index without norms (saves space).
        /// Field.Index.NOT_ANALYZED: index the value as a single token.
        /// Field.Index.NOT_ANALYZED_NO_NORMS: single token, no norms.
        ///
        /// Field.TermVector.YES / NO / WITH_POSITIONS / WITH_OFFSETS / WITH_POSITIONS_OFFSETS:
        /// whether and how per-document term vectors are stored.
        /// </summary>
        /// <param name="IndexData">Virtual path of the index folder (resolved via Server.MapPath).</param>
        /// <param name="Dt_Data">Source rows; may be null/empty, in which case nothing is added.</param>
        /// <param name="Fields">Column-to-Field mapping for every indexed column.</param>
        /// <returns>true on success; false if any exception occurred (swallowed by design).</returns>
        public bool CreateIndexByData(string IndexData, DataTable Dt_Data, List<IndexField> Fields)
        {
            try
            {
                string indexPath = HttpContext.Current.Server.MapPath(IndexData);
                FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NativeFSLockFactory());
                try
                {
                    bool isExist = IndexReader.IndexExists(directory);
                    // A stale write.lock (e.g. after a crash) would block the writer; clear it first.
                    if (isExist && IndexWriter.IsLocked(directory))
                    {
                        IndexWriter.Unlock(directory);
                    }
                    // Create a fresh index when none exists, otherwise append.
                    IndexWriter writer = new IndexWriter(directory, new PanGuAnalyzer(), !isExist, IndexWriter.MaxFieldLength.UNLIMITED);
                    try
                    {
                        if (Dt_Data != null && Dt_Data.Rows.Count > 0 && Fields != null)
                        {
                            foreach (DataRow row in Dt_Data.Rows)
                            {
                                Document document = new Document();
                                foreach (IndexField field in Fields)
                                {
                                    document.Add(new Field(field.FiledName, row[field.FiledName].ToString(), field.FiledStore, field.FiledIndex, field.FiledTermVector));
                                }
                                writer.AddDocument(document);
                            }
                        }
                    }
                    finally
                    {
                        // BUG FIX: the original only closed the writer when rows existed,
                        // leaking the write.lock for empty input and on any exception.
                        writer.Close();
                    }
                }
                finally
                {
                    directory.Close();
                }
                return true;
            }
            catch (Exception)
            {
                // Contract with existing callers: report failure as false, never throw.
                return false;
            }
        }

        /// <summary>
        /// Replaces the document(s) whose <paramref name="QueryFiled"/> term equals
        /// <paramref name="FiledValue"/> with a new document built from <paramref name="Fields"/>
        /// (Lucene "update" = delete-by-term + add).
        /// </summary>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <param name="QueryFiled">Field used to locate the document to replace.</param>
        /// <param name="FiledValue">Exact term value identifying the document.</param>
        /// <param name="Fields">Field definitions (name/value/options) of the replacement document.</param>
        /// <returns>true on success; false if any exception occurred (swallowed by design).</returns>
        public bool UpdateIndexByData(string IndexData, string QueryFiled, string FiledValue, List<IndexField> Fields)
        {
            try
            {
                string indexPath = HttpContext.Current.Server.MapPath(IndexData);
                FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NativeFSLockFactory());
                try
                {
                    bool isExist = IndexReader.IndexExists(directory);
                    // Clear a stale write.lock left behind by a crashed writer.
                    if (isExist && IndexWriter.IsLocked(directory))
                    {
                        IndexWriter.Unlock(directory);
                    }
                    IndexWriter writer = new IndexWriter(directory, new PanGuAnalyzer(), !isExist, IndexWriter.MaxFieldLength.UNLIMITED);
                    try
                    {
                        Document document = new Document();
                        if (Fields != null)
                        {
                            foreach (IndexField field in Fields)
                            {
                                document.Add(new Field(field.FiledName, field.FiledValue, field.FiledStore, field.FiledIndex, field.FiledTermVector));
                            }
                        }
                        writer.UpdateDocument(new Term(QueryFiled, FiledValue), document);
                    }
                    finally
                    {
                        // BUG FIX: original leaked the writer (and write.lock) if an
                        // exception occurred between construction and Close().
                        writer.Close();
                    }
                }
                finally
                {
                    directory.Close();
                }
                return true;
            }
            catch (Exception)
            {
                return false;
            }
        }

        /// <summary>
        /// Exact-term search (the key is NOT tokenized) with index-order sorting and paging.
        /// </summary>
        /// <param name="ReturnFileds">Stored field names to copy into the result table.</param>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <param name="HightLightFileds">Name of the single column to highlight; may be empty.</param>
        /// <param name="SearchKey">Exact term to look up.</param>
        /// <param name="QueryFiled">Field to query.</param>
        /// <param name="PageSize">Rows per page.</param>
        /// <param name="PageIndex">1-based page number.</param>
        /// <param name="TotalCount">Out: total hit count (0 when no hits).</param>
        /// <param name="SortFiled">Sort field name (passed to SortField; DOC = index order).</param>
        /// <param name="Sort">true = descending, false = ascending.</param>
        /// <param name="RunTime">Out: query time in milliseconds.</param>
        /// <returns>One page of results, or null when there are no hits.</returns>
        public DataTable SearchIndexData(string[] ReturnFileds, string IndexData, string HightLightFileds, string SearchKey, string QueryFiled, int PageSize, int PageIndex, out int TotalCount, string SortFiled, bool Sort, out long RunTime)
        {
            string indexPath = HttpContext.Current.Server.MapPath(IndexData);
            FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
            IndexReader reader = IndexReader.Open(directory, true);
            IndexSearcher searcher = new IndexSearcher(reader);
            try
            {
                Stopwatch stopwatch = Stopwatch.StartNew();
                Query query = new TermQuery(new Term(QueryFiled, SearchKey));
                Sort sort = new Sort(new SortField(SortFiled, SortField.DOC, Sort));
                // Fetch only the top PageSize*PageIndex hits: enough to slice out the requested page.
                TopDocs docs = searcher.Search(query, (Filter)null, PageSize * PageIndex, sort);
                stopwatch.Stop();
                RunTime = stopwatch.ElapsedMilliseconds;
                if (docs == null || docs.totalHits == 0)
                {
                    TotalCount = 0;
                    return null;
                }
                TotalCount = docs.totalHits;
                DataTable Dt_Return = new DataTable();
                if (ReturnFileds.Length > 0)
                {
                    foreach (string name in ReturnFileds)
                    {
                        Dt_Return.Columns.Add(name);
                    }
                    // PERF FIX: original looped over ALL totalHits and filtered with an if;
                    // iterate only the requested page slice instead.
                    int start = PageSize * (PageIndex - 1);
                    int end = Math.Min(PageSize * PageIndex, docs.scoreDocs.Length);
                    for (int i = start; i < end; i++)
                    {
                        Document doc = searcher.Doc(docs.scoreDocs[i].doc);
                        DataRow row = Dt_Return.NewRow();
                        foreach (DataColumn Column in Dt_Return.Columns)
                        {
                            row[Column.ColumnName] = HightLightFileds == Column.ColumnName
                                ? HightLight(SearchKey, doc.Get(Column.ColumnName))
                                : doc.Get(Column.ColumnName);
                        }
                        Dt_Return.Rows.Add(row);
                    }
                }
                return Dt_Return;
            }
            finally
            {
                // BUG FIX: original never closed reader/searcher/directory (handle leak).
                searcher.Close();
                reader.Close();
                directory.Close();
            }
        }

        /// <summary>
        /// Phrase search: the key is tokenized with PanGu and the resulting terms must occur
        /// within <paramref name="Wordspacing"/> positions of each other (PhraseQuery slop).
        /// Paging, sorting and highlighting semantics are the same as <see cref="SearchIndexData"/>.
        /// </summary>
        /// <param name="ReturnFileds">Stored field names to copy into the result table.</param>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <param name="HightLightFileds">Name of the single column to highlight; may be empty.</param>
        /// <param name="SearchKey">Keyword; tokenized before querying.</param>
        /// <param name="QueryFiled">Field to query.</param>
        /// <param name="PageSize">Rows per page.</param>
        /// <param name="PageIndex">1-based page number.</param>
        /// <param name="TotalCount">Out: total hit count.</param>
        /// <param name="SortFiled">Sort field name.</param>
        /// <param name="Sort">true = descending, false = ascending.</param>
        /// <param name="Wordspacing">Maximum positional distance between phrase terms.</param>
        /// <param name="RunTime">Out: query time in milliseconds.</param>
        /// <returns>One page of results, or null when there are no hits.</returns>
        public DataTable SearchIndexDataForWordspace(string[] ReturnFileds, string IndexData, string HightLightFileds, string SearchKey, string QueryFiled, int PageSize, int PageIndex, out int TotalCount, string SortFiled, bool Sort, int Wordspacing, out long RunTime)
        {
            string indexPath = HttpContext.Current.Server.MapPath(IndexData);
            FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
            IndexReader reader = IndexReader.Open(directory, true);
            IndexSearcher searcher = new IndexSearcher(reader);
            try
            {
                Stopwatch stopwatch = Stopwatch.StartNew();
                PhraseQuery query = new PhraseQuery();
                foreach (string word in SplitWords(SearchKey))
                {
                    query.Add(new Term(QueryFiled, word));
                }
                query.SetSlop(Wordspacing);
                Sort sort = new Sort(new SortField(SortFiled, SortField.DOC, Sort));
                TopDocs docs = searcher.Search(query, (Filter)null, PageSize * PageIndex, sort);
                stopwatch.Stop();
                RunTime = stopwatch.ElapsedMilliseconds;
                if (docs == null || docs.totalHits == 0)
                {
                    TotalCount = 0;
                    return null;
                }
                TotalCount = docs.totalHits;
                DataTable Dt_Return = new DataTable();
                if (ReturnFileds.Length > 0)
                {
                    foreach (string name in ReturnFileds)
                    {
                        Dt_Return.Columns.Add(name);
                    }
                    // PERF FIX: iterate only the requested page slice, not every hit.
                    int start = PageSize * (PageIndex - 1);
                    int end = Math.Min(PageSize * PageIndex, docs.scoreDocs.Length);
                    for (int i = start; i < end; i++)
                    {
                        Document doc = searcher.Doc(docs.scoreDocs[i].doc);
                        DataRow row = Dt_Return.NewRow();
                        foreach (DataColumn Column in Dt_Return.Columns)
                        {
                            row[Column.ColumnName] = HightLightFileds == Column.ColumnName
                                ? HightLight(SearchKey, doc.Get(Column.ColumnName))
                                : doc.Get(Column.ColumnName);
                        }
                        Dt_Return.Rows.Add(row);
                    }
                }
                return Dt_Return;
            }
            finally
            {
                // BUG FIX: original leaked reader/searcher/directory handles.
                searcher.Close();
                reader.Close();
                directory.Close();
            }
        }


        /// <summary>
        /// Wildcard search on a single term: '?' matches exactly one character,
        /// '*' matches zero or more. Paging, sorting and highlighting semantics
        /// are the same as <see cref="SearchIndexData"/>.
        /// </summary>
        /// <param name="ReturnFileds">Stored field names to copy into the result table.</param>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <param name="HightLightFileds">Name of the single column to highlight; may be empty.</param>
        /// <param name="SearchKey">Wildcard pattern.</param>
        /// <param name="QueryFiled">Field to query.</param>
        /// <param name="PageSize">Rows per page.</param>
        /// <param name="PageIndex">1-based page number.</param>
        /// <param name="TotalCount">Out: total hit count.</param>
        /// <param name="SortFiled">Sort field name.</param>
        /// <param name="Sort">true = descending, false = ascending.</param>
        /// <param name="RunTime">Out: query time in milliseconds.</param>
        /// <returns>One page of results, or null when there are no hits.</returns>
        public DataTable SearchIndexDataForWildcard(string[] ReturnFileds, string IndexData, string HightLightFileds, string SearchKey, string QueryFiled, int PageSize, int PageIndex, out int TotalCount, string SortFiled, bool Sort, out long RunTime)
        {
            string indexPath = HttpContext.Current.Server.MapPath(IndexData);
            FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
            IndexReader reader = IndexReader.Open(directory, true);
            IndexSearcher searcher = new IndexSearcher(reader);
            try
            {
                Stopwatch stopwatch = Stopwatch.StartNew();
                Query query = new WildcardQuery(new Term(QueryFiled, SearchKey));
                Sort sort = new Sort(new SortField(SortFiled, SortField.DOC, Sort));
                TopDocs docs = searcher.Search(query, (Filter)null, PageSize * PageIndex, sort);
                stopwatch.Stop();
                RunTime = stopwatch.ElapsedMilliseconds;
                if (docs == null || docs.totalHits == 0)
                {
                    TotalCount = 0;
                    return null;
                }
                TotalCount = docs.totalHits;
                DataTable Dt_Return = new DataTable();
                if (ReturnFileds.Length > 0)
                {
                    foreach (string name in ReturnFileds)
                    {
                        Dt_Return.Columns.Add(name);
                    }
                    // PERF FIX: iterate only the requested page slice, not every hit.
                    int start = PageSize * (PageIndex - 1);
                    int end = Math.Min(PageSize * PageIndex, docs.scoreDocs.Length);
                    for (int i = start; i < end; i++)
                    {
                        Document doc = searcher.Doc(docs.scoreDocs[i].doc);
                        DataRow row = Dt_Return.NewRow();
                        foreach (DataColumn Column in Dt_Return.Columns)
                        {
                            row[Column.ColumnName] = HightLightFileds == Column.ColumnName
                                ? HightLight(SearchKey, doc.Get(Column.ColumnName))
                                : doc.Get(Column.ColumnName);
                        }
                        Dt_Return.Rows.Add(row);
                    }
                }
                return Dt_Return;
            }
            finally
            {
                // BUG FIX: original leaked reader/searcher/directory handles.
                searcher.Close();
                reader.Close();
                directory.Close();
            }
        }

        /// <summary>
        /// Boolean ("logic") search: the key is PanGu-tokenized and, for every token,
        /// a MUST contains-wildcard clause plus SHOULD exact-term and prefix clauses
        /// are combined into one BooleanQuery.
        /// BooleanClause.Occur.MUST     = logical AND (must contain)
        /// BooleanClause.Occur.MUST_NOT = logical NOT (must not contain)
        /// BooleanClause.Occur.SHOULD   = logical OR  (may contain)
        /// Paging, sorting and highlighting semantics are the same as <see cref="SearchIndexData"/>.
        /// </summary>
        /// <param name="ReturnFileds">Stored field names to copy into the result table.</param>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <param name="HightLightFileds">Name of the single column to highlight; may be empty.</param>
        /// <param name="SearchKey">Keyword; trailing whitespace is trimmed before tokenizing.</param>
        /// <param name="QueryFiled">Field to query.</param>
        /// <param name="PageSize">Rows per page.</param>
        /// <param name="PageIndex">1-based page number.</param>
        /// <param name="TotalCount">Out: total hit count.</param>
        /// <param name="SortFiled">Sort field name.</param>
        /// <param name="Sort">true = descending, false = ascending.</param>
        /// <param name="RunTime">Out: query time in milliseconds.</param>
        /// <returns>One page of results, or null when there are no hits.</returns>
        public DataTable SearchIndexDataForLogic(string[] ReturnFileds, string IndexData, string HightLightFileds, string SearchKey, string QueryFiled, int PageSize, int PageIndex, out int TotalCount, string SortFiled, bool Sort, out long RunTime)
        {
            string indexPath = HttpContext.Current.Server.MapPath(IndexData);
            FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
            IndexReader reader = IndexReader.Open(directory, true);
            IndexSearcher searcher = new IndexSearcher(reader);
            try
            {
                Stopwatch stopwatch = Stopwatch.StartNew();
                BooleanQuery query = new BooleanQuery();
                foreach (string word in SplitWords(SearchKey.TrimEnd()))
                {
                    // NOTE(review): a leading-'*' wildcard forces a full term scan and is slow
                    // on large indexes; kept because callers may rely on substring matching.
                    query.Add(new WildcardQuery(new Term(QueryFiled, "*" + word + "*")), BooleanClause.Occur.MUST);
                    query.Add(new TermQuery(new Term(QueryFiled, word)), BooleanClause.Occur.SHOULD);
                    query.Add(new PrefixQuery(new Term(QueryFiled, word)), BooleanClause.Occur.SHOULD);
                }
                Sort sort = new Sort(new SortField(SortFiled, SortField.DOC, Sort));
                TopDocs docs = searcher.Search(query, (Filter)null, PageSize * PageIndex, sort);
                stopwatch.Stop();
                RunTime = stopwatch.ElapsedMilliseconds;
                if (docs == null || docs.totalHits == 0)
                {
                    TotalCount = 0;
                    return null;
                }
                TotalCount = docs.totalHits;
                DataTable Dt_Return = new DataTable();
                if (ReturnFileds.Length > 0)
                {
                    foreach (string name in ReturnFileds)
                    {
                        Dt_Return.Columns.Add(name);
                    }
                    // PERF FIX: iterate only the requested page slice, not every hit.
                    int start = PageSize * (PageIndex - 1);
                    int end = Math.Min(PageSize * PageIndex, docs.scoreDocs.Length);
                    for (int i = start; i < end; i++)
                    {
                        Document doc = searcher.Doc(docs.scoreDocs[i].doc);
                        DataRow row = Dt_Return.NewRow();
                        foreach (DataColumn Column in Dt_Return.Columns)
                        {
                            row[Column.ColumnName] = HightLightFileds == Column.ColumnName
                                ? HightLight(SearchKey.TrimEnd(), doc.Get(Column.ColumnName))
                                : doc.Get(Column.ColumnName);
                        }
                        Dt_Return.Rows.Add(row);
                    }
                }
                return Dt_Return;
            }
            finally
            {
                // BUG FIX: original leaked reader/searcher/directory handles.
                searcher.Close();
                reader.Close();
                directory.Close();
            }
        }

        /// <summary>
        /// Prefix search: matches terms that start with <paramref name="SearchKey"/>
        /// (trailing whitespace trimmed). Paging, sorting and highlighting semantics
        /// are the same as <see cref="SearchIndexData"/>.
        /// </summary>
        /// <param name="ReturnFileds">Stored field names to copy into the result table.</param>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <param name="HightLightFileds">Name of the single column to highlight; may be empty.</param>
        /// <param name="SearchKey">Prefix to match.</param>
        /// <param name="QueryFiled">Field to query.</param>
        /// <param name="PageSize">Rows per page.</param>
        /// <param name="PageIndex">1-based page number.</param>
        /// <param name="TotalCount">Out: total hit count.</param>
        /// <param name="SortFiled">Sort field name.</param>
        /// <param name="Sort">true = descending, false = ascending.</param>
        /// <param name="RunTime">Out: query time in milliseconds.</param>
        /// <returns>One page of results, or null when there are no hits.</returns>
        public DataTable SearchIndexDataForFirst(string[] ReturnFileds, string IndexData, string HightLightFileds, string SearchKey, string QueryFiled, int PageSize, int PageIndex, out int TotalCount, string SortFiled, bool Sort, out long RunTime)
        {
            string indexPath = HttpContext.Current.Server.MapPath(IndexData);
            FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
            IndexReader reader = IndexReader.Open(directory, true);
            IndexSearcher searcher = new IndexSearcher(reader);
            try
            {
                Stopwatch stopwatch = Stopwatch.StartNew();
                PrefixQuery query = new PrefixQuery(new Term(QueryFiled, SearchKey.TrimEnd()));
                Sort sort = new Sort(new SortField(SortFiled, SortField.DOC, Sort));
                TopDocs docs = searcher.Search(query, (Filter)null, PageSize * PageIndex, sort);
                stopwatch.Stop();
                RunTime = stopwatch.ElapsedMilliseconds;
                if (docs == null || docs.totalHits == 0)
                {
                    TotalCount = 0;
                    return null;
                }
                TotalCount = docs.totalHits;
                DataTable Dt_Return = new DataTable();
                if (ReturnFileds.Length > 0)
                {
                    foreach (string name in ReturnFileds)
                    {
                        Dt_Return.Columns.Add(name);
                    }
                    // PERF FIX: iterate only the requested page slice, not every hit.
                    int start = PageSize * (PageIndex - 1);
                    int end = Math.Min(PageSize * PageIndex, docs.scoreDocs.Length);
                    for (int i = start; i < end; i++)
                    {
                        Document doc = searcher.Doc(docs.scoreDocs[i].doc);
                        DataRow row = Dt_Return.NewRow();
                        foreach (DataColumn Column in Dt_Return.Columns)
                        {
                            row[Column.ColumnName] = HightLightFileds == Column.ColumnName
                                ? HightLight(SearchKey.TrimEnd(), doc.Get(Column.ColumnName))
                                : doc.Get(Column.ColumnName);
                        }
                        Dt_Return.Rows.Add(row);
                    }
                }
                return Dt_Return;
            }
            finally
            {
                // BUG FIX: original leaked reader/searcher/directory handles.
                searcher.Close();
                reader.Close();
                directory.Close();
            }
        }

        /// <summary>
        /// Fuzzy search: matches terms similar to the key (FuzzyQuery with minimum
        /// similarity 0.5). Paging, sorting and highlighting semantics are the same
        /// as <see cref="SearchIndexData"/>.
        /// </summary>
        /// <param name="ReturnFileds">Stored field names to copy into the result table.</param>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <param name="HightLightFileds">Name of the single column to highlight; may be empty.</param>
        /// <param name="SearchKey">Term to fuzz-match (trailing whitespace trimmed).</param>
        /// <param name="QueryFiled">Field to query.</param>
        /// <param name="PageSize">Rows per page.</param>
        /// <param name="PageIndex">1-based page number.</param>
        /// <param name="TotalCount">Out: total hit count.</param>
        /// <param name="SortFiled">Sort field name.</param>
        /// <param name="Sort">true = descending, false = ascending.</param>
        /// <param name="RunTime">Out: query time in milliseconds.</param>
        /// <returns>One page of results, or null when there are no hits.</returns>
        public DataTable SearchIndexDataForSimilar(string[] ReturnFileds, string IndexData, string HightLightFileds, string SearchKey, string QueryFiled, int PageSize, int PageIndex, out int TotalCount, string SortFiled, bool Sort, out long RunTime)
        {
            string indexPath = HttpContext.Current.Server.MapPath(IndexData);
            FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
            IndexReader reader = IndexReader.Open(directory, true);
            IndexSearcher searcher = new IndexSearcher(reader);
            try
            {
                Stopwatch stopwatch = Stopwatch.StartNew();
                Query query = new FuzzyQuery(new Term(QueryFiled, SearchKey.TrimEnd()), 0.5f);
                Sort sort = new Sort(new SortField(SortFiled, SortField.DOC, Sort));
                TopDocs docs = searcher.Search(query, (Filter)null, PageSize * PageIndex, sort);
                stopwatch.Stop();
                RunTime = stopwatch.ElapsedMilliseconds;
                if (docs == null || docs.totalHits == 0)
                {
                    TotalCount = 0;
                    return null;
                }
                TotalCount = docs.totalHits;
                DataTable Dt_Return = new DataTable();
                if (ReturnFileds.Length > 0)
                {
                    foreach (string name in ReturnFileds)
                    {
                        Dt_Return.Columns.Add(name);
                    }
                    // PERF FIX: iterate only the requested page slice, not every hit.
                    int start = PageSize * (PageIndex - 1);
                    int end = Math.Min(PageSize * PageIndex, docs.scoreDocs.Length);
                    for (int i = start; i < end; i++)
                    {
                        Document doc = searcher.Doc(docs.scoreDocs[i].doc);
                        DataRow row = Dt_Return.NewRow();
                        foreach (DataColumn Column in Dt_Return.Columns)
                        {
                            row[Column.ColumnName] = HightLightFileds == Column.ColumnName
                                ? HightLight(SearchKey.TrimEnd(), doc.Get(Column.ColumnName))
                                : doc.Get(Column.ColumnName);
                        }
                        Dt_Return.Rows.Add(row);
                    }
                }
                return Dt_Return;
            }
            finally
            {
                // BUG FIX: original leaked reader/searcher/directory handles.
                searcher.Close();
                reader.Close();
                directory.Close();
            }
        }

        /// <summary>
        /// Range search over a field, inclusive on both ends (RangeQuery from
        /// <paramref name="FirstTime"/> to <paramref name="EndTime"/>; lexicographic
        /// term comparison, so time values must be stored in a sortable string format).
        /// Paging, sorting and highlighting semantics are the same as <see cref="SearchIndexData"/>.
        /// </summary>
        /// <param name="ReturnFileds">Stored field names to copy into the result table.</param>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <param name="HightLightFileds">Name of the single column to highlight; may be empty.</param>
        /// <param name="SearchKey">Keyword used only for highlighting the results.</param>
        /// <param name="QueryFiled">Field holding the range values.</param>
        /// <param name="PageSize">Rows per page.</param>
        /// <param name="PageIndex">1-based page number.</param>
        /// <param name="TotalCount">Out: total hit count.</param>
        /// <param name="SortFiled">Sort field name.</param>
        /// <param name="Sort">true = descending, false = ascending.</param>
        /// <param name="FirstTime">Inclusive lower bound.</param>
        /// <param name="EndTime">Inclusive upper bound.</param>
        /// <param name="RunTime">Out: query time in milliseconds.</param>
        /// <returns>One page of results, or null when there are no hits.</returns>
        public DataTable SearchIndexDataForTime(string[] ReturnFileds, string IndexData, string HightLightFileds, string SearchKey, string QueryFiled, int PageSize, int PageIndex, out int TotalCount, string SortFiled, bool Sort, string FirstTime, string EndTime, out long RunTime)
        {
            string indexPath = HttpContext.Current.Server.MapPath(IndexData);
            FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
            IndexReader reader = IndexReader.Open(directory, true);
            IndexSearcher searcher = new IndexSearcher(reader);
            try
            {
                Stopwatch stopwatch = Stopwatch.StartNew();
                RangeQuery query = new RangeQuery(new Term(QueryFiled, FirstTime), new Term(QueryFiled, EndTime), true);
                Sort sort = new Sort(new SortField(SortFiled, SortField.DOC, Sort));
                TopDocs docs = searcher.Search(query, (Filter)null, PageSize * PageIndex, sort);
                stopwatch.Stop();
                RunTime = stopwatch.ElapsedMilliseconds;
                if (docs == null || docs.totalHits == 0)
                {
                    TotalCount = 0;
                    return null;
                }
                TotalCount = docs.totalHits;
                DataTable Dt_Return = new DataTable();
                if (ReturnFileds.Length > 0)
                {
                    foreach (string name in ReturnFileds)
                    {
                        Dt_Return.Columns.Add(name);
                    }
                    // PERF FIX: iterate only the requested page slice, not every hit.
                    int start = PageSize * (PageIndex - 1);
                    int end = Math.Min(PageSize * PageIndex, docs.scoreDocs.Length);
                    for (int i = start; i < end; i++)
                    {
                        Document doc = searcher.Doc(docs.scoreDocs[i].doc);
                        DataRow row = Dt_Return.NewRow();
                        foreach (DataColumn Column in Dt_Return.Columns)
                        {
                            row[Column.ColumnName] = HightLightFileds == Column.ColumnName
                                ? HightLight(SearchKey.TrimEnd(), doc.Get(Column.ColumnName))
                                : doc.Get(Column.ColumnName);
                        }
                        Dt_Return.Rows.Add(row);
                    }
                }
                return Dt_Return;
            }
            finally
            {
                // BUG FIX: original leaked reader/searcher/directory handles.
                searcher.Close();
                reader.Close();
                directory.Close();
            }
        }


        /// <summary>
        /// Deletes every document whose <paramref name="QueryFiled"/> term equals
        /// <paramref name="FiledValue"/> (documents go to the "recycle bin" until the
        /// index is optimized).
        /// </summary>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <param name="QueryFiled">Field used to locate documents.</param>
        /// <param name="FiledValue">Exact term value to delete by.</param>
        /// <returns>true on success; false if any exception occurred (swallowed by design).</returns>
        public bool DeleteTrueIndexData(string IndexData, string QueryFiled, string FiledValue)
        {
            try
            {
                Lucene.Net.Store.Directory Directory = FSDirectory.GetDirectory(HttpContext.Current.Server.MapPath(IndexData), false);
                try
                {
                    IndexWriter Writer = new IndexWriter(Directory, new PanGuAnalyzer(), false);
                    try
                    {
                        Writer.DeleteDocuments(new Term(QueryFiled, FiledValue));
                    }
                    finally
                    {
                        // BUG FIX: close the writer even when DeleteDocuments throws,
                        // otherwise the write.lock is leaked.
                        Writer.Close();
                    }
                }
                finally
                {
                    Directory.Close();
                }
                return true;
            }
            catch (Exception)
            {
                return false;
            }
        }

        /// <summary>
        /// Restores all soft-deleted documents in the index (IndexReader.UndeleteAll);
        /// only possible before the index has been optimized.
        /// </summary>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <returns>true on success; false if any exception occurred (swallowed by design).</returns>
        public bool UnDeleteRecycleData(string IndexData)
        {
            try
            {
                Lucene.Net.Store.Directory Directory = FSDirectory.GetDirectory(HttpContext.Current.Server.MapPath(IndexData), false);
                try
                {
                    // Reader must be opened read-write (readOnly: false) to undelete.
                    IndexReader Reader = IndexReader.Open(Directory, false);
                    try
                    {
                        Reader.UndeleteAll();
                    }
                    finally
                    {
                        // BUG FIX: close the reader even when UndeleteAll throws.
                        Reader.Close();
                    }
                }
                finally
                {
                    Directory.Close();
                }
                return true;
            }
            catch (Exception)
            {
                return false;
            }
        }


        /// <summary>
        /// Optimizes (merges) the index. WARNING: expensive — needs roughly twice the
        /// index's disk space while running, and permanently discards all soft-deleted
        /// ("recycle bin") documents.
        /// </summary>
        /// <param name="IndexData">Virtual path of the index folder.</param>
        /// <returns>true on success; false if any exception occurred (swallowed by design).</returns>
        public bool OptimizeIndexData(string IndexData)
        {
            try
            {
                Lucene.Net.Store.Directory Directory = FSDirectory.GetDirectory(HttpContext.Current.Server.MapPath(IndexData), false);
                try
                {
                    IndexWriter Writer = new IndexWriter(Directory, new PanGuAnalyzer(), false);
                    try
                    {
                        Writer.Optimize();
                    }
                    finally
                    {
                        // BUG FIX: close the writer even when Optimize throws,
                        // otherwise the write.lock is leaked.
                        Writer.Close();
                    }
                }
                finally
                {
                    Directory.Close();
                }
                return true;
            }
            catch (Exception)
            {
                return false;
            }
        }

        /// <summary>
        /// Tokenizes the given text with the PanGu analyzer and returns the tokens.
        /// </summary>
        /// <param name="content">User-entered keyword text.</param>
        /// <returns>Array of tokens produced by PanGu segmentation (may be empty).</returns>
        public static string[] SplitWords(string content)
        {
            List<string> strList = new List<string>();
            Analyzer analyzer = new PanGuAnalyzer(); // PanGu Chinese word segmentation
            TokenStream tokenStream = analyzer.TokenStream("", new StringReader(content));
            try
            {
                Lucene.Net.Analysis.Token token;
                while ((token = tokenStream.Next()) != null)
                {
                    strList.Add(token.TermText());
                }
            }
            finally
            {
                // BUG FIX: the original never closed the token stream.
                tokenStream.Close();
            }
            return strList.ToArray();
        }

        // Requires a reference to PanGu.HighLight.dll.
        /// <summary>
        /// Wraps occurrences of the keyword in the content with a red/bold HTML tag
        /// and returns the best-matching fragment (up to 1000 chars). When the
        /// highlighter finds nothing, the original content is returned unchanged.
        /// </summary>
        /// <param name="keyword">Search keyword.</param>
        /// <param name="content">Stored field text to highlight.</param>
        /// <returns>Highlighted fragment, or the original content if no match.</returns>
        public static string HightLight(string keyword, string content)
        {
            // HTMLFormatter: the prefix/suffix wrapped around each highlighted word.
            PanGu.HighLight.SimpleHTMLFormatter simpleHTMLFormatter =
                new PanGu.HighLight.SimpleHTMLFormatter("<font style=\"font-style:normal;color:#cc0000;\"><b>", "</b></font>");
            // Highlighter built from the formatter and a PanGu Segment.
            PanGu.HighLight.Highlighter highlighter =
                            new PanGu.HighLight.Highlighter(simpleHTMLFormatter,
                            new Segment());
            // Maximum characters per summary fragment.
            highlighter.FragmentSize = 1000;
            string HightLightFont = highlighter.GetBestFragment(keyword, content);
            // BUG FIX: GetBestFragment can return null; the original called .Trim()
            // on it and threw NullReferenceException.
            return string.IsNullOrWhiteSpace(HightLightFont) ? content : HightLightFont;
        }
    }
}