Using Hibernate Search

hibernate.cfg.xml

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE hibernate-configuration PUBLIC
		"-//Hibernate/Hibernate Configuration DTD 3.0//EN"
		"http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
    <session-factory name="sessionFactory">
        
        <property name="hibernate.connection.driver_class">com.mysql.jdbc.Driver</property>
        <property name="hibernate.connection.url">jdbc:mysql://127.0.0.1:3306/hibernate_search</property>
        <property name="hibernate.connection.username">travis</property>
        <property name="hibernate.dialect">org.hibernate.dialect.MySQL5InnoDBDialect</property>
        <property name="hibernate.hbm2ddl.auto" value="update" /> 
        
                <property name="hibernate.search.lucene_version" value="LUCENE_36"/>
		<property name="hibernate.search.default.directory_provider" value="filesystem"/>
		<property name="hibernate.search.default.indexBase" value="target/lucene/indexes"/>
		
		<event type="post-update">
			<listener class="org.hibernate.search.event.FullTextIndexEventListener" />
		</event>
		
		<event type="post-insert">
			<listener class="org.hibernate.search.event.FullTextIndexEventListener" />
		</event>
		
		<event type="post-delete">
			<listener class="org.hibernate.search.event.FullTextIndexEventListener" />
		</event>
		
		<event type="post-collection-recreate">
			<listener class="org.hibernate.search.event.FullTextIndexEventListener" />
		</event>
		
		<event type="post-collection-remove">
			<listener class="org.hibernate.search.event.FullTextIndexEventListener" />
		</event>
		
		<event type="post-collection-update">
			<listener class="org.hibernate.search.event.FullTextIndexEventListener" />
		</event> 
		
    </session-factory>
</hibernate-configuration>
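
When the configuration is used without Spring, the SessionFactory can be bootstrapped straight from this hibernate.cfg.xml; note that in that case the entity classes shown below would also need <mapping class="..."/> entries in the file. A minimal sketch (the class name and bootstrap style are assumptions, using the Hibernate 4 / Hibernate Search 4 APIs):

package org.hibernate.search.hibernate.example;

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

public class StandaloneBootstrap {

	public static void main(String[] args) throws InterruptedException {
		// Reads hibernate.cfg.xml from the classpath root
		SessionFactory sessionFactory = new Configuration().configure().buildSessionFactory();
		// Wrap a plain Session to obtain the full-text API
		FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.openSession());
		try {
			// Rebuild the Lucene index from the database, same as IndexManger further below
			fullTextSession.createIndexer().startAndWait();
		} finally {
			fullTextSession.close();
			sessionFactory.close();
		}
	}
}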

Spring configuration file (applicationContext.xml)

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:context="http://www.springframework.org/schema/context"
	xmlns:mvc="http://www.springframework.org/schema/mvc" xmlns:jdbc="http://www.springframework.org/schema/jdbc"
	xmlns:aop="http://www.springframework.org/schema/aop" xmlns:tx="http://www.springframework.org/schema/tx"
	xsi:schemaLocation="http://www.springframework.org/schema/mvc 
	http://www.springframework.org/schema/mvc/spring-mvc-3.1.xsd
	http://www.springframework.org/schema/beans 
	http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
	http://www.springframework.org/schema/aop 
	http://www.springframework.org/schema/aop/spring-aop-3.1.xsd
	http://www.springframework.org/schema/tx 
	http://www.springframework.org/schema/tx/spring-tx-3.1.xsd
	http://www.springframework.org/schema/jdbc 
	http://www.springframework.org/schema/jdbc/spring-jdbc-3.1.xsd
	http://www.springframework.org/schema/context 
	http://www.springframework.org/schema/context/spring-context-3.1.xsd">

	<context:annotation-config />

	<context:property-placeholder location="classpath:jdbc.properties" />

	<context:component-scan base-package="org.hibernate.search.hibernate.example">
		<context:exclude-filter type="annotation"
			expression="org.springframework.stereotype.Controller" />
	</context:component-scan>

	<!-- Alibaba Druid data source -->
	<!-- https://github.com/alibaba/druid/wiki/%E5%B8%B8%E8%A7%81%E9%97%AE%E9%A2%98 -->
	<bean id="hibernate4DataSource" class="com.alibaba.druid.pool.DruidDataSource"
		init-method="init" destroy-method="close">
		<property name="username" value="${username}"></property>
		<property name="password" value="${password}"></property>
		<property name="url" value="${log4jdbc.url}"></property>
		<property name="driverClassName" value="${log4jdbc.driverClassName}"></property>

		<!-- Initial, minimum and maximum pool size -->
		<property name="initialSize" value="1" />
		<property name="minIdle" value="1" />
		<property name="maxActive" value="20" />

		<!-- Maximum wait time (ms) when acquiring a connection -->
		<property name="maxWait" value="60000" />

		<!-- Interval (ms) between eviction runs that close idle connections -->
		<property name="timeBetweenEvictionRunsMillis" value="60000" />

		<!-- Minimum time (ms) a connection stays idle in the pool before it may be evicted -->
		<property name="minEvictableIdleTimeMillis" value="300000" />

		<property name="validationQuery" value="SELECT 'x'" />
		<property name="testWhileIdle" value="true" />
		<property name="testOnBorrow" value="false" />
		<property name="testOnReturn" value="false" />

		<!-- Enable the PreparedStatement cache and set its size per connection -->
		<property name="poolPreparedStatements" value="true" />
		<property name="maxPoolPreparedStatementPerConnectionSize"
			value="20" />

		<!-- Filters for monitoring/statistics -->
		<property name="filters" value="stat,log4j" />
	</bean>

	<!-- Initial data import -->

	<jdbc:initialize-database data-source="hibernate4DataSource">
		<jdbc:script location="classpath:hibernate_search.sql"
			encoding="GBK" />
	</jdbc:initialize-database>


	<bean id="hibernate4sessionFactory"
		class="org.springframework.orm.hibernate4.LocalSessionFactoryBean">
		<property name="dataSource" ref="hibernate4DataSource"></property>
		<property name="hibernateProperties">
			<props>
				<prop key="hibernate.dialect">org.hibernate.dialect.MySQL5InnoDBDialect</prop>
				<!-- <prop key="hibernate.show_sql">true</prop> -->
				<!-- <prop key="hibernate.hbm2ddl.auto">update</prop> -->
				<prop key="hibernate.jdbc.batch_size">20</prop>

				<!-- hibernate cache -->
				<prop key="hibernate.cache.use_query_cache">true</prop>
				<prop key="hibernate.cache.use_second_level_cache">true</prop>
				<!-- hibernate.cache.provider_class is a Hibernate 3 setting; with Hibernate 4 the region factory below is used instead -->
				<prop key="hibernate.cache.region.factory_class">org.hibernate.cache.ehcache.EhCacheRegionFactory</prop>
				<prop key="net.sf.ehcache.configurationResourceName">ehcache.xml</prop>

				<!-- hibernate search configuration -->
				<prop key="hibernate.search.lucene_version">LUCENE_36</prop>
				<prop key="hibernate.search.default.directory_provider">filesystem</prop>
				<prop key="hibernate.search.default.indexBase">target/lucene/indexes</prop>

				<!-- hibernate search Index Optimization -->
				<prop key="hibernate.search.default.optimizer.operation_limit.max">1000</prop>
				<prop key="hibernate.search.default.optimizer.transaction_limit.max">100</prop>
			</props>
		</property>

		<property name="packagesToScan">
			<list>
				<value>org.hibernate.search.hibernate.example.model</value>
			</list>
		</property>

		<!-- <property name="annotatedClasses"> <list> <value>org.hibernate.search.hibernate.example.model.Author</value> 
			<value>org.hibernate.search.hibernate.example.model.Book</value> </list> 
			</property> -->

		<!-- <property name="mappingResources"> <list> <value>org/hibernate/search/hibernate/example/model/Author.hbm.xml</value> 
			<value>org/hibernate/search/hibernate/example/model/Book.hbm.xml</value> 
			</list> </property> -->

	</bean>

	<bean class="org.hibernate.search.hibernate.example.IndexManger"
		depends-on="hibernate4sessionFactory" />

	<bean id="hibernate4TransactionManager"
		class="org.springframework.orm.hibernate4.HibernateTransactionManager">
		<property name="sessionFactory" ref="hibernate4sessionFactory"></property>
	</bean>

	<aop:aspectj-autoproxy expose-proxy="true" />
	<tx:annotation-driven transaction-manager="hibernate4TransactionManager" />

	<tx:advice id="txAdvice" transaction-manager="hibernate4TransactionManager">
		<tx:attributes>
			<tx:method name="save*" propagation="REQUIRED" />
			<tx:method name="add*" propagation="REQUIRED" />
			<tx:method name="create*" propagation="REQUIRED" />
			<tx:method name="insert*" propagation="REQUIRED" />
			<tx:method name="update*" propagation="REQUIRED" />
			<tx:method name="merge*" propagation="REQUIRED" />
			<tx:method name="del*" propagation="REQUIRED" />
			<tx:method name="remove*" propagation="REQUIRED" />
			<tx:method name="put*" propagation="REQUIRED" />
			<tx:method name="use*" propagation="REQUIRED" />
			<!-- With Hibernate 4 these must also run in a transaction, otherwise getCurrentSession() cannot obtain a session -->
			<tx:method name="get*" propagation="REQUIRED" read-only="true" />
			<tx:method name="count*" propagation="REQUIRED" read-only="true" />
			<tx:method name="find*" propagation="REQUIRED" read-only="true" />
			<tx:method name="list*" propagation="REQUIRED" read-only="true" />
			<tx:method name="query*" propagation="REQUIRED" read-only="true" />
			<tx:method name="load*" propagation="REQUIRED" read-only="true" />
			<tx:method name="*" read-only="true" />
		</tx:attributes>
	</tx:advice>

	<aop:config expose-proxy="true">
		<aop:pointcut
			expression="execution(* org.hibernate.search.hibernate.example.service.*.*(..))"
			id="pointcut" />
		<aop:advisor advice-ref="txAdvice" pointcut-ref="pointcut" />
	</aop:config>

</beans>
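
The ${...} placeholders above are resolved from jdbc.properties on the classpath. A hypothetical example of that file, with the keys taken from the configuration and purely illustrative values (the log4jdbc.* key names suggest the log4jdbc wrapper driver, but plain MySQL settings work just as well):

# jdbc.properties (illustrative values only)
username=travis
password=secret
log4jdbc.url=jdbc:mysql://127.0.0.1:3306/hibernate_search?useUnicode=true&characterEncoding=UTF-8
log4jdbc.driverClassName=com.mysql.jdbc.Driver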


Entity classes

Author

package org.hibernate.search.hibernate.example.model;

import java.util.Set;

import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import javax.persistence.Table;

import org.codehaus.jackson.annotate.JsonIgnore;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.search.annotations.Analyze;
import org.hibernate.search.annotations.ContainedIn;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Store;

@Entity
@Table(catalog="hibernate_search",name="Author")
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE,region="org.hibernate.search.hibernate.example.model.Author")
public class Author {
	@Id
	@GeneratedValue(strategy=GenerationType.AUTO)
	private Integer id;
	
	@Field(index=Index.YES,analyze=Analyze.NO,store=Store.COMPRESS)
	private String name;
	
	@ManyToMany(fetch=FetchType.LAZY,mappedBy="authors"/*,cascade={CascadeType.PERSIST,CascadeType.MERGE,CascadeType.REFRESH,CascadeType.REMOVE}*/)
	@ContainedIn
	@Cache(usage = CacheConcurrencyStrategy.READ_WRITE,region="org.hibernate.search.hibernate.example.model.Book")
	@JsonIgnore
	private Set<Book> books;
	
	public Integer getId() {
		return id;
	}

	public void setId(Integer id) {
		this.id = id;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}
	
	public Set<Book> getBooks() {
		return books;
	}

	public void setBooks(Set<Book> books) {
		this.books = books;
	}

	public Author() {
	}
	
	public Author(String name) {
		super();
		this.name = name;
	}
	
}


Book

package org.hibernate.search.hibernate.example.model;

import static org.hibernate.search.annotations.FieldCacheType.CLASS;
import static org.hibernate.search.annotations.FieldCacheType.ID;

import java.util.Date;
import java.util.HashSet;
import java.util.Set;

import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.Table;

import net.paoding.analysis.analyzer.PaodingAnalyzer;

import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.search.annotations.Analyze;
import org.hibernate.search.annotations.Analyzer;
import org.hibernate.search.annotations.Boost;
import org.hibernate.search.annotations.CacheFromIndex;
import org.hibernate.search.annotations.DateBridge;
import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Index;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.IndexedEmbedded;
import org.hibernate.search.annotations.Resolution;
import org.hibernate.search.annotations.Store;

@Entity
@Table(catalog="hibernate_search",name="Book")
@Indexed(index="book")
//@Analyzer(impl=IKAnalyzer.class)
@Analyzer(impl=PaodingAnalyzer.class)
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE,region="org.hibernate.search.hibernate.example.model.Book")  
@Boost(2.0f)
@CacheFromIndex( { CLASS, ID } )
public class Book {
	@Id
	@GeneratedValue(strategy=GenerationType.AUTO)
	@DocumentId
	private Integer id;

	@Field(index = Index.YES, analyze = Analyze.YES, store = Store.COMPRESS)
	@Boost(1.5f)
	private String name;
	
	@Field(index = Index.YES, analyze = Analyze.YES, store = Store.COMPRESS)
	@Boost(1.2f)
	private String description;
	
	@Field(index = Index.YES, analyze = Analyze.NO, store = Store.YES)
	@DateBridge(resolution = Resolution.DAY)
	private Date publicationDate;

	@IndexedEmbedded(depth=1)
	@ManyToMany(cascade={CascadeType.PERSIST,CascadeType.MERGE,CascadeType.REFRESH,CascadeType.REMOVE},fetch=FetchType.LAZY)
	@JoinTable(
			catalog="hibernate_search",
			name="Book_Author",
			joinColumns={@JoinColumn(name = "book_id")},
			inverseJoinColumns = {@JoinColumn(name = "author_id")}
	)
	@Cache(usage = CacheConcurrencyStrategy.READ_WRITE,region="org.hibernate.search.hibernate.example.model.Author")
	private Set<Author> authors = new HashSet<Author>();

	public Integer getId() {
		return id;
	}

	public void setId(Integer id) {
		this.id = id;
	}
	
	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getDescription() {
		return description;
	}

	public void setDescription(String description) {
		this.description = description;
	}

	public Date getPublicationDate() {
		return publicationDate;
	}

	public void setPublicationDate(Date publicationDate) {
		this.publicationDate = publicationDate;
	}

	public Set<Author> getAuthors() {
		return authors;
	}

	public void setAuthors(Set<Author> authors) {
		this.authors = authors;
	}

	public Book() {
	}

}
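
Because the association carries @IndexedEmbedded on the Book side and @ContainedIn on the Author side, the author names are indexed into the book document as authors.name, and changes on either side keep the index up to date. A minimal usage sketch (the surrounding service and transaction handling are assumed, not part of the original code):

// Illustrative only: assumes an injected BookDao and an active transaction,
// so the commit triggers the automatic update of the "book" index.
public void addSampleBook(BookDao bookDao) {
	Author author = new Author("Some Author");
	Book book = new Book();
	book.setName("Sample Book");
	book.setDescription("A sample description used for indexing");
	book.setPublicationDate(new Date());
	book.getAuthors().add(author);   // owning side of the many-to-many
	bookDao.add(book);               // CascadeType.PERSIST also saves the author
}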

Result wrapper class

package org.hibernate.search.hibernate.example.model;

import java.util.List;

/**
 * Wrapper for a full-text query result: total hit count plus the current page of matches
 * @author Administrator
 *
 * @param <T>
 */
public class QueryResult<T> {
	
	private int searchresultsize;
	
	List<T> searchresult;

	public int getSearchresultsize() {
		return searchresultsize;
	}

	public void setSearchresultsize(int searchresultsize) {
		this.searchresultsize = searchresultsize;
	}

	public List<T> getSearchresult() {
		return searchresult;
	}

	public void setSearchresult(List<T> searchresult) {
		this.searchresult = searchresult;
	}

	
}


Dao

package org.hibernate.search.hibernate.example.dao.impl;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.util.Version;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
import org.hibernate.search.hibernate.example.dao.BookDao;
import org.hibernate.search.hibernate.example.model.Author;
import org.hibernate.search.hibernate.example.model.Book;
import org.hibernate.search.hibernate.example.model.QueryResult;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Repository;

@Repository(value="bookDaoImpl")
public class BookDaoImpl implements BookDao {
	
	@Autowired
	@Qualifier("hibernate4sessionFactory")
	private SessionFactory sessionFactory;
	
	private Session getSession(){
		return sessionFactory.getCurrentSession();
	}
	
	@Override
	public void add(Book book) {
		
		getSession().persist(book);
		
	}

	@SuppressWarnings("unchecked")
	@Override
	public List<Book> query(int start, int pagesize) {
		return getSession().createCriteria(Book.class).setFirstResult(start).setMaxResults(pagesize).list();
	}

	@Override
	public void update(Book book) {
		getSession().merge(book);
	}

	@Override
	public void delete(Book book) {
		getSession().delete(book);
	}

	@Override
	public void delete(int id) {
		
		getSession().delete(load(id));
		
	}

	@Override
	public QueryResult<Book> query(String keyword, int start, int pagesize,Analyzer analyzer,String...field) throws Exception{
		
		QueryResult<Book> queryResult=new QueryResult<Book>();
		
		List<Book> books=new ArrayList<Book>();
		
		FullTextSession fullTextSession = Search.getFullTextSession(getSession());
		
		//Query with the Hibernate Search DSL, matching name, description and authors.name
		//(the field varargs parameter is only used by this commented-out variant)
		//QueryBuilder qb = fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Book.class ).get();
		//Query luceneQuery = qb.keyword().onFields(field).matching(keyword).createQuery();

		//Query with the plain Lucene API, matching name, description and authors.name
		MultiFieldQueryParser queryParser=new MultiFieldQueryParser(Version.LUCENE_36,new String[]{"name","description","authors.name"}, analyzer);
		Query luceneQuery=queryParser.parse(keyword);
		
		FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery(luceneQuery, Book.class);
		int searchresultsize = fullTextQuery.getResultSize();
		queryResult.setSearchresultsize(searchresultsize);
		System.out.println("共查找到["+searchresultsize+"]条记录");
		
		fullTextQuery.setFirstResult(start);
		fullTextQuery.setMaxResults(pagesize);
		
		//Sort by id, descending
		fullTextQuery.setSort(new Sort(new SortField("id", SortField.INT ,true)));
		
		//Highlighting setup
		SimpleHTMLFormatter formatter=new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
		QueryScorer queryScorer=new QueryScorer(luceneQuery);
		Highlighter highlighter=new Highlighter(formatter, queryScorer);

		@SuppressWarnings("unchecked")
		List<Book> tempresult = fullTextQuery.list();
		for (Book book : tempresult) {
			String highlighterString=null;
			try {
				//Highlight name
				highlighterString=highlighter.getBestFragment(analyzer, "name", book.getName());
				if(highlighterString!=null){
					book.setName(highlighterString);
				}
				//Highlight authors.name
				Set<Author> authors = book.getAuthors();
				for (Author author : authors) {
					highlighterString=highlighter.getBestFragment(analyzer, "authors.name", author.getName());
					if(highlighterString!=null){
						author.setName(highlighterString);
					}
				}
				//Highlight description
				highlighterString=highlighter.getBestFragment(analyzer, "description", book.getDescription());
				if(highlighterString!=null){
					book.setDescription(highlighterString);
				}
			} catch (Exception e) {
				//ignore highlighting failures and keep the original field values
			}
			
			books.add(book);
			
			
			System.out.println("书名:"+book.getName()+"\n描述:"+book.getDescription()+"\n出版日期:"+book.getPublicationDate());
			System.out.println("----------------------------------------------------------");
		}
		
		queryResult.setSearchresult(books);
		
		return queryResult;
	}

	@Override
	public Book load(int id) {
		return (Book) getSession().get(Book.class, id);
	}

	
}
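
The BookDao interface itself is not listed; inferred from the @Override methods above, it would look roughly like this (a reconstruction, not the original source):

package org.hibernate.search.hibernate.example.dao;

import java.util.List;

import org.apache.lucene.analysis.Analyzer;
import org.hibernate.search.hibernate.example.model.Book;
import org.hibernate.search.hibernate.example.model.QueryResult;

public interface BookDao {

	void add(Book book);

	List<Book> query(int start, int pagesize);

	void update(Book book);

	void delete(Book book);

	void delete(int id);

	// Full-text search over the given fields, with paging and highlighting
	QueryResult<Book> query(String keyword, int start, int pagesize, Analyzer analyzer, String... field) throws Exception;

	Book load(int id);
}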



IndexManger — building the index at startup

package org.hibernate.search.hibernate.example;


import org.hibernate.SessionFactory;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

/**
 * Rebuilds the Lucene index when the Spring context starts.
 * @author Administrator
 *
 */
public class IndexManger implements InitializingBean{
	
	@Autowired
	@Qualifier("hibernate4sessionFactory")
	private SessionFactory sessionFactory;

	@Override
	public void afterPropertiesSet() throws Exception {
		//Rebuild the whole Lucene index from the database at startup
		FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.openSession());
		try {
			fullTextSession.createIndexer().startAndWait();
		} finally {
			fullTextSession.close();
		}
	}
}
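
The MassIndexer rebuilds everything in one go; for incremental maintenance a single entity can also be (re)indexed manually via FullTextSession.index(..). A small sketch, assuming a SessionFactory is at hand:

// Sketch: manually (re)index one Book, as an alternative to the MassIndexer
FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.openSession());
try {
	fullTextSession.beginTransaction();
	Book book = (Book) fullTextSession.get(Book.class, 1);
	if (book != null) {
		fullTextSession.index(book);   // add/update this single document in the "book" index
	}
	fullTextSession.getTransaction().commit();  // index changes are applied on commit
} finally {
	fullTextSession.close();
}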

SearchManager — standalone query example

package org.hibernate.search.hibernate.example;

import java.util.List;
import java.util.Set;

import net.paoding.analysis.analyzer.PaodingAnalyzer;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.util.Version;
import org.hibernate.SessionFactory;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;
import org.hibernate.search.hibernate.example.model.Author;
import org.hibernate.search.hibernate.example.model.Book;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

public class SearchManager {
	
	public static void main(String[] args) throws Exception{
		ApplicationContext applicationContext=new ClassPathXmlApplicationContext("applicationContext.xml");
		SessionFactory sessionFactory = applicationContext.getBean("hibernate4sessionFactory",SessionFactory.class);
		FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.openSession());
		
		//Query with the Hibernate Search DSL, matching name, description and authors.name
//		QueryBuilder qb = fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Book.class ).get();
//		Query luceneQuery = qb.keyword().onFields("name","description","authors.name").matching("移动互联网").createQuery();
		
		//Query with the plain Lucene API, matching name, description and authors.name,
		//using the Paoding Chinese analyzer
		MultiFieldQueryParser queryParser=new MultiFieldQueryParser(Version.LUCENE_36, new String[]{"name","description","authors.name"}, new PaodingAnalyzer());
		Query luceneQuery=queryParser.parse("实战");
		
		FullTextQuery fullTextQuery =fullTextSession.createFullTextQuery(luceneQuery, Book.class);
		//Page size
		fullTextQuery.setMaxResults(5);
		//Offset of the first result (first page)
		fullTextQuery.setFirstResult(0);
		
		//Highlighting setup
		SimpleHTMLFormatter formatter=new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
		QueryScorer queryScorer=new QueryScorer(luceneQuery);
		Highlighter highlighter=new Highlighter(formatter, queryScorer);

		@SuppressWarnings("unchecked")
		List<Book> resultList = fullTextQuery.list();
		System.out.println("共查找到["+resultList.size()+"]条记录");
		for (Book book : resultList) {
			String highlighterString=null;
			Analyzer analyzer=new PaodingAnalyzer();
			try {
				//Highlight name
				highlighterString=highlighter.getBestFragment(analyzer, "name", book.getName());
				if(highlighterString!=null){
					book.setName(highlighterString);
				}
				//Highlight authors.name
				Set<Author> authors = book.getAuthors();
				for (Author author : authors) {
					highlighterString=highlighter.getBestFragment(analyzer, "authors.name", author.getName());
					if(highlighterString!=null){
						author.setName(highlighterString);
					}
				}
				//Highlight description
				highlighterString=highlighter.getBestFragment(analyzer, "description", book.getDescription());
				if(highlighterString!=null){
					book.setDescription(highlighterString);
				}
			} catch (Exception e) {
				//ignore highlighting failures and keep the original field values
			}
			
			System.out.println("书名:"+book.getName()+"\n描述:"+book.getDescription()+"\n出版日期:"+book.getPublicationDate());
			System.out.println("----------------------------------------------------------");
		}
		
		fullTextSession.close();
		sessionFactory.close();
		
	}
}

