// SolrUtil
package cn.sniper.solr.util;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.impl.XMLResponseParser;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.FacetField.Count;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import cn.sniper.solr.model.Message;
import cn.sniper.solr.model.Page;
import cn.sniper.solr.model.PaginationVo;
/**
 * Thin static facade over a single shared SolrJ {@link HttpSolrServer} client:
 * add documents/beans, delete, commit/optimize, and several paginated query helpers
 * with optional highlighting and faceting.
 *
 * NOTE(review): all state is static and configured once in the static initializer;
 * thread-safety therefore depends entirely on HttpSolrServer's own guarantees.
 */
public class SolrUtil {
// Target Solr core endpoint. NOTE(review): hard-coded — consider externalizing to configuration.
public static final String URL = "http://localhost:8080/solr/collection1";
// Shared client instance, initialized once below.
private static HttpSolrServer solrServer;
public static final Logger logger = Logger.getLogger(SolrUtil.class);
// Field separator used when composing log messages.
public static final String LOG_SEPARATOR = "#";
// Map key under which callers pass the free-text search keyword.
public static final String QUERY_KEYWORD = "QUERY_KEYWORD";
/**
 * Static initialization: build and configure the shared HttpSolrServer client.
 */
static {
solrServer = new HttpSolrServer(URL);
solrServer.setMaxRetries(1); // defaults to 0. > 1 not recommended.
solrServer.setConnectionTimeout(5000); // 5 seconds to establish TCP
// Setting the XML response parser is only required for cross
// version compatibility and only when one side is 1.4.1 or
// earlier and the other side is 3.1 or later.
solrServer.setParser(new XMLResponseParser()); // binary parser is used by default
// The following settings are provided here for completeness.
// They will not normally be required, and should only be used
// after consulting javadocs to know whether they are truly required.
solrServer.setSoTimeout(1000); // socket read timeout
solrServer.setDefaultMaxConnectionsPerHost(1000);
solrServer.setMaxTotalConnections(1000);
solrServer.setFollowRedirects(false);
// defaults to false
// allowCompression defaults to false.
// Server side must support gzip or deflate for this to have any effect.
solrServer.setAllowCompression(true);
}
/**
 * Adds a single document to the index; each map entry becomes one Solr field.
 * A null or empty map is silently ignored. Does not commit (see {@link #commit()}).
 *
 * @param map field name → field value pairs for the new document
 */
public static void add(Map<String, Object> map) {
    if (null != map && !map.isEmpty()) {
        SolrInputDocument doc = new SolrInputDocument();
        // Iterate entries directly instead of keySet()+get() (one lookup per field).
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            doc.addField(entry.getKey(), entry.getValue());
        }
        try {
            solrServer.add(doc);
        } catch (SolrServerException e) {
            // Pass the exception to the logger so the full stack trace is retained
            // (printStackTrace + getMessage() lost it in log files).
            logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "addDocument" + LOG_SEPARATOR + "添加单个document出现异常。。。", e);
        } catch (IOException e) {
            logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "addDocument" + LOG_SEPARATOR + "添加单个document出现异常。。。", e);
        }
    }
}
/**
 * Adds a batch of documents in one round trip; each map becomes one document.
 * A null or empty list is silently ignored. Does not commit (see {@link #commit()}).
 *
 * @param list list of field-name → field-value maps, one per document
 */
public static void addList(List<Map<String, Object>> list) {
    if (null != list && !list.isEmpty()) {
        List<SolrInputDocument> docList = new ArrayList<SolrInputDocument>(list.size());
        for (Map<String, Object> map : list) {
            SolrInputDocument doc = new SolrInputDocument();
            // Iterate entries directly instead of keySet()+get() (one lookup per field).
            for (Map.Entry<String, Object> entry : map.entrySet()) {
                doc.addField(entry.getKey(), entry.getValue());
            }
            docList.add(doc);
        }
        try {
            solrServer.add(docList);
        } catch (SolrServerException e) {
            // Pass the exception to the logger so the full stack trace is retained.
            logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "addDocumentList" + LOG_SEPARATOR + "批量添加document出现异常。。。", e);
        } catch (IOException e) {
            logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "addDocumentList" + LOG_SEPARATOR + "批量添加document出现异常。。。", e);
        }
    }
}
/**
 * Adds a single annotated bean to the index. A null bean is silently ignored.
 * Does not commit (see {@link #commit()}).
 *
 * @param message the bean to index (fields mapped via SolrJ @Field annotations)
 */
public static void addBean(Message message) {
    if (null != message) {
        try {
            solrServer.addBean(message);
        } catch (SolrServerException e) {
            // Was printStackTrace-only; log with cause for consistency with the other add* methods.
            logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "addBean" + LOG_SEPARATOR + "failed to add bean", e);
        } catch (IOException e) {
            logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "addBean" + LOG_SEPARATOR + "failed to add bean", e);
        }
    }
}
/**
 * Adds a batch of annotated beans in one round trip. A null or empty list is
 * silently ignored. Does not commit (see {@link #commit()}).
 *
 * @param list beans to index
 */
public static void addBeans(List<Message> list) {
    if (null != list && !list.isEmpty()) {
        try {
            solrServer.addBeans(list);
        } catch (SolrServerException e) {
            // Was printStackTrace-only; log with cause for consistency with the other add* methods.
            logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "addBeans" + LOG_SEPARATOR + "failed to add beans", e);
        } catch (IOException e) {
            logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "addBeans" + LOG_SEPARATOR + "failed to add beans", e);
        }
    }
}
/**
 * Deletes every document in the core ({@code *:*}) and immediately issues a
 * hard commit so the deletion becomes durable and visible.
 */
public static void delAll() {
    try {
        solrServer.deleteByQuery("*:*");
    } catch (SolrServerException e) {
        // Pass the exception to the logger so the full stack trace is retained.
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "delAll" + LOG_SEPARATOR + "删除所有索引出现异常。。。", e);
    } catch (IOException e) {
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "delAll" + LOG_SEPARATOR + "删除所有索引出现异常。。。", e);
    }
    // Hard commit (flush to disk).
    commit();
}
/**
 * Paginated keyword search against the catch-all {@code c_all} field, with
 * per-field re-applied highlighting.
 *
 * Background: all source columns (name, nickname, gender, address, id-number, ...)
 * are concatenated into one indexed field {@code c_all} at ingest time, so Solr's
 * highlighting only marks up the concatenated text. This method extracts the
 * highlighted terms from the {@code c_all} snippet and re-wraps those terms in
 * each individual stored field so the UI can show "field: highlighted value".
 *
 * @param pageNo   1-based page number
 * @param pageSize rows per page
 * @param map      query parameters; {@link #QUERY_KEYWORD} holds the keyword,
 *                 optional key "type" restricts by {@code c_type}
 * @return pagination VO with total count and one TreeMap per document whose keys
 *         are "(100+i)-fieldName" to preserve field order
 */
public static PaginationVo queryData(int pageNo, int pageSize, Map<String, Object> map) {
    List<Map<String, Object>> returnList = new ArrayList<Map<String, Object>>();
    long total = 0;
    StringBuffer queryStr = new StringBuffer();
    boolean ifHighlight = false;
    if (map != null && !map.isEmpty()) {
        // BUGFIX: the keyword was previously read via map.get(...).toString() BEFORE
        // the null-check on map, throwing NPE when map was null or the key absent.
        Object keywordObj = map.get(QUERY_KEYWORD);
        String keyword = (keywordObj == null) ? null : keywordObj.toString();
        if (map.containsKey("type")) {
            queryStr.append("c_type:").append(map.get("type").toString()).append(" AND ");
        }
        if (StringUtils.isBlank(keyword)) {
            queryStr.append("c_all:* ");
        } else {
            queryStr.append("c_all:").append(keyword);
        }
        ifHighlight = true;
    } else {
        queryStr.append("*:*");
        ifHighlight = false;
    }
    SolrQuery query = new SolrQuery();
    query.setQuery(queryStr.toString())
        .addField("*, score")            // also return each hit's score
        .setParam("defType", "edismax"); // edismax parser: needed for qf weighting / bf functions
    try {
        // First query only to obtain the total hit count for pagination.
        QueryResponse respTotal = solrServer.query(query);
        total = respTotal.getResults().getNumFound();
        Page<HashMap<String, Object>> pageObj = new Page<HashMap<String, Object>>();
        pageObj.setTotalRows(total);
        pageObj.setCurPage(pageNo);
        pageObj.setPageSize(pageSize);
        // NOTE(review): assumes Page.getBeginIndex() is 1-based — hence the -1. Confirm against Page.
        query.setStart(Long.valueOf(pageObj.getBeginIndex()).intValue() - 1);
        query.setRows(pageSize);
        if (ifHighlight) {
            query.setHighlight(true);
            query.addHighlightField("c_all");
            query.setHighlightSimplePre("<font color='red'>");
            query.setHighlightSimplePost("</font>");
        }
        QueryResponse resp = solrServer.query(query);
        SolrDocumentList docList = resp.getResults();
        Map<String, Map<String, List<String>>> highlightingMap = resp.getHighlighting();
        List<String> hitTerms = new ArrayList<String>();
        for (SolrDocument doc : docList) {
            if (ifHighlight) {
                String id = doc.get("id").toString();
                // Guard: highlighting info may be absent for a document (previously NPE'd).
                Map<String, List<String>> docHighlights =
                        (highlightingMap == null) ? null : highlightingMap.get(id);
                // Collect the distinct highlighted terms from the c_all snippet.
                hitTerms.clear();
                List<String> snippets = (docHighlights == null) ? null : docHighlights.get("c_all");
                if (snippets != null && !snippets.isEmpty()) {
                    String snippet = snippets.get(0);
                    int begin = snippet.indexOf("<font");
                    int end = snippet.lastIndexOf("</font>");
                    if (begin >= 0 && end >= begin) { // guard: snippet may contain no markers
                        snippet = snippet.substring(begin, end + "</font>".length());
                        for (String part : snippet.split("</font>")) {
                            int open = part.indexOf("<font color='red'>");
                            if (open >= 0) {
                                String term = part.substring(open + "<font color='red'>".length());
                                if (!hitTerms.contains(term)) {
                                    hitTerms.add(term);
                                }
                            }
                        }
                    }
                }
                int index = 100; // numeric key prefix keeps TreeMap in field order
                TreeMap<String, Object> result = new TreeMap<String, Object>();
                for (String fieldName : doc.getFieldNames()) {
                    if (docHighlights != null && docHighlights.containsKey(fieldName)) {
                        // Solr already highlighted this field — use its snippet as-is.
                        result.put((index++) + "-" + fieldName, docHighlights.get(fieldName).get(0));
                    } else {
                        String value = doc.get(fieldName).toString();
                        if (!"table_name_key".equals(fieldName.toLowerCase())) {
                            for (String term : hitTerms) {
                                // BUGFIX: literal replace() instead of replaceAll() — the
                                // highlighted term is data, not a regex, and metacharacters
                                // in it would corrupt the output or throw.
                                value = value.replace(term, "<font color='red'>" + term + "</font>");
                            }
                        }
                        result.put((index++) + "-" + fieldName, value);
                    }
                }
                returnList.add(result);
            } else {
                TreeMap<String, Object> result = new TreeMap<String, Object>();
                int index = 100;
                for (String fieldName : doc.getFieldNames()) {
                    result.put((index++) + "-" + fieldName, doc.get(fieldName));
                }
                returnList.add(result);
            }
        }
    } catch (SolrServerException e) {
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "queryData" + LOG_SEPARATOR + "检索数据出现异常。。。", e);
    } finally {
        query.clear();
    }
    PaginationVo vo = new PaginationVo();
    vo.setPage(pageNo);
    vo.setTotal(Long.valueOf(total).intValue());
    vo.setRows(returnList);
    return vo;
}
/**
 * Paginated keyword search against {@code c_all}; when highlighting is active,
 * each document's highlighted field values overwrite the stored values in place.
 *
 * @param pageNo   1-based page number
 * @param pageSize rows per page
 * @param map      query parameters; {@link #QUERY_KEYWORD} holds the keyword,
 *                 optional key "type" restricts by {@code c_type}
 * @return pagination VO with total count and one field-value map per document
 */
public static PaginationVo query(int pageNo, int pageSize, Map<String, Object> map) {
    List<Map<String, Object>> returnList = new ArrayList<Map<String, Object>>();
    long total = 0;
    StringBuffer queryStr = new StringBuffer();
    boolean ifHighlight = false;
    if (map != null && !map.isEmpty()) {
        // BUGFIX: keyword was previously read via map.get(...).toString() BEFORE the
        // null-check on map, throwing NPE when map was null or the key absent.
        Object keywordObj = map.get(QUERY_KEYWORD);
        String keyword = (keywordObj == null) ? null : keywordObj.toString();
        if (map.containsKey("type")) {
            queryStr.append("c_type:").append(map.get("type").toString()).append(" AND ");
        }
        if (StringUtils.isBlank(keyword)) {
            queryStr.append("c_all:* ");
        } else {
            queryStr.append("c_all:").append(keyword);
        }
        ifHighlight = true;
    } else {
        queryStr.append("*:*");
        ifHighlight = false;
    }
    SolrQuery query = new SolrQuery();
    query.setQuery(queryStr.toString())
        .setParam("defType", "edismax"); // edismax parser: needed for qf weighting / bf functions
    try {
        // First query only to obtain the total hit count for pagination.
        QueryResponse respTotal = solrServer.query(query);
        total = respTotal.getResults().getNumFound();
        Page<HashMap<String, Object>> pageObj = new Page<HashMap<String, Object>>();
        pageObj.setTotalRows(total);
        pageObj.setCurPage(pageNo);
        pageObj.setPageSize(pageSize);
        // NOTE(review): assumes Page.getBeginIndex() is 1-based — hence the -1. Confirm against Page.
        query.setStart(Long.valueOf(pageObj.getBeginIndex()).intValue() - 1);
        query.setRows(pageSize);
        if (ifHighlight) {
            query.setHighlight(true);
            query.addHighlightField("c_all");
            query.setHighlightSimplePre("<font color='red'>");
            query.setHighlightSimplePost("</font>");
        }
        QueryResponse resp = solrServer.query(query);
        SolrDocumentList docList = resp.getResults();
        Map<String, Map<String, List<String>>> highlightingMap = resp.getHighlighting();
        for (SolrDocument doc : docList) {
            if (ifHighlight) {
                String id = doc.get("id").toString();
                // Guard: highlighting info may be absent for a document (previously NPE'd).
                Map<String, List<String>> docHighlights =
                        (highlightingMap == null) ? null : highlightingMap.get(id);
                if (docHighlights != null) {
                    // Overwrite stored values with their highlighted counterparts.
                    for (Map.Entry<String, List<String>> entry : docHighlights.entrySet()) {
                        doc.setField(entry.getKey(), entry.getValue());
                    }
                }
            }
            returnList.add(doc.getFieldValueMap());
        }
    } catch (SolrServerException e) {
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "queryData" + LOG_SEPARATOR + "检索数据出现异常。。。", e);
    } finally {
        query.clear();
    }
    PaginationVo vo = new PaginationVo();
    vo.setPage(pageNo);
    vo.setTotal(Long.valueOf(total).intValue());
    vo.setRows(returnList);
    return vo;
}
/**
 * Paginated search where each field is matched against its own keyword.
 * Blank/null values are skipped; remaining clauses are joined with AND or OR.
 *
 * @param pageNo   1-based page number
 * @param pageSize rows per page
 * @param map      key = field name, value = value to search in that field
 * @param ifAnd    true → join clauses with AND, false → OR
 *                 (in Solr syntax, whitespace and || mean OR; && means AND)
 * @return pagination VO with total count and one field-value map per document
 */
public static PaginationVo query(int pageNo, int pageSize, Map<String, Object> map, boolean ifAnd) {
    List<Map<String, Object>> returnList = new ArrayList<Map<String, Object>>();
    long total = 0;
    StringBuffer queryStr = new StringBuffer();
    if (map != null && !map.isEmpty()) {
        String op = ifAnd ? " AND " : " OR ";
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            Object obj = entry.getValue();
            if (obj == null || StringUtils.isEmpty(obj.toString())) {
                continue; // skip fields the caller left blank
            }
            queryStr.append(entry.getKey()).append(":").append(obj.toString()).append(op);
        }
        // Trim the trailing connector left by the loop.
        if (queryStr.indexOf(op) != -1) {
            queryStr = new StringBuffer(queryStr.substring(0, queryStr.lastIndexOf(op)));
        }
    }
    // BUGFIX: when map was non-empty but every value was blank, the old code sent an
    // empty query string to Solr; fall back to match-all in that case (and when map is empty).
    if (queryStr.length() == 0) {
        queryStr.append("*:*");
    }
    SolrQuery query = new SolrQuery();
    query.setQuery(queryStr.toString())
        .addField("*, score")            // also return each hit's score (requires edismax)
        .setParam("defType", "edismax"); // edismax parser: needed for qf weighting / bf functions
    try {
        // First query only to obtain the total hit count for pagination.
        QueryResponse respTotal = solrServer.query(query);
        total = respTotal.getResults().getNumFound();
        Page<HashMap<String, Object>> pageObj = new Page<HashMap<String, Object>>();
        pageObj.setTotalRows(total);
        pageObj.setCurPage(pageNo);
        pageObj.setPageSize(pageSize);
        // NOTE(review): assumes Page.getBeginIndex() is 1-based — hence the -1. Confirm against Page.
        query.setStart(Long.valueOf(pageObj.getBeginIndex()).intValue() - 1);
        query.setRows(pageSize);
        QueryResponse resp = solrServer.query(query);
        SolrDocumentList docList = resp.getResults();
        for (SolrDocument doc : docList) {
            returnList.add(doc.getFieldValueMap());
        }
    } catch (SolrServerException e) {
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "queryData" + LOG_SEPARATOR + "检索数据出现异常。。。", e);
    } finally {
        query.clear();
    }
    PaginationVo vo = new PaginationVo();
    vo.setPage(pageNo);
    vo.setTotal(Long.valueOf(total).intValue());
    vo.setRows(returnList);
    return vo;
}
/**
 * Paginated search of one keyword across several fields, using edismax {@code qf}
 * so the listed fields can carry per-field boosts.
 *
 * @param pageNo   1-based page number
 * @param pageSize rows per page
 * @param keyword  the keyword to search for
 * @param ifAnd    currently unused (the q.op experiments are commented out)
 * @param fields   fields to search; empty/null falls back to "*"
 * @return pagination VO with total count and one field-value map per document
 */
public static PaginationVo query(int pageNo, int pageSize, String keyword, boolean ifAnd, String... fields) {
    List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>();
    long totalHits = 0;
    // Build the space-separated qf field list.
    StringBuffer qf = new StringBuffer();
    if (fields == null || fields.length == 0) {
        qf.append("*");
    } else {
        for (String field : fields) {
            qf.append(field).append(" ");
        }
    }
    SolrQuery query = new SolrQuery(keyword);
    query.setParam("qf", qf.toString().trim()) // per-field boosting happens here
        //.setParam("q.op", "AND") //貌似没有作用
        //.setParam("q.op", ifAnd?"AND":"OR") //貌似没有作用
        .addField("*, score")            // also return each hit's score (requires edismax)
        .setParam("defType", "edismax"); // edismax parser required for qf / bf
    try {
        // Probe query: only the total hit count matters here.
        totalHits = solrServer.query(query).getResults().getNumFound();
        Page<HashMap<String, Object>> paging = new Page<HashMap<String, Object>>();
        paging.setTotalRows(totalHits);
        paging.setCurPage(pageNo);
        paging.setPageSize(pageSize);
        query.setStart(Long.valueOf(paging.getBeginIndex()).intValue() - 1); // start offset
        query.setRows(pageSize); // page size
        for (SolrDocument doc : solrServer.query(query).getResults()) {
            rows.add(doc.getFieldValueMap());
        }
    } catch (SolrServerException e) {
        e.printStackTrace();
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "queryData" + LOG_SEPARATOR + "检索数据出现异常。。。" + LOG_SEPARATOR + e.getMessage());
    } finally {
        query.clear();
    }
    PaginationVo vo = new PaginationVo();
    vo.setPage(pageNo);
    vo.setTotal(Long.valueOf(totalHits).intValue());
    vo.setRows(rows);
    return vo;
}
/**
 * Facet (group-count) query: counts documents per value of each facet field,
 * printing name/count pairs to stdout.
 *
 * NOTE(review): a hard-coded filter query "proPrice:[500 TO 1000]" restricts the
 * result set — intentional for the demo data built by {@link #init()}.
 *
 * @param keyword     the keyword to search for
 * @param fields      qf fields to search; empty/null falls back to "*"
 * @param facetFields fields to facet (group-count) on
 */
public static void queryGroup(String keyword, String[] fields, String[] facetFields) {
    // Build the space-separated qf field list.
    StringBuffer qf = new StringBuffer();
    if (fields == null || fields.length == 0) {
        qf.append("*");
    } else {
        for (String field : fields) {
            qf.append(field).append(" ");
        }
    }
    SolrQuery query = new SolrQuery(keyword);
    query.setParam("qf", qf.toString().trim())
        .setParam("defType", "edismax") // qf requires the edismax parser
        .setIncludeScore(false)         // no score needed for pure facet counts
        .setFacet(true)                 // enable faceting
        .setRows(0)                     // facet-only query: no document rows
        .setFacetMinCount(1)            // hide zero-count buckets
        .setFacetLimit(5)               // cap buckets returned per field
        .addFilterQuery("proPrice:[500 TO 1000]");
    for (String facetField : facetFields) {
        query.addFacetField(facetField.trim()); // field to group-count on
    }
    try {
        QueryResponse response = solrServer.query(query);
        for (FacetField facet : response.getFacetFields()) {
            System.out.println(facet.getName() + ":" + facet.getValueCount());
            for (Count bucket : facet.getValues()) {
                System.out.println(bucket.getName() + ":" + bucket.getCount());
            }
        }
    } catch (SolrServerException e) {
        e.printStackTrace();
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "queryData" + LOG_SEPARATOR + "检索数据出现异常。。。" + LOG_SEPARATOR + e.getMessage());
    } finally {
        query.clear();
    }
}
/**
 * Hard commit: flushes pending updates to disk.
 * Usually not called explicitly — autoCommit in solrconfig.xml is preferred.
 */
public static void commit() {
    try {
        solrServer.commit();
    } catch (SolrServerException e) {
        // Pass the exception to the logger so the full stack trace is retained.
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "commit" + LOG_SEPARATOR + "硬提交出现异常。。。", e);
    } catch (IOException e) {
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "commit" + LOG_SEPARATOR + "硬提交出现异常。。。", e);
    }
}
/**
 * Soft commit: makes pending updates searchable in memory without a full flush.
 * Usually not called explicitly — autoSoftCommit in solrconfig.xml is preferred.
 */
public static void softCommit() {
    try {
        // (waitFlush, waitSearcher, softCommit)
        solrServer.commit(true, true, true);
    } catch (SolrServerException e) {
        // Pass the exception to the logger so the full stack trace is retained.
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "softCommit" + LOG_SEPARATOR + "软提交出现异常。。。", e);
    } catch (IOException e) {
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "softCommit" + LOG_SEPARATOR + "软提交出现异常。。。", e);
    }
}
/**
 * Index optimization: merges the on-disk index segments into a single large segment.
 * Expensive — run sparingly.
 */
public static void optimize() {
    try {
        solrServer.optimize();
    } catch (SolrServerException e) {
        // Pass the exception to the logger so the full stack trace is retained.
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "optimize" + LOG_SEPARATOR + "索引段优化出现异常。。。", e);
    } catch (IOException e) {
        logger.error(SolrUtil.class.getName() + LOG_SEPARATOR + "optimize" + LOG_SEPARATOR + "索引段优化出现异常。。。", e);
    }
}
/**
 * Manual smoke test: runs a per-field OR query and prints each hit's fields.
 * Alternative experiments are kept commented out for reference.
 */
public static void main(String[] args) {
    //SolrUtil.delAll();
    // SolrUtil.init();
    // SolrUtil.softCommit();
    //SolrUtil.optimize();
    // Experiment 1: keyword search via queryData
    /*Map<String, Object> map = new HashMap<String, Object>();
    map.put(SolrUtil.QUERY_KEYWORD, "罗湖");
    PaginationVo vo = SolrUtil.query(1, 20, map);*/
    // Experiment 2: per-field keywords — c_all:罗湖 native_addr:哈哈 residence_addr:南山
    Map<String, Object> criteria = new HashMap<String, Object>();
    criteria.put("c_all", "罗湖");
    criteria.put("native_addr", "哈哈");
    criteria.put("residence_addr", "南山");
    PaginationVo result = SolrUtil.query(1, 5, criteria, false);
    // Experiment 3: field-weighted search
    // PaginationVo vo = SolrUtil.query(1, 5, "罗湖", false, new String[]{"c_all", "native_addr", "residence_addr", "nationality_id"});
    // Dump every field of every returned row, tab-separated, one row per line.
    for (Map<String, Object> row : result.getRows()) {
        for (Map.Entry<String, Object> field : row.entrySet()) {
            System.out.print(field.getKey() + ":" + field.getValue() + "\t");
        }
        System.out.println();
    }
    //SolrUtil.queryGroup("*", null, new String[]{"proCategory"});
}
/**
 * Seeds demo data for the facet-query examples.
 * Schema: id, proName (product name), proCategory (category), proPrice (price).
 * Does not commit (see {@link #commit()}).
 */
public static void init() {
    List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
    // Prices intentionally mix Integer and Double, matching the original demo data.
    list.add(buildProduct(1, "苹果手机", "手机", 5555.9));
    list.add(buildProduct(2, "小米手机", "手机", 1500));
    list.add(buildProduct(3, "华硕电脑", "电脑", 3000));
    list.add(buildProduct(4, "戴尔电脑", "电脑", 6000));
    list.add(buildProduct(5, "花花公子", "服装", 500));
    SolrUtil.addList(list);
}

/**
 * Builds one demo product document.
 * proPrice is Object so callers can pass Integer or Double unchanged (autoboxed).
 */
private static Map<String, Object> buildProduct(int id, String proName, String proCategory, Object proPrice) {
    Map<String, Object> doc = new HashMap<String, Object>();
    doc.put("id", id);
    doc.put("proName", proName);
    doc.put("proCategory", proCategory);
    doc.put("proPrice", proPrice);
    return doc;
}
}