SphinxClient.java 45 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539
  1. /*
  2. * $Id$
  3. *
  4. * Java version of Sphinx searchd client (Java API)
  5. *
  6. * Copyright (c) 2007, Vladimir Fedorkov
  7. * Copyright (c) 2007-2016, Andrew Aksyonoff
  8. * Copyright (c) 2008-2016, Sphinx Technologies Inc
  9. * All rights reserved
  10. *
  11. * This program is free software; you can redistribute it and/or modify
  12. * it under the terms of the GNU Library General Public License. You should
  13. * have received a copy of the LGPL license along with this program; if you
  14. * did not, you can find it at http://www.gnu.org/
  15. *
  16. * WARNING!!!
  17. *
  18. * As of 2015, we strongly recommend to use either SphinxQL or REST APIs
  19. * rather than the native SphinxAPI.
  20. *
  21. * While both the native SphinxAPI protocol and the existing APIs will
  22. * continue to exist, and perhaps should not even break (too much), exposing
  23. * all the new features via multiple different native API implementations
  24. * is too much of a support complication for us.
  25. *
  26. * That said, you're welcome to overtake the maintenance of any given
  27. * official API, and remove this warning ;)
  28. *
  29. */
  30. package org.sphx.api;
  31. import java.io.*;
  32. import java.net.*;
  33. import java.util.*;
  34. import java.net.SocketAddress.*;
  35. /** Sphinx client class */
  36. public class SphinxClient
  37. {
  38. /* matching modes */
  39. public final static int SPH_MATCH_ALL = 0;
  40. public final static int SPH_MATCH_ANY = 1;
  41. public final static int SPH_MATCH_PHRASE = 2;
  42. public final static int SPH_MATCH_BOOLEAN = 3;
  43. public final static int SPH_MATCH_EXTENDED = 4;
  44. public final static int SPH_MATCH_FULLSCAN = 5;
  45. public final static int SPH_MATCH_EXTENDED2 = 6;
  46. /* ranking modes (extended2 only) */
  47. public final static int SPH_RANK_PROXIMITY_BM25 = 0;
  48. public final static int SPH_RANK_BM25 = 1;
  49. public final static int SPH_RANK_NONE = 2;
  50. public final static int SPH_RANK_WORDCOUNT = 3;
  51. public final static int SPH_RANK_PROXIMITY = 4;
  52. public final static int SPH_RANK_MATCHANY = 5;
  53. public final static int SPH_RANK_FIELDMASK = 6;
  54. public final static int SPH_RANK_SPH04 = 7;
  55. public final static int SPH_RANK_EXPR = 8;
  56. public final static int SPH_RANK_TOTAL = 9;
  57. /* sorting modes */
  58. public final static int SPH_SORT_RELEVANCE = 0;
  59. public final static int SPH_SORT_ATTR_DESC = 1;
  60. public final static int SPH_SORT_ATTR_ASC = 2;
  61. public final static int SPH_SORT_TIME_SEGMENTS = 3;
  62. public final static int SPH_SORT_EXTENDED = 4;
  63. public final static int SPH_SORT_EXPR = 5;
  64. /* grouping functions */
  65. public final static int SPH_GROUPBY_DAY = 0;
  66. public final static int SPH_GROUPBY_WEEK = 1;
  67. public final static int SPH_GROUPBY_MONTH = 2;
  68. public final static int SPH_GROUPBY_YEAR = 3;
  69. public final static int SPH_GROUPBY_ATTR = 4;
  70. public final static int SPH_GROUPBY_ATTRPAIR = 5;
  71. /* searchd reply status codes */
  72. public final static int SEARCHD_OK = 0;
  73. public final static int SEARCHD_ERROR = 1;
  74. public final static int SEARCHD_RETRY = 2;
  75. public final static int SEARCHD_WARNING = 3;
  76. /* attribute types */
  77. public final static int SPH_ATTR_INTEGER = 1;
  78. public final static int SPH_ATTR_TIMESTAMP = 2;
  79. public final static int SPH_ATTR_ORDINAL = 3;
  80. public final static int SPH_ATTR_BOOL = 4;
  81. public final static int SPH_ATTR_FLOAT = 5;
  82. public final static int SPH_ATTR_BIGINT = 6;
  83. public final static int SPH_ATTR_STRING = 7;
  84. public final static int SPH_ATTR_MULTI = 0x40000001;
  85. public final static int SPH_ATTR_MULTI64 = 0x40000002;
  86. /* searchd commands */
  87. private final static int SEARCHD_COMMAND_SEARCH = 0;
  88. private final static int SEARCHD_COMMAND_EXCERPT = 1;
  89. private final static int SEARCHD_COMMAND_UPDATE = 2;
  90. private final static int SEARCHD_COMMAND_KEYWORDS = 3;
  91. private final static int SEARCHD_COMMAND_PERSIST = 4;
  92. private final static int SEARCHD_COMMAND_FLUSHATTRS = 7;
  93. /* searchd command versions */
  94. private final static int VER_MAJOR_PROTO = 0x1;
  95. private final static int VER_COMMAND_SEARCH = 0x119;
  96. private final static int VER_COMMAND_EXCERPT = 0x102;
  97. private final static int VER_COMMAND_UPDATE = 0x103;
  98. private final static int VER_COMMAND_KEYWORDS = 0x100;
  99. private final static int VER_COMMAND_FLUSHATTRS = 0x100;
  100. /* filter types */
  101. private final static int SPH_FILTER_VALUES = 0;
  102. private final static int SPH_FILTER_RANGE = 1;
  103. private final static int SPH_FILTER_FLOATRANGE = 2;
  104. private String _host;
  105. private int _port;
  106. private String _path;
  107. private Socket _socket;
  108. private int _offset;
  109. private int _limit;
  110. private int _mode;
  111. private int[] _weights;
  112. private int _sort;
  113. private String _sortby;
  114. private int _minId;
  115. private int _maxId;
  116. private ByteArrayOutputStream _rawFilters;
  117. private DataOutputStream _filters;
  118. private int _filterCount;
  119. private String _groupBy;
  120. private int _groupFunc;
  121. private String _groupSort;
  122. private String _groupDistinct;
  123. private int _maxMatches;
  124. private int _cutoff;
  125. private int _retrycount;
  126. private int _retrydelay;
  127. private String _latitudeAttr;
  128. private String _longitudeAttr;
  129. private float _latitude;
  130. private float _longitude;
  131. private String _error;
  132. private String _warning;
  133. private boolean _connerror;
  134. private int _timeout;
  135. private ArrayList _reqs;
  136. private Map _indexWeights;
  137. private int _ranker;
  138. private String _rankexpr;
  139. private int _maxQueryTime;
  140. private Map _fieldWeights;
  141. private Map _overrideTypes;
  142. private Map _overrideValues;
  143. private String _select;
  144. /** Creates a new SphinxClient instance. */
  145. public SphinxClient()
  146. {
  147. this("localhost", 9312);
  148. }
  149. /** Creates a new SphinxClient instance, with host:port specification. */
  150. public SphinxClient(String host, int port)
  151. {
  152. _host = host;
  153. _port = port;
  154. _path = null;
  155. _socket = null;
  156. _offset = 0;
  157. _limit = 20;
  158. _mode = SPH_MATCH_EXTENDED2;
  159. _sort = SPH_SORT_RELEVANCE;
  160. _sortby = "";
  161. _minId = 0;
  162. _maxId = 0;
  163. _filterCount = 0;
  164. _rawFilters = new ByteArrayOutputStream();
  165. _filters = new DataOutputStream(_rawFilters);
  166. _groupBy = "";
  167. _groupFunc = SPH_GROUPBY_DAY;
  168. _groupSort = "@group desc";
  169. _groupDistinct = "";
  170. _maxMatches = 1000;
  171. _cutoff = 0;
  172. _retrycount = 0;
  173. _retrydelay = 0;
  174. _latitudeAttr = null;
  175. _longitudeAttr = null;
  176. _latitude = 0;
  177. _longitude = 0;
  178. _error = "";
  179. _warning = "";
  180. _connerror = false;
  181. _timeout = 1000;
  182. _reqs = new ArrayList();
  183. _weights = null;
  184. _indexWeights = new LinkedHashMap();
  185. _fieldWeights = new LinkedHashMap();
  186. _ranker = SPH_RANK_PROXIMITY_BM25;
  187. _rankexpr = "";
  188. _overrideTypes = new LinkedHashMap();
  189. _overrideValues = new LinkedHashMap();
  190. _select = "*";
  191. }
  192. /** Get last error message, if any. */
  193. public String GetLastError()
  194. {
  195. return _error;
  196. }
  197. /** Get last warning message, if any. */
  198. public String GetLastWarning()
  199. {
  200. return _warning;
  201. }
  202. /** Get last error flag (to tell network connection errors from searchd errors or broken responses). */
  203. public boolean IsConnectError()
  204. {
  205. return _connerror;
  206. }
  207. /** Set searchd host and port to connect to. */
  208. public void SetServer(String host, int port) throws SphinxException
  209. {
  210. myAssert ( host!=null && host.length()>0, "host name must not be empty" );
  211. myAssert ( port>0 && port<65536, "port must be in 1..65535 range" );
  212. _host = host;
  213. _port = port;
  214. }
  215. /** Set server connection timeout (0 to remove), in milliseconds. */
  216. public void SetConnectTimeout ( int timeout )
  217. {
  218. _timeout = Math.max ( timeout, 0 );
  219. }
  220. /** Internal method. Sanity check. */
  221. private void myAssert ( boolean condition, String err ) throws SphinxException
  222. {
  223. if ( !condition )
  224. {
  225. _error = err;
  226. throw new SphinxException ( err );
  227. }
  228. }
  229. /** Internal method. String IO helper. */
  230. private static void writeNetUTF8 ( DataOutputStream ostream, String str ) throws IOException
  231. {
  232. if ( str==null )
  233. {
  234. ostream.writeInt ( 0 );
  235. return;
  236. }
  237. byte[] sBytes = str.getBytes ( "UTF-8" );
  238. int iLen = sBytes.length;
  239. ostream.writeInt ( iLen );
  240. ostream.write ( sBytes );
  241. }
  242. /** Internal method. String IO helper. */
  243. private static String readNetUTF8(DataInputStream istream) throws IOException
  244. {
  245. int iLen = istream.readInt();
  246. byte[] sBytes = new byte [ iLen ];
  247. istream.readFully ( sBytes );
  248. return new String ( sBytes, "UTF-8");
  249. }
  250. /** Internal method. Unsigned int IO helper. */
  251. private static long readDword ( DataInputStream istream ) throws IOException
  252. {
  253. long v = (long) istream.readInt ();
  254. if ( v<0 )
  255. v += 4294967296L;
  256. return v;
  257. }
  258. /** Internal method. Connect to searchd and exchange versions. */
  259. private Socket _Connect()
  260. {
  261. if ( _socket!=null )
  262. return _socket;
  263. _connerror = false;
  264. Socket sock = null;
  265. try
  266. {
  267. sock = new Socket ();
  268. sock.setSoTimeout ( _timeout );
  269. InetSocketAddress addr = new InetSocketAddress ( _host, _port );
  270. sock.connect ( addr, _timeout );
  271. DataInputStream sIn = new DataInputStream ( sock.getInputStream() );
  272. int version = sIn.readInt();
  273. if ( version<1 )
  274. {
  275. sock.close ();
  276. _error = "expected searchd protocol version 1+, got version " + version;
  277. return null;
  278. }
  279. DataOutputStream sOut = new DataOutputStream ( sock.getOutputStream() );
  280. sOut.writeInt ( VER_MAJOR_PROTO );
  281. } catch ( IOException e )
  282. {
  283. _error = "connection to " + _host + ":" + _port + " failed: " + e;
  284. _connerror = true;
  285. try
  286. {
  287. if ( sock!=null )
  288. sock.close ();
  289. } catch ( IOException e1 ) {}
  290. return null;
  291. }
  292. return sock;
  293. }
  294. /** Internal method. Get and check response packet from searchd. */
  295. private byte[] _GetResponse ( Socket sock )
  296. {
  297. /* connect */
  298. DataInputStream sIn = null;
  299. InputStream SockInput = null;
  300. try
  301. {
  302. SockInput = sock.getInputStream();
  303. sIn = new DataInputStream ( SockInput );
  304. } catch ( IOException e )
  305. {
  306. _error = "getInputStream() failed: " + e;
  307. return null;
  308. }
  309. /* read response */
  310. byte[] response = null;
  311. short status = 0, ver = 0;
  312. int len = 0;
  313. try
  314. {
  315. /* read status fields */
  316. status = sIn.readShort();
  317. ver = sIn.readShort();
  318. len = sIn.readInt();
  319. /* read response if non-empty */
  320. if ( len<=0 )
  321. {
  322. _error = "invalid response packet size (len=" + len + ")";
  323. return null;
  324. }
  325. response = new byte[len];
  326. sIn.readFully ( response, 0, len );
  327. /* check status */
  328. if ( status==SEARCHD_WARNING )
  329. {
  330. DataInputStream in = new DataInputStream ( new ByteArrayInputStream ( response ) );
  331. int iWarnLen = in.readInt ();
  332. _warning = new String ( response, 4, iWarnLen );
  333. System.arraycopy ( response, 4+iWarnLen, response, 0, response.length-4-iWarnLen );
  334. } else if ( status==SEARCHD_ERROR )
  335. {
  336. _error = "searchd error: " + new String ( response, 4, response.length-4 );
  337. return null;
  338. } else if ( status==SEARCHD_RETRY )
  339. {
  340. _error = "temporary searchd error: " + new String ( response, 4, response.length-4 );
  341. return null;
  342. } else if ( status!=SEARCHD_OK )
  343. {
  344. _error = "searched returned unknown status, code=" + status;
  345. return null;
  346. }
  347. } catch ( IOException e )
  348. {
  349. if ( len!=0 )
  350. {
  351. /* get trace, to provide even more failure details */
  352. PrintWriter ew = new PrintWriter ( new StringWriter() );
  353. e.printStackTrace ( ew );
  354. ew.flush ();
  355. ew.close ();
  356. String sTrace = ew.toString ();
  357. /* build error message */
  358. _error = "failed to read searchd response (status=" + status + ", ver=" + ver + ", len=" + len + ", trace=" + sTrace +")";
  359. } else
  360. {
  361. _error = "received zero-sized searchd response (searchd crashed?): " + e.getMessage();
  362. }
  363. return null;
  364. } finally
  365. {
  366. if ( _socket==null )
  367. {
  368. try
  369. {
  370. if ( sIn!=null )
  371. sIn.close();
  372. if ( sock!=null && !sock.isConnected() )
  373. sock.close();
  374. } catch ( IOException e )
  375. {
  376. /* silently ignore close failures; nothing could be done anyway */
  377. }
  378. }
  379. }
  380. return response;
  381. }
  382. /** Internal method. Connect to searchd, send request, get response as DataInputStream. */
  383. private DataInputStream _DoRequest ( int command, int version, ByteArrayOutputStream req )
  384. {
  385. /* connect */
  386. Socket sock = _Connect();
  387. if ( sock==null )
  388. return null;
  389. /* send request */
  390. byte[] reqBytes = req.toByteArray();
  391. try
  392. {
  393. DataOutputStream sockDS = new DataOutputStream ( sock.getOutputStream() );
  394. sockDS.writeShort ( command );
  395. sockDS.writeShort ( version );
  396. sockDS.writeInt ( reqBytes.length );
  397. sockDS.write ( reqBytes );
  398. } catch ( Exception e )
  399. {
  400. _error = "network error: " + e;
  401. _connerror = true;
  402. return null;
  403. }
  404. /* get response */
  405. byte[] response = _GetResponse ( sock );
  406. if ( response==null )
  407. return null;
  408. /* spawn that tampon */
  409. return new DataInputStream ( new ByteArrayInputStream ( response ) );
  410. }
  411. /** Set matches offset and limit to return to client, max matches to retrieve on server, and cutoff. */
  412. public void SetLimits ( int offset, int limit, int max, int cutoff ) throws SphinxException
  413. {
  414. myAssert ( offset>=0, "offset must not be negative" );
  415. myAssert ( limit>0, "limit must be positive" );
  416. myAssert ( max>0, "max must be positive" );
  417. myAssert ( cutoff>=0, "cutoff must not be negative" );
  418. _offset = offset;
  419. _limit = limit;
  420. _maxMatches = max;
  421. _cutoff = cutoff;
  422. }
  423. /** Set matches offset and limit to return to client, and max matches to retrieve on server. */
  424. public void SetLimits ( int offset, int limit, int max ) throws SphinxException
  425. {
  426. SetLimits ( offset, limit, max, _cutoff );
  427. }
  428. /** Set matches offset and limit to return to client. */
  429. public void SetLimits ( int offset, int limit) throws SphinxException
  430. {
  431. SetLimits ( offset, limit, _maxMatches, _cutoff );
  432. }
  433. /** Set maximum query time, in milliseconds, per-index, 0 means "do not limit". */
  434. public void SetMaxQueryTime ( int maxTime ) throws SphinxException
  435. {
  436. myAssert ( maxTime>=0, "max_query_time must not be negative" );
  437. _maxQueryTime = maxTime;
  438. }
  439. /** Set matching mode. DEPRECATED */
  440. public void SetMatchMode(int mode) throws SphinxException
  441. {
  442. //System.out.println ( "DEPRECATED: Do not call this method or, even better, use SphinxQL instead of an API\n" );
  443. myAssert (
  444. mode==SPH_MATCH_ALL ||
  445. mode==SPH_MATCH_ANY ||
  446. mode==SPH_MATCH_PHRASE ||
  447. mode==SPH_MATCH_BOOLEAN ||
  448. mode==SPH_MATCH_EXTENDED ||
  449. mode==SPH_MATCH_FULLSCAN ||
  450. mode==SPH_MATCH_EXTENDED2, "unknown mode value; use one of the SPH_MATCH_xxx constants" );
  451. _mode = mode;
  452. }
  453. /** Set ranking mode. */
  454. public void SetRankingMode ( int ranker, String rankexpr ) throws SphinxException
  455. {
  456. myAssert ( ranker>=0 && ranker<SPH_RANK_TOTAL, "unknown ranker value; use one of the SPH_RANK_xxx constants" );
  457. _rankexpr = ( rankexpr==null ) ? "" : rankexpr;
  458. _ranker = ranker;
  459. }
  460. /** Set sorting mode. */
  461. public void SetSortMode ( int mode, String sortby ) throws SphinxException
  462. {
  463. myAssert (
  464. mode==SPH_SORT_RELEVANCE ||
  465. mode==SPH_SORT_ATTR_DESC ||
  466. mode==SPH_SORT_ATTR_ASC ||
  467. mode==SPH_SORT_TIME_SEGMENTS ||
  468. mode==SPH_SORT_EXTENDED ||
  469. mode==SPH_SORT_EXPR, "unknown mode value; use one of the available SPH_SORT_xxx constants" );
  470. myAssert ( mode==SPH_SORT_RELEVANCE || ( sortby!=null && sortby.length()>0 ), "sortby string must not be empty in selected mode" );
  471. _sort = mode;
  472. _sortby = ( sortby==null ) ? "" : sortby;
  473. }
  474. /** Set per-field weights (all values must be positive). WARNING: DEPRECATED, use SetFieldWeights() instead. */
  475. public void SetWeights(int[] weights) throws SphinxException
  476. {
  477. myAssert ( weights!=null, "weights must not be null" );
  478. for (int i = 0; i < weights.length; i++) {
  479. int weight = weights[i];
  480. myAssert ( weight>0, "all weights must be greater than 0" );
  481. }
  482. _weights = weights;
  483. }
  484. /**
  485. * Bind per-field weights by field name.
  486. * @param fieldWeights hash which maps String index names to Integer weights
  487. */
  488. public void SetFieldWeights ( Map fieldWeights ) throws SphinxException
  489. {
  490. /* FIXME! implement checks here */
  491. _fieldWeights = ( fieldWeights==null ) ? new LinkedHashMap () : fieldWeights;
  492. }
  493. /**
  494. * Bind per-index weights by index name (and enable summing the weights on duplicate matches, instead of replacing them).
  495. * @param indexWeights hash which maps String index names to Integer weights
  496. */
  497. public void SetIndexWeights ( Map indexWeights ) throws SphinxException
  498. {
  499. /* FIXME! implement checks here */
  500. _indexWeights = ( indexWeights==null ) ? new LinkedHashMap () : indexWeights;
  501. }
  502. /** Set document IDs range to match. */
  503. public void SetIDRange ( int min, int max ) throws SphinxException
  504. {
  505. myAssert ( min<=max, "min must be less or equal to max" );
  506. _minId = min;
  507. _maxId = max;
  508. }
  509. /** Set values filter. Only match records where attribute value is in given set. */
  510. public void SetFilter ( String attribute, int[] values, boolean exclude ) throws SphinxException
  511. {
  512. myAssert ( values!=null && values.length>0, "values array must not be null or empty" );
  513. myAssert ( attribute!=null && attribute.length()>0, "attribute name must not be null or empty" );
  514. try
  515. {
  516. writeNetUTF8 ( _filters, attribute );
  517. _filters.writeInt ( SPH_FILTER_VALUES );
  518. _filters.writeInt ( values.length );
  519. for ( int i=0; i<values.length; i++ )
  520. _filters.writeLong ( values[i] );
  521. _filters.writeInt ( exclude ? 1 : 0 );
  522. } catch ( Exception e )
  523. {
  524. myAssert ( false, "IOException: " + e.getMessage() );
  525. }
  526. _filterCount++;
  527. }
  528. /** Set values filter. Only match records where attribute value is in given set. */
  529. public void SetFilter ( String attribute, long[] values, boolean exclude ) throws SphinxException
  530. {
  531. myAssert ( values!=null && values.length>0, "values array must not be null or empty" );
  532. myAssert ( attribute!=null && attribute.length()>0, "attribute name must not be null or empty" );
  533. try
  534. {
  535. writeNetUTF8 ( _filters, attribute );
  536. _filters.writeInt ( SPH_FILTER_VALUES );
  537. _filters.writeInt ( values.length );
  538. for ( int i=0; i<values.length; i++ )
  539. _filters.writeLong ( values[i] );
  540. _filters.writeInt ( exclude ? 1 : 0 );
  541. } catch ( Exception e )
  542. {
  543. myAssert ( false, "IOException: " + e.getMessage() );
  544. }
  545. _filterCount++;
  546. }
  547. /** Set values filter with a single value (syntax sugar; see {@link #SetFilter(String,int[],boolean)}). */
  548. public void SetFilter ( String attribute, int value, boolean exclude ) throws SphinxException
  549. {
  550. long[] values = new long[] { value };
  551. SetFilter ( attribute, values, exclude );
  552. }
  553. /** Set values filter with a single value (syntax sugar; see {@link #SetFilter(String,int[],boolean)}). */
  554. public void SetFilter ( String attribute, long value, boolean exclude ) throws SphinxException
  555. {
  556. long[] values = new long[] { value };
  557. SetFilter ( attribute, values, exclude );
  558. }
  559. /** Set integer range filter. Only match records if attribute value is beetwen min and max (inclusive). */
  560. public void SetFilterRange ( String attribute, long min, long max, boolean exclude ) throws SphinxException
  561. {
  562. myAssert ( min<=max, "min must be less or equal to max" );
  563. try
  564. {
  565. writeNetUTF8 ( _filters, attribute );
  566. _filters.writeInt ( SPH_FILTER_RANGE );
  567. _filters.writeLong ( min );
  568. _filters.writeLong ( max );
  569. _filters.writeInt ( exclude ? 1 : 0 );
  570. } catch ( Exception e )
  571. {
  572. myAssert ( false, "IOException: " + e.getMessage() );
  573. }
  574. _filterCount++;
  575. }
  576. /** Set integer range filter. Only match records if attribute value is beetwen min and max (inclusive). */
  577. public void SetFilterRange ( String attribute, int min, int max, boolean exclude ) throws SphinxException
  578. {
  579. SetFilterRange ( attribute, (long)min, (long)max, exclude );
  580. }
  581. /** Set float range filter. Only match records if attribute value is beetwen min and max (inclusive). */
  582. public void SetFilterFloatRange ( String attribute, float min, float max, boolean exclude ) throws SphinxException
  583. {
  584. myAssert ( min<=max, "min must be less or equal to max" );
  585. try
  586. {
  587. writeNetUTF8 ( _filters, attribute );
  588. _filters.writeInt ( SPH_FILTER_FLOATRANGE );
  589. _filters.writeFloat ( min );
  590. _filters.writeFloat ( max );
  591. _filters.writeInt ( exclude ? 1 : 0 );
  592. } catch ( Exception e )
  593. {
  594. myAssert ( false, "IOException: " + e.getMessage() );
  595. }
  596. _filterCount++;
  597. }
  598. /** Setup geographical anchor point. Required to use @geodist in filters and sorting; distance will be computed to this point. */
  599. public void SetGeoAnchor ( String latitudeAttr, String longitudeAttr, float latitude, float longitude ) throws SphinxException
  600. {
  601. myAssert ( latitudeAttr!=null && latitudeAttr.length()>0, "longitudeAttr string must not be null or empty" );
  602. myAssert ( longitudeAttr!=null && longitudeAttr.length()>0, "longitudeAttr string must not be null or empty" );
  603. _latitudeAttr = latitudeAttr;
  604. _longitudeAttr = longitudeAttr;
  605. _latitude = latitude;
  606. _longitude = longitude;
  607. }
  608. /** Set grouping attribute and function. */
  609. public void SetGroupBy ( String attribute, int func, String groupsort ) throws SphinxException
  610. {
  611. myAssert (
  612. func==SPH_GROUPBY_DAY ||
  613. func==SPH_GROUPBY_WEEK ||
  614. func==SPH_GROUPBY_MONTH ||
  615. func==SPH_GROUPBY_YEAR ||
  616. func==SPH_GROUPBY_ATTR ||
  617. func==SPH_GROUPBY_ATTRPAIR, "unknown func value; use one of the available SPH_GROUPBY_xxx constants" );
  618. _groupBy = attribute;
  619. _groupFunc = func;
  620. _groupSort = groupsort;
  621. }
  622. /** Set grouping attribute and function with default ("@group desc") groupsort (syntax sugar). */
  623. public void SetGroupBy(String attribute, int func) throws SphinxException
  624. {
  625. SetGroupBy(attribute, func, "@group desc");
  626. }
  627. /** Set count-distinct attribute for group-by queries. */
  628. public void SetGroupDistinct(String attribute)
  629. {
  630. _groupDistinct = attribute;
  631. }
  632. /** Set distributed retries count and delay. */
  633. public void SetRetries ( int count, int delay ) throws SphinxException
  634. {
  635. myAssert ( count>=0, "count must not be negative" );
  636. myAssert ( delay>=0, "delay must not be negative" );
  637. _retrycount = count;
  638. _retrydelay = delay;
  639. }
  640. /** Set distributed retries count with default (zero) delay (syntax sugar). */
  641. public void SetRetries ( int count ) throws SphinxException
  642. {
  643. SetRetries ( count, 0 );
  644. }
  645. /**
  646. * DEPRECATED: Set attribute values override (one override list per attribute).
  647. * @param values maps Long document IDs to Int/Long/Float values (as specified in attrtype).
  648. */
  649. public void SetOverride ( String attrname, int attrtype, Map values ) throws SphinxException
  650. {
  651. //System.out.println ( "DEPRECATED: Do not call this method. Use SphinxQL REMAP() function instead.\n" );
  652. myAssert ( attrname!=null && attrname.length()>0, "attrname must not be empty" );
  653. myAssert ( attrtype==SPH_ATTR_INTEGER || attrtype==SPH_ATTR_TIMESTAMP || attrtype==SPH_ATTR_BOOL || attrtype==SPH_ATTR_FLOAT || attrtype==SPH_ATTR_BIGINT,
  654. "unsupported attrtype (must be one of INTEGER, TIMESTAMP, BOOL, FLOAT, or BIGINT)" );
  655. _overrideTypes.put ( attrname, new Integer ( attrtype ) );
  656. _overrideValues.put ( attrname, values );
  657. }
  658. /** Set select-list (attributes or expressions), SQL-like syntax. */
  659. public void SetSelect ( String select ) throws SphinxException
  660. {
  661. myAssert ( select!=null, "select clause string must not be null" );
  662. _select = select;
  663. }
  664. /** Reset all currently set filters (for multi-queries). */
  665. public void ResetFilters()
  666. {
  667. /* should we close them first? */
  668. _rawFilters = new ByteArrayOutputStream();
  669. _filters = new DataOutputStream(_rawFilters);
  670. _filterCount = 0;
  671. /* reset GEO anchor */
  672. _latitudeAttr = null;
  673. _longitudeAttr = null;
  674. _latitude = 0;
  675. _longitude = 0;
  676. }
  677. /** Clear groupby settings (for multi-queries). */
  678. public void ResetGroupBy ()
  679. {
  680. _groupBy = "";
  681. _groupFunc = SPH_GROUPBY_DAY;
  682. _groupSort = "@group desc";
  683. _groupDistinct = "";
  684. }
  685. /** Clear all attribute value overrides (for multi-queries). */
  686. public void ResetOverrides ()
  687. {
  688. _overrideTypes.clear ();
  689. _overrideValues.clear ();
  690. }
/**
 * Connect to searchd server and run current search query against all indexes (syntax sugar).
 * Equivalent to Query ( query, "*", "" ).
 *
 * @param query query string
 * @return result set on success, null on failure (the reason is stored in _error)
 */
public SphinxResult Query ( String query ) throws SphinxException
{
	return Query ( query, "*", "" );
}
/**
 * Connect to searchd server and run current search query against the given indexes (syntax sugar).
 * Equivalent to Query ( query, index, "" ).
 *
 * @param query query string
 * @param index index name(s) to query, e.g. "*" or "idx1,idx2"
 * @return result set on success, null on failure (the reason is stored in _error)
 */
public SphinxResult Query ( String query, String index ) throws SphinxException
{
	return Query ( query, index, "" );
}
  701. /** Connect to searchd server and run current search query. */
  702. public SphinxResult Query ( String query, String index, String comment ) throws SphinxException
  703. {
  704. myAssert ( _reqs==null || _reqs.size()==0, "AddQuery() and Query() can not be combined; use RunQueries() instead" );
  705. AddQuery ( query, index, comment );
  706. SphinxResult[] results = RunQueries();
  707. _reqs = new ArrayList(); /* just in case it failed too early */
  708. if ( results==null || results.length<1 )
  709. return null; /* probably network error; error message should be already filled */
  710. SphinxResult res = results[0];
  711. _warning = res.warning;
  712. _error = res.error;
  713. if ( res==null || res.getStatus()==SEARCHD_ERROR )
  714. return null;
  715. return res;
  716. }
/**
 * Add new query with current settings to current search request.
 *
 * Serializes the complete current client state (paging, match mode, ranker,
 * sorting, weights, filters, group-by settings, geo anchor, per-index and
 * per-field weights, attribute overrides, and the select list) into searchd
 * wire format and appends it to the pending batch. Nothing is sent over the
 * network until RunQueries() is called.
 *
 * @param query   query string
 * @param index   index name(s) to query, e.g. "*" or "idx1,idx2"
 * @param comment query comment that shows up in the searchd query log
 * @return        0-based position of this query in the batch (use it to index
 *                the RunQueries() result array), or -1 on serialization failure
 * @throws SphinxException on serialization errors (raised via myAssert)
 */
public int AddQuery ( String query, String index, String comment ) throws SphinxException
{
	ByteArrayOutputStream req = new ByteArrayOutputStream();

	/* build request */
	try {
		DataOutputStream out = new DataOutputStream(req);
		out.writeInt(_offset);
		out.writeInt(_limit);
		out.writeInt(_mode);
		out.writeInt(_ranker);
		/* the ranking expression is only transmitted for the expression ranker */
		if ( _ranker == SPH_RANK_EXPR ) {
			writeNetUTF8(out, _rankexpr);
		}
		out.writeInt(_sort);
		writeNetUTF8(out, _sortby);
		writeNetUTF8(out, query);

		/* optional weights list; zero-length when unset */
		int weightLen = _weights != null ? _weights.length : 0;
		out.writeInt(weightLen);
		if (_weights != null) {
			for (int i = 0; i < _weights.length; i++)
				out.writeInt(_weights[i]);
		}

		writeNetUTF8(out, index);
		out.writeInt(0); /* NOTE(review): always 0 here; presumably a legacy flag preceding the ID range — confirm against the searchd protocol */
		out.writeInt(_minId);
		out.writeInt(_maxId);

		/* filters: count, then the buffer pre-serialized elsewhere (presumably by the SetFilter* methods) */
		out.writeInt(_filterCount);
		out.write(_rawFilters.toByteArray());

		/* group-by, max matches, sort-by-group flag */
		out.writeInt(_groupFunc);
		writeNetUTF8(out, _groupBy);
		out.writeInt(_maxMatches);
		writeNetUTF8(out, _groupSort);

		out.writeInt(_cutoff);
		out.writeInt(_retrycount);
		out.writeInt(_retrydelay);
		writeNetUTF8(out, _groupDistinct);

		/* anchor point: flag 0 = absent; flag 1 followed by attr names and coordinates */
		if (_latitudeAttr == null || _latitudeAttr.length() == 0 || _longitudeAttr == null || _longitudeAttr.length() == 0) {
			out.writeInt(0);
		} else {
			out.writeInt(1);
			writeNetUTF8(out, _latitudeAttr);
			writeNetUTF8(out, _longitudeAttr);
			out.writeFloat(_latitude);
			out.writeFloat(_longitude);
		}

		/* per-index weights: count, then (name, weight) pairs */
		out.writeInt(_indexWeights.size());
		for (Iterator e = _indexWeights.keySet().iterator(); e.hasNext();) {
			String indexName = (String) e.next();
			Integer weight = (Integer) _indexWeights.get(indexName);
			writeNetUTF8(out, indexName);
			out.writeInt(weight.intValue());
		}

		/* max query time */
		out.writeInt ( _maxQueryTime );

		/* per-field weights: count, then (field, weight) pairs */
		out.writeInt ( _fieldWeights.size() );
		for ( Iterator e=_fieldWeights.keySet().iterator(); e.hasNext(); )
		{
			String field = (String) e.next();
			Integer weight = (Integer) _fieldWeights.get ( field );
			writeNetUTF8 ( out, field );
			out.writeInt ( weight.intValue() );
		}

		/* comment */
		writeNetUTF8 ( out, comment );

		/* overrides: per attribute — name, type, count, then (docid, value) pairs;
		 * the value width on the wire depends on the declared type */
		out.writeInt ( _overrideTypes.size() );
		for ( Iterator e=_overrideTypes.keySet().iterator(); e.hasNext(); )
		{
			String attr = (String) e.next();
			Integer type = (Integer) _overrideTypes.get ( attr );
			Map values = (Map) _overrideValues.get ( attr );
			writeNetUTF8 ( out, attr );
			out.writeInt ( type.intValue() );
			out.writeInt ( values.size() );
			for ( Iterator e2=values.keySet().iterator(); e2.hasNext(); )
			{
				Long id = (Long) e2.next ();
				out.writeLong ( id.longValue() );
				switch ( type.intValue() )
				{
					case SPH_ATTR_FLOAT: out.writeFloat ( ( (Float) values.get ( id ) ).floatValue() ); break;
					case SPH_ATTR_BIGINT: out.writeLong ( ( (Long)values.get ( id ) ).longValue() ); break;
					default: out.writeInt ( ( (Integer)values.get ( id ) ).intValue() ); break;
				}
			}
		}

		/* select-list */
		writeNetUTF8 ( out, _select );

		/* done! store the serialized query and report its batch position */
		out.flush ();
		int qIndex = _reqs.size();
		_reqs.add ( qIndex, req.toByteArray() );
		return qIndex;

	} catch ( Exception e )
	{
		myAssert ( false, "error in AddQuery(): " + e + ": " + e.getMessage() );
	} finally
	{
		/* ByteArrayOutputStream.close() is documented as a no-op, so closing
		 * the filter streams here leaves them usable for later writes */
		try
		{
			_filters.close ();
			_rawFilters.close ();
		} catch ( IOException e )
		{
			myAssert ( false, "error in AddQuery(): " + e + ": " + e.getMessage() );
		}
	}
	/* only reached if myAssert() did not throw */
	return -1;
}
/**
 * Run all previously added search queries, in a single network round-trip.
 *
 * @return array with one SphinxResult per AddQuery() call; queries that failed
 *         server-side carry a per-result error/warning and must be checked
 *         individually. Returns null on a batch-level failure (reason in _error).
 */
public SphinxResult[] RunQueries() throws SphinxException
{
	if ( _reqs==null || _reqs.size()<1 )
	{
		_error = "no queries defined, issue AddQuery() first";
		return null;
	}

	/* build the mega-request: header plus all pending serialized queries, back to back */
	int nreqs = _reqs.size();
	ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
	try
	{
		DataOutputStream req = new DataOutputStream ( reqBuf );
		/* its a client (0 presumably marks a plain client rather than an agent — TODO confirm) */
		req.writeInt(0);
		req.writeInt ( nreqs );
		for ( int i=0; i<nreqs; i++ )
			req.write ( (byte[]) _reqs.get(i) );
		req.flush ();
	} catch ( Exception e )
	{
		_error = "internal error: failed to build request: " + e;
		return null;
	}

	DataInputStream in =_DoRequest ( SEARCHD_COMMAND_SEARCH, VER_COMMAND_SEARCH, reqBuf );
	if ( in==null )
		return null;

	SphinxResult[] results = new SphinxResult [ nreqs ];
	_reqs = new ArrayList(); /* the batch is consumed; start a fresh one */

	try
	{
		for ( int ires=0; ires<nreqs; ires++ )
		{
			SphinxResult res = new SphinxResult();
			results[ires] = res;

			/* per-result status; warnings still carry a full result, errors do not */
			int status = in.readInt();
			res.setStatus ( status );
			if (status != SEARCHD_OK) {
				String message = readNetUTF8(in);
				if (status == SEARCHD_WARNING) {
					res.warning = message;
				} else {
					res.error = message;
					continue;
				}
			}

			/* read fields */
			int nfields = in.readInt();
			res.fields = new String[nfields];
			int pos = 0;
			for (int i = 0; i < nfields; i++)
				res.fields[i] = readNetUTF8(in);

			/* read attributes (name, type) pairs */
			int nattrs = in.readInt();
			res.attrTypes = new int[nattrs];
			res.attrNames = new String[nattrs];
			for (int i = 0; i < nattrs; i++) {
				String AttrName = readNetUTF8(in);
				int AttrType = in.readInt();
				res.attrNames[i] = AttrName;
				res.attrTypes[i] = AttrType;
			}

			/* read match count; id64 flag selects 32- vs 64-bit document IDs below */
			int count = in.readInt();
			int id64 = in.readInt();
			res.matches = new SphinxMatch[count];
			for ( int matchesNo=0; matchesNo<count; matchesNo++ )
			{
				SphinxMatch docInfo;
				docInfo = new SphinxMatch (
					( id64==0 ) ? readDword(in) : in.readLong(),
					in.readInt() );

				/* read this match's attribute values, in declaration order */
				for (int attrNumber = 0; attrNumber < res.attrTypes.length; attrNumber++)
				{
					String attrName = res.attrNames[attrNumber];
					int type = res.attrTypes[attrNumber];

					/* handle bigints */
					if ( type==SPH_ATTR_BIGINT )
					{
						docInfo.attrValues.add ( attrNumber, new Long ( in.readLong() ) );
						continue;
					}

					/* handle floats */
					if ( type==SPH_ATTR_FLOAT )
					{
						docInfo.attrValues.add ( attrNumber, new Float ( in.readFloat() ) );
						continue;
					}

					/* handle strings */
					if ( type==SPH_ATTR_STRING )
					{
						String s = readNetUTF8(in);
						docInfo.attrValues.add ( attrNumber, s );
						continue;
					}

					/* handle everything else as unsigned ints */
					long val = readDword ( in );
					if ( type==SPH_ATTR_MULTI )
					{
						/* MVA: val is the number of 32-bit values that follow */
						long[] vals = new long [ (int)val ];
						for ( int k=0; k<val; k++ )
							vals[k] = readDword ( in );

						docInfo.attrValues.add ( attrNumber, vals );

					} else if ( type==SPH_ATTR_MULTI64 )
					{
						/* 64-bit MVA: the length prefix counts 32-bit words, so halve it */
						val = val / 2;
						long[] vals = new long [ (int)val ];
						for ( int k=0; k<val; k++ )
							vals[k] = in.readLong ();

						docInfo.attrValues.add ( attrNumber, vals );

					} else
					{
						docInfo.attrValues.add ( attrNumber, new Long ( val ) );
					}
				}
				res.matches[matchesNo] = docInfo;
			}

			/* result-set footer: totals, elapsed time (msec on the wire), per-word stats */
			res.total = in.readInt();
			res.totalFound = in.readInt();
			res.time = in.readInt() / 1000.0f;

			res.words = new SphinxWordInfo [ in.readInt() ];
			for ( int i=0; i<res.words.length; i++ )
				res.words[i] = new SphinxWordInfo ( readNetUTF8(in), readDword(in), readDword(in) );
		}
		return results;

	} catch ( IOException e )
	{
		_error = "incomplete reply";
		return null;
	}
}
/**
 * Connect to searchd server and generate excerpts (snippets) from given documents.
 *
 * @param docs  documents (plain text) to build snippets from
 * @param index index name whose settings are used for highlighting
 * @param words keywords to highlight
 * @param opts  maps String keys to String or Integer values (see the documentation
 *              for complete keys list); may be null, missing keys get defaults below
 * @return null on failure (reason in _error), array of snippets on success
 *         (one per input document)
 * @throws SphinxException on invalid arguments
 */
public String[] BuildExcerpts ( String[] docs, String index, String words, Map opts ) throws SphinxException
{
	myAssert(docs != null && docs.length > 0, "BuildExcerpts: Have no documents to process");
	myAssert(index != null && index.length() > 0, "BuildExcerpts: Have no index to process documents");
	myAssert(words != null && words.length() > 0, "BuildExcerpts: Have no words to highlight");
	if (opts == null) opts = new LinkedHashMap();

	/* fixup options: fill in defaults for any missing keys */
	if (!opts.containsKey("before_match")) opts.put("before_match", "<b>");
	if (!opts.containsKey("after_match")) opts.put("after_match", "</b>");
	if (!opts.containsKey("chunk_separator")) opts.put("chunk_separator", "...");
	if (!opts.containsKey("html_strip_mode")) opts.put("html_strip_mode", "index");
	if (!opts.containsKey("limit")) opts.put("limit", new Integer(256));
	if (!opts.containsKey("limit_passages")) opts.put("limit_passages", new Integer(0));
	if (!opts.containsKey("limit_words")) opts.put("limit_words", new Integer(0));
	if (!opts.containsKey("around")) opts.put("around", new Integer(5));
	if (!opts.containsKey("start_passage_id")) opts.put("start_passage_id", new Integer(1));
	if (!opts.containsKey("exact_phrase")) opts.put("exact_phrase", new Integer(0));
	if (!opts.containsKey("single_passage")) opts.put("single_passage", new Integer(0));
	if (!opts.containsKey("use_boundaries")) opts.put("use_boundaries", new Integer(0));
	if (!opts.containsKey("weight_order")) opts.put("weight_order", new Integer(0));
	if (!opts.containsKey("load_files")) opts.put("load_files", new Integer(0));
	if (!opts.containsKey("allow_empty")) opts.put("allow_empty", new Integer(0));
	if (!opts.containsKey("query_mode")) opts.put("query_mode", new Integer(0));
	if (!opts.containsKey("force_all_words")) opts.put("force_all_words", new Integer(0));

	/* build request */
	ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
	DataOutputStream req = new DataOutputStream ( reqBuf );
	try
	{
		req.writeInt(0); /* NOTE(review): always 0; presumably a mode field — confirm against searchd protocol */

		/* pack the boolean options into a flags bitmask; bit 0 (remove_spaces) is always on */
		int iFlags = 1; /* remove_spaces */
		if ( ((Integer)opts.get("exact_phrase")).intValue()!=0 ) iFlags |= 2;
		if ( ((Integer)opts.get("single_passage")).intValue()!=0 ) iFlags |= 4;
		if ( ((Integer)opts.get("use_boundaries")).intValue()!=0 ) iFlags |= 8;
		if ( ((Integer)opts.get("weight_order")).intValue()!=0 ) iFlags |= 16;
		if ( ((Integer)opts.get("query_mode")).intValue()!=0 ) iFlags |= 32;
		if ( ((Integer)opts.get("force_all_words")).intValue()!=0 ) iFlags |= 64;
		if ( ((Integer)opts.get("load_files")).intValue()!=0 ) iFlags |= 128;
		if ( ((Integer)opts.get("allow_empty")).intValue()!=0 ) iFlags |= 256;
		req.writeInt ( iFlags );
		writeNetUTF8 ( req, index );
		writeNetUTF8 ( req, words );

		/* send options */
		writeNetUTF8 ( req, (String) opts.get("before_match") );
		writeNetUTF8 ( req, (String) opts.get("after_match") );
		writeNetUTF8 ( req, (String) opts.get("chunk_separator") );
		req.writeInt ( ((Integer) opts.get("limit")).intValue() );
		req.writeInt ( ((Integer) opts.get("around")).intValue() );
		req.writeInt ( ((Integer) opts.get("limit_passages")).intValue() );
		req.writeInt ( ((Integer) opts.get("limit_words")).intValue() );
		req.writeInt ( ((Integer) opts.get("start_passage_id")).intValue() );
		writeNetUTF8 ( req, (String) opts.get("html_strip_mode") );

		/* send documents */
		req.writeInt ( docs.length );
		for ( int i=0; i<docs.length; i++ )
			writeNetUTF8 ( req, docs[i] );

		req.flush();

	} catch ( Exception e )
	{
		_error = "internal error: failed to build request: " + e;
		return null;
	}

	DataInputStream in = _DoRequest ( SEARCHD_COMMAND_EXCERPT, VER_COMMAND_EXCERPT, reqBuf );
	if ( in==null )
		return null;

	/* parse reply: one snippet string per input document */
	try
	{
		String[] res = new String [ docs.length ];
		for ( int i=0; i<docs.length; i++ )
			res[i] = readNetUTF8 ( in );
		return res;

	} catch ( Exception e )
	{
		_error = "incomplete reply";
		return null;
	}
}
  1047. /**
  1048. * Connect to searchd server and update given attributes on given documents in given indexes.
  1049. * Sample code that will set group_id=123 where id=1 and group_id=456 where id=3:
  1050. *
  1051. * <pre>
  1052. * String[] attrs = new String[1];
  1053. *
  1054. * attrs[0] = "group_id";
  1055. * long[][] values = new long[2][2];
  1056. *
  1057. * values[0] = new long[2]; values[0][0] = 1; values[0][1] = 123;
  1058. * values[1] = new long[2]; values[1][0] = 3; values[1][1] = 456;
  1059. *
  1060. * int res = cl.UpdateAttributes ( "test1", attrs, values );
  1061. * </pre>
  1062. *
  1063. * @param index index name(s) to update; might be distributed
  1064. * @param attrs array with the names of the attributes to update
  1065. * @param values array of updates; each long[] entry must contains document ID
  1066. * in the first element, and all new attribute values in the following ones
  1067. * @param ignorenonexistent the flag whether to silently ignore non existent columns up update request
  1068. * @return -1 on failure, amount of actually found and updated documents (might be 0) on success
  1069. *
  1070. * @throws SphinxException on invalid parameters
  1071. */
  1072. public int UpdateAttributes ( String index, String[] attrs, long[][] values, boolean ignorenonexistent ) throws SphinxException
  1073. {
  1074. /* check args */
  1075. myAssert ( index!=null && index.length()>0, "no index name provided" );
  1076. myAssert ( attrs!=null && attrs.length>0, "no attribute names provided" );
  1077. myAssert ( values!=null && values.length>0, "no update entries provided" );
  1078. for ( int i=0; i<values.length; i++ )
  1079. {
  1080. myAssert ( values[i]!=null, "update entry #" + i + " is null" );
  1081. myAssert ( values[i].length==1+attrs.length, "update entry #" + i + " has wrong length" );
  1082. }
  1083. /* build and send request */
  1084. ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
  1085. DataOutputStream req = new DataOutputStream ( reqBuf );
  1086. try
  1087. {
  1088. writeNetUTF8 ( req, index );
  1089. req.writeInt ( attrs.length );
  1090. req.writeInt ( ignorenonexistent ? 1 : 0 );
  1091. for ( int i=0; i<attrs.length; i++ )
  1092. {
  1093. writeNetUTF8 ( req, attrs[i] );
  1094. req.writeInt ( 0 ); // not MVA attr
  1095. }
  1096. req.writeInt ( values.length );
  1097. for ( int i=0; i<values.length; i++ )
  1098. {
  1099. req.writeLong ( values[i][0] ); /* send docid as 64bit value */
  1100. for ( int j=1; j<values[i].length; j++ )
  1101. req.writeInt ( (int)values[i][j] ); /* send values as 32bit values; FIXME! what happens when they are over 2^31? */
  1102. }
  1103. req.flush();
  1104. } catch ( Exception e )
  1105. {
  1106. _error = "internal error: failed to build request: " + e;
  1107. return -1;
  1108. }
  1109. /* get and parse response */
  1110. DataInputStream in = _DoRequest ( SEARCHD_COMMAND_UPDATE, VER_COMMAND_UPDATE, reqBuf );
  1111. if ( in==null )
  1112. return -1;
  1113. try
  1114. {
  1115. return in.readInt ();
  1116. } catch ( Exception e )
  1117. {
  1118. _error = "incomplete reply";
  1119. return -1;
  1120. }
  1121. }
  1122. /**
  1123. * Connect to searchd server and update given MVA attributes on given document in given indexes.
  1124. * Sample code that will set group_id=(123, 456, 789) where id=10
  1125. *
  1126. * <pre>
  1127. * String[] attrs = new String[1];
  1128. *
  1129. * attrs[0] = "group_id";
  1130. * int[][] values = new int[1][3];
  1131. *
  1132. * values[0] = new int[3]; values[0][0] = 123; values[0][1] = 456; values[0][2] = 789
  1133. *
  1134. * int res = cl.UpdateAttributesMVA ( "test1", 10, attrs, values );
  1135. * </pre>
  1136. *
  1137. * @param index index name(s) to update; might be distributed
  1138. * @param docid id of document to update
  1139. * @param attrs array with the names of the attributes to update
  1140. * @param values array of updates; each int[] entry must contains all new attribute values
  1141. * @param ignorenonexistent the flag whether to silently ignore non existent columns up update request
  1142. * @return -1 on failure, amount of actually found and updated documents (might be 0) on success
  1143. *
  1144. * @throws SphinxException on invalid parameters
  1145. */
  1146. public int UpdateAttributesMVA ( String index, long docid, String[] attrs, int[][] values, boolean ignorenonexistent ) throws SphinxException
  1147. {
  1148. /* check args */
  1149. myAssert ( index!=null && index.length()>0, "no index name provided" );
  1150. myAssert ( docid>0, "invalid document id" );
  1151. myAssert ( attrs!=null && attrs.length>0, "no attribute names provided" );
  1152. myAssert ( values!=null && values.length>0, "no update entries provided" );
  1153. myAssert ( values.length==attrs.length, "update entry has wrong length" );
  1154. for ( int i=0; i<values.length; i++ )
  1155. {
  1156. myAssert ( values[i]!=null, "update entry #" + i + " is null" );
  1157. }
  1158. /* build and send request */
  1159. ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
  1160. DataOutputStream req = new DataOutputStream ( reqBuf );
  1161. try
  1162. {
  1163. writeNetUTF8 ( req, index );
  1164. req.writeInt ( attrs.length );
  1165. req.writeInt ( ignorenonexistent ? 1 : 0 );
  1166. for ( int i=0; i<attrs.length; i++ )
  1167. {
  1168. writeNetUTF8 ( req, attrs[i] );
  1169. req.writeInt ( 1 ); // MVA attr
  1170. }
  1171. req.writeInt ( 1 );
  1172. req.writeLong ( docid ); /* send docid as 64bit value */
  1173. for ( int i=0; i<values.length; i++ )
  1174. {
  1175. req.writeInt ( values[i].length ); /* send MVA's count */
  1176. for ( int j=0; j<values[i].length; j++ ) /* send MVAs itself*/
  1177. req.writeInt ( values[i][j] );
  1178. }
  1179. req.flush();
  1180. } catch ( Exception e )
  1181. {
  1182. _error = "internal error: failed to build request: " + e;
  1183. return -1;
  1184. }
  1185. /* get and parse response */
  1186. DataInputStream in = _DoRequest ( SEARCHD_COMMAND_UPDATE, VER_COMMAND_UPDATE, reqBuf );
  1187. if ( in==null )
  1188. return -1;
  1189. try
  1190. {
  1191. return in.readInt ();
  1192. } catch ( Exception e )
  1193. {
  1194. _error = "incomplete reply";
  1195. return -1;
  1196. }
  1197. }
/**
 * Connect to searchd server and update given attributes on given documents in given
 * indexes (syntax sugar; non-existent columns are NOT silently ignored).
 *
 * @see #UpdateAttributes(String, String[], long[][], boolean)
 */
public int UpdateAttributes ( String index, String[] attrs, long[][] values ) throws SphinxException
{
	return UpdateAttributes ( index, attrs, values, false );
}
/**
 * Connect to searchd server and update given MVA attributes on given document in given
 * indexes (syntax sugar; non-existent columns are NOT silently ignored).
 *
 * @see #UpdateAttributesMVA(String, long, String[], int[][], boolean)
 */
public int UpdateAttributesMVA ( String index, long docid, String[] attrs, int[][] values ) throws SphinxException
{
	return UpdateAttributesMVA ( index, docid, attrs, values, false );
}
  1206. /**
  1207. * Connect to searchd server, and generate keyword list for a given query.
  1208. * Returns null on failure, an array of Maps with misc per-keyword info on success.
  1209. */
  1210. public Map[] BuildKeywords ( String query, String index, boolean hits ) throws SphinxException
  1211. {
  1212. /* build request */
  1213. ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
  1214. DataOutputStream req = new DataOutputStream ( reqBuf );
  1215. try
  1216. {
  1217. writeNetUTF8 ( req, query );
  1218. writeNetUTF8 ( req, index );
  1219. req.writeInt ( hits ? 1 : 0 );
  1220. } catch ( Exception e )
  1221. {
  1222. _error = "internal error: failed to build request: " + e;
  1223. return null;
  1224. }
  1225. /* run request */
  1226. DataInputStream in = _DoRequest ( SEARCHD_COMMAND_KEYWORDS, VER_COMMAND_KEYWORDS, reqBuf );
  1227. if ( in==null )
  1228. return null;
  1229. /* parse reply */
  1230. try
  1231. {
  1232. int iNumWords = in.readInt ();
  1233. Map[] res = new Map[iNumWords];
  1234. for ( int i=0; i<iNumWords; i++ )
  1235. {
  1236. res[i] = new LinkedHashMap ();
  1237. res[i].put ( "tokenized", readNetUTF8 ( in ) );
  1238. res[i].put ( "normalized", readNetUTF8 ( in ) );
  1239. if ( hits )
  1240. {
  1241. res[i].put ( "docs", new Long ( readDword ( in ) ) );
  1242. res[i].put ( "hits", new Long ( readDword ( in ) ) );
  1243. }
  1244. }
  1245. return res;
  1246. } catch ( Exception e )
  1247. {
  1248. _error = "incomplete reply";
  1249. return null;
  1250. }
  1251. }
  1252. /**
  1253. * Force attribute flush, and block until it completes.
  1254. * Returns current internal flush tag on success, -1 on failure.
  1255. */
  1256. public int FlushAttributes() throws SphinxException
  1257. {
  1258. /* build request */
  1259. ByteArrayOutputStream reqBuf = new ByteArrayOutputStream();
  1260. /* run request */
  1261. DataInputStream in = _DoRequest ( SEARCHD_COMMAND_FLUSHATTRS, VER_COMMAND_FLUSHATTRS, reqBuf );
  1262. if ( in==null )
  1263. return -1;
  1264. /* parse reply */
  1265. try
  1266. {
  1267. int iFlushTag = in.readInt ();
  1268. return iFlushTag;
  1269. } catch ( Exception e )
  1270. {
  1271. _error = "incomplete reply";
  1272. return -1;
  1273. }
  1274. }
  1275. /** Escape the characters with special meaning in query syntax. */
  1276. static public String EscapeString ( String s )
  1277. {
  1278. return s.replaceAll ( "([\\(\\)\\|\\-\\!\\@\\~\\\\\\\"\\&\\/\\^\\$\\=])", "\\\\$1" );
  1279. }
  1280. /** Open persistent connection to searchd. */
  1281. public boolean Open()
  1282. {
  1283. if ( _socket!=null )
  1284. {
  1285. _error = "already connected";
  1286. return false;
  1287. }
  1288. Socket sock = _Connect();
  1289. if ( sock==null )
  1290. return false;
  1291. // command, command version = 0, body length = 4, body = 1
  1292. try
  1293. {
  1294. DataOutputStream sOut = new DataOutputStream ( sock.getOutputStream() );
  1295. sOut.writeShort ( SEARCHD_COMMAND_PERSIST );
  1296. sOut.writeShort ( 0 );
  1297. sOut.writeInt ( 4 );
  1298. sOut.writeInt ( 1 );
  1299. } catch ( IOException e )
  1300. {
  1301. _error = "network error: " + e;
  1302. _connerror = true;
  1303. }
  1304. _socket = sock;
  1305. return true;
  1306. }
  1307. /** Close existing persistent connection. */
  1308. public boolean Close()
  1309. {
  1310. if ( _socket==null )
  1311. {
  1312. _error = "not connected";
  1313. return false;
  1314. }
  1315. try
  1316. {
  1317. _socket.close();
  1318. } catch ( IOException e )
  1319. {}
  1320. _socket = null;
  1321. return true;
  1322. }
  1323. }
  1324. /*
  1325. * $Id$
  1326. */