attribute column value
* is in given values set.
*
* @param attribute attribute name to filter by
* @param values values set to match the attribute value by
* @param exclude whether to exclude matching documents instead
*
* @throws SphinxException on invalid parameters
*/
public void SetFilter ( String attribute, int[] values, boolean exclude ) throws SphinxException
{
myAssert ( values!=null && values.length>0, "values array must not be null or empty" );
myAssert ( attribute!=null && attribute.length()>0, "attribute name must not be null or empty" );
try
{
writeNetUTF8 ( _filters, attribute );
_filters.writeInt ( SPH_FILTER_VALUES );
_filters.writeInt ( values.length );
for ( int i=0; iattribute column value
* is beetwen given min and max values (including themselves).
*
* @param attribute attribute name to filter by
* @param min min attribute value
* @param max max attribute value
* @param exclude whether to exclude matching documents instead
*
* @throws SphinxException on invalid parameters
* Set float range filter.
*/
/**
 * Set float range filter. Only match records where attribute column value
 * is between given min and max values (including themselves).
 *
 * @param attribute attribute name to filter by
 * @param min min attribute value
 * @param max max attribute value
 * @param exclude whether to exclude matching documents instead
 *
 * @throws SphinxException on invalid parameters
 */
public void SetFilterFloatRange ( String attribute, float min, float max, boolean exclude ) throws SphinxException
{
	/* FIX: validate the attribute name too, consistently with SetFilter();
	   previously a null attribute surfaced as an opaque "IOException: null" */
	myAssert ( attribute!=null && attribute.length()>0, "attribute name must not be null or empty" );
	myAssert ( min<=max, "min must be less or equal to max" );
	try
	{
		writeNetUTF8 ( _filters, attribute );
		/* NOTE(review): the sphinx protocol defines a distinct float-range filter
		   type (SPH_FILTER_FLOATRANGE) in later versions; confirm SPH_FILTER_RANGE
		   with float payloads is what this protocol version expects. */
		_filters.writeInt ( SPH_FILTER_RANGE );
		_filters.writeFloat ( min );
		_filters.writeFloat ( max );
		_filters.writeInt ( exclude ? 1 : 0 );
	} catch ( Exception e )
	{
		myAssert ( false, "IOException: " + e.getMessage() );
	}
	_filterCount++;
}
/**
* Setup geographical anchor point.
*
* Required to use @geodist in filters and sorting.
* Distance will be computed to this point.
*
* @param latitudeAttr the name of latitude attribute
* @param longitudeAttr the name of longitude attribute
* @param latitude anchor point latitude, in radians
* @param longitude anchor point longitude, in radians
*
* @throws SphinxException on invalid parameters
*/
public void SetGeoAnchor ( String latitudeAttr, String longitudeAttr, float latitude, float longitude ) throws SphinxException
{
	/* FIX: first assert's message was a copy-paste of the second ("longitudeAttr") */
	myAssert ( latitudeAttr!=null && latitudeAttr.length()>0, "latitudeAttr string must not be null or empty" );
	myAssert ( longitudeAttr!=null && longitudeAttr.length()>0, "longitudeAttr string must not be null or empty" );
	_latitudeAttr = latitudeAttr;
	_longitudeAttr = longitudeAttr;
	_latitude = latitude;
	_longitude = longitude;
}
/**
 * Set grouping attribute and function.
 *
 * @param attribute attribute name to group by
 * @param func grouping function; one of the SPH_GROUPBY_xxx constants
 * @param groupsort group sorting clause
 *
 * @throws SphinxException on unknown func value
 */
public void SetGroupBy ( String attribute, int func, String groupsort ) throws SphinxException
{
	boolean knownFunc =
		func==SPH_GROUPBY_DAY
		|| func==SPH_GROUPBY_WEEK
		|| func==SPH_GROUPBY_MONTH
		|| func==SPH_GROUPBY_YEAR
		|| func==SPH_GROUPBY_ATTR
		|| func==SPH_GROUPBY_ATTRPAIR;
	myAssert ( knownFunc, "unknown func value; use one of the available SPH_GROUPBY_xxx constants" );
	_groupBy = attribute;
	_groupFunc = func;
	_groupSort = groupsort;
}
/**
 * Set grouping attribute and function, using the default "@group desc"
 * group sorting clause (syntax sugar).
 */
public void SetGroupBy ( String attribute, int func ) throws SphinxException
{
	SetGroupBy ( attribute, func, "@group desc" );
}
/** Set the attribute used for count-distinct calculations in group-by queries. */
public void SetGroupDistinct ( String attribute )
{
	_groupDistinct = attribute;
}
/**
 * Set distributed retries count and delay.
 *
 * @param count retry count; must not be negative
 * @param delay retry delay; must not be negative
 *
 * @throws SphinxException on negative arguments
 */
public void SetRetries ( int count, int delay ) throws SphinxException
{
	myAssert ( count>=0, "count must not be negative" );
	myAssert ( delay>=0, "delay must not be negative" );
	_retrycount = count;
	_retrydelay = delay;
}
/** Set distributed retries count, with the default (zero) delay (syntax sugar). */
public void SetRetries ( int count ) throws SphinxException
{
	SetRetries ( count, 0 );
}
/**
 * Reset all currently set filters (for multi-queries).
 *
 * Recreates the raw filter buffer and its DataOutputStream wrapper,
 * zeroes the pending filter count, and clears the geo anchor point.
 */
public void ResetFilters()
{
/* should we close them first? (closing a ByteArrayOutputStream is a no-op,
   so dropping the old streams without close() is presumably harmless) */
_rawFilters = new ByteArrayOutputStream();
_filters = new DataOutputStream(_rawFilters);
_filterCount = 0;
/* reset GEO anchor */
_latitudeAttr = null;
_longitudeAttr = null;
_latitude = 0;
_longitude = 0;
}
/** Connect to searchd server and run current search query against all indexes (syntax sugar). */
public SphinxResult Query ( String query ) throws SphinxException
{
	return Query ( query, "*" );
}
/**
 * Connect to searchd server and run current search query.
 *
 * @param query query string
 * @param index index name(s) to query. May contain anything-separated
 * list of index names, or "*" which means to query all indexes.
 * @return {@link SphinxResult} object, or null on network or searchd
 * error (in which case _error carries the message)
 *
 * @throws SphinxException on invalid parameters
 */
public SphinxResult Query ( String query, String index ) throws SphinxException
{
	myAssert ( _reqs==null || _reqs.size()==0, "AddQuery() and Query() can not be combined; use RunQueries() instead" );
	AddQuery(query, index);
	SphinxResult[] results = RunQueries();
	if (results == null || results.length < 1) {
		return null; /* probably network error; error message should be already filled */
	}
	SphinxResult res = results[0];
	/* FIX: null-check BEFORE dereferencing; the original read res.warning and
	   res.error first and only then tested res for null, so the check could
	   never fire — an NPE would have been thrown first */
	if (res == null) {
		return null;
	}
	_warning = res.warning;
	_error = res.error;
	if (res.getStatus() == SEARCHD_ERROR) {
		return null;
	}
	return res;
}
/**
 * Add new query with current settings to current search request.
 *
 * Serializes all current query settings into a per-query byte blob and
 * appends it to the pending batch (sent later by RunQueries()).
 *
 * @param query query string
 * @param index index name(s) to query
 * @return index of this query in the batch (use it to locate the matching
 *         entry in the array returned by RunQueries()), or -1 on failure
 *
 * @throws SphinxException on serialization errors
 */
public int AddQuery ( String query, String index ) throws SphinxException
{
ByteArrayOutputStream req = new ByteArrayOutputStream();
/* build request; field order below is the searchd wire format and must not change */
try {
DataOutputStream out = new DataOutputStream(req);
out.writeInt(_offset);
out.writeInt(_limit);
out.writeInt(_mode);
out.writeInt(_sort);
writeNetUTF8(out, _sortby);
writeNetUTF8(out, query);
/* per-field weights (legacy); zero-length when unset */
int weightLen = _weights != null ? _weights.length : 0;
out.writeInt(weightLen);
if (_weights != null) {
for (int i = 0; i < _weights.length; i++)
out.writeInt(_weights[i]);
}
writeNetUTF8(out, index);
/* presumably a reserved/flags field — always zero here; confirm against protocol spec */
out.writeInt(0);
/* document ID range limit */
out.writeInt(_minId);
out.writeInt(_maxId);
/* filters: count, then the pre-serialized filter blob built by the SetFilter* methods */
out.writeInt(_filterCount);
out.write(_rawFilters.toByteArray());
/* group-by, max matches, sort-by-group flag */
out.writeInt(_groupFunc);
writeNetUTF8(out, _groupBy);
out.writeInt(_maxMatches);
writeNetUTF8(out, _groupSort);
out.writeInt(_cutoff);
out.writeInt(_retrycount);
out.writeInt(_retrydelay);
writeNetUTF8(out, _groupDistinct);
/* anchor point: flag 0 when unset, otherwise 1 followed by attrs and coordinates */
if (_latitudeAttr == null || _latitudeAttr.length() == 0 || _longitudeAttr == null || _longitudeAttr.length() == 0) {
out.writeInt(0);
} else {
out.writeInt(1);
writeNetUTF8(out, _latitudeAttr);
writeNetUTF8(out, _longitudeAttr);
out.writeFloat(_latitude);
out.writeFloat(_longitude);
}
/* per-index weights */
out.writeInt(_indexWeights.size());
for (Iterator e = _indexWeights.keySet().iterator(); e.hasNext();) {
String indexName = (String) e.next();
Integer weight = (Integer) _indexWeights.get(indexName);
writeNetUTF8(out, indexName);
out.writeInt(weight.intValue());
}
out.flush();
/* append serialized query to the batch; position doubles as the return value */
int qIndex = _reqs.size();
_reqs.add(qIndex, req.toByteArray());
return qIndex;
} catch (Exception ex) {
/* myAssert(false, ...) converts any serialization failure into a SphinxException */
myAssert(false, "error on AddQuery: " + ex.getMessage());
} finally {
/* NOTE(review): closes the filter streams after EVERY AddQuery; close() on a
   ByteArrayOutputStream-backed stream is a no-op, so later SetFilter* calls
   still work — but verify this is intentional rather than a leftover */
try {
_filters.close();
_rawFilters.close();
} catch (IOException e) {
myAssert(false, "error on AddQuery: " + e.getMessage());
}
}
/* only reached when the catch branch ran without throwing */
return -1;
}
/**
 * Run all previously added search queries.
 *
 * Sends the whole pending batch (built by AddQuery()) to searchd in a
 * single SEARCHD_COMMAND_SEARCH request, then parses one result set per
 * query from the response.
 *
 * NOTE(review): the tail of this method (per-match parsing and beyond) is
 * missing from this chunk of the file.
 */
public SphinxResult[] RunQueries() throws SphinxException
{
if (_reqs == null || _reqs.size() < 1) {
_error = "no queries defined, issue AddQuery() first";
return null;
}
Socket sock = _Connect();
if (sock == null) return null;
/* send query, get response */
ByteArrayOutputStream req = new ByteArrayOutputStream();
DataOutputStream prepareRQ = null;
int nreqs = _reqs.size();
try {
/* header: command, version, total body length, number of sub-requests */
prepareRQ = new DataOutputStream(req);
prepareRQ.writeShort(SEARCHD_COMMAND_SEARCH);
prepareRQ.writeShort(VER_COMMAND_SEARCH);
/* 4 bytes for the nreqs field itself, plus every serialized sub-request */
int rqLen = 4;
for (int i = 0; i < nreqs; i++) {
byte[] subRq = (byte[]) _reqs.get(i);
rqLen += subRq.length;
}
prepareRQ.writeInt(rqLen);
prepareRQ.writeInt(nreqs);
for (int i = 0; i < nreqs; i++) {
byte[] subRq = (byte[]) _reqs.get(i);
prepareRQ.write(subRq);
}
OutputStream SockOut = sock.getOutputStream();
byte[] reqBytes = req.toByteArray();
SockOut.write(reqBytes);
} catch (Exception e) {
/* NOTE(review): myAssert(false, ...) throws, making the return below
   unreachable; the socket is also not closed on this path — verify */
myAssert(false, "Query: Unable to create read/write streams: " + e.getMessage());
return null;
}
/* reset requests so a new batch can be built after this call */
_reqs = new ArrayList();
/* get response */
byte[] response = null;
response = _GetResponse ( sock );
if (response == null) return null;
/* parse response */
SphinxResult[] results = new SphinxResult[nreqs];
DataInputStream in;
in = new DataInputStream(new ByteArrayInputStream(response));
/* read schema: one result set per sub-request, in order */
int ires;
try {
for (ires = 0; ires < nreqs; ires++) {
SphinxResult res = new SphinxResult();
results[ires] = res;
int status = in.readInt();
res.setStatus(status);
if (status != SEARCHD_OK) {
String message = readNetUTF8(in);
if (status == SEARCHD_WARNING) {
/* warning still carries a full result set; keep parsing */
res.warning = message;
} else {
/* hard error: record message and skip to the next result set */
res.error = message;
continue;
}
}
/* read fields */
int nfields = in.readInt();
res.fields = new String[nfields];
int pos = 0;
for (int i = 0; i < nfields; i++)
res.fields[i] = readNetUTF8(in);
/* read attrs: name/type pairs */
int nattrs = in.readInt();
res.attrTypes = new int[nattrs];
res.attrNames = new String[nattrs];
for (int i = 0; i < nattrs; i++) {
String AttrName = readNetUTF8(in);
int AttrType = in.readInt();
res.attrNames[i] = AttrName;
res.attrTypes[i] = AttrType;
}
/* read match count; id64 presumably flags 64-bit document IDs — confirm against protocol */
int count = in.readInt();
int id64 = in.readInt();
res.matches = new SphinxMatch[count];
for ( int matchesNo=0; matchesNo* String[] attrs = new String[1]; * * attrs[0] = "group_id"; * long[][] values = new long[2][2]; * * values[0] = new long[2]; values[0][0] = 1; values[0][1] = 123; * values[1] = new long[2]; values[1][0] = 3; values[1][1] = 456; * * int res = cl.UpdateAttributes ( "test1", attrs, values ); ** * @param index index name(s) to update; might be distributed * @param attrs array with the names of the attributes to update * @param values array of updates; each long[] entry must contains document ID * in the first element, and all new attribute values in the following ones * @return -1 on failure, amount of actually found and updated documents (might be 0) on success * * @throws SphinxException on invalid parameters */ public int UpdateAttributes ( String index, String[] attrs, long[][] values ) throws SphinxException { /* check args */ myAssert ( index!=null && index.length()>0, "no index name provided" ); myAssert ( attrs!=null && attrs.length>0, "no attribute names provided" ); myAssert ( values!=null && values.length>0, "no update entries provided" ); for ( int i=0; i