// sorterprecalc.cpp
  1. //
  2. // Copyright (c) 2017-2026, Manticore Software LTD (https://manticoresearch.com)
  3. // Copyright (c) 2001-2016, Andrew Aksyonoff
  4. // Copyright (c) 2008-2016, Sphinx Technologies Inc
  5. // All rights reserved
  6. //
  7. // This program is free software; you can redistribute it and/or modify
  8. // it under the terms of the GNU General Public License. You should have
  9. // received a copy of the GPL license along with this program; if you
  10. // did not, you can find it at http://www.gnu.org/
  11. //
  12. #include "sorterprecalc.h"
  13. #include "sortertraits.h"
  14. static const char * GetPrecalcSorterName() { return "Precalc"; }
// common base for "precalc" sorters: instead of accumulating per-match state they
// hold at most ONE precomputed match (m_tData) built from precalculated values
class FastBaseSorter_c : public MatchSorter_c, ISphNoncopyable, protected BaseGroupSorter_c
{
public:
			FastBaseSorter_c ( const CSphGroupSorterSettings & tSettings ) : BaseGroupSorter_c ( tSettings ) {}

	bool	IsGroupby () const final { return true; }
	bool	CanBeCloned() const final { return false; }		// single-match sorter; cloning is not supported (Clone() returns nullptr)
	void	SetMerge ( bool bMerge ) final {}				// no-op: nothing to merge
	// run the processor over the single stored match, if one was pushed
	void	Finalize ( MatchProcessor_i & tProcessor, bool, bool bFinalizeMatches ) final { if ( GetLength() ) tProcessor.Process ( &m_tData ); }
	int		GetLength() final { return m_bDataInitialized ? 1 : 0; }	// either 0 or 1 matches are ever stored
	ISphMatchSorter *	Clone() const final { return nullptr; }
	void	MoveTo ( ISphMatchSorter * pRhs, bool bCopyMeta ) final { assert ( 0 && "Not supported"); }
	bool	IsPrecalc() const final { return true; }
	int		Flatten ( CSphMatch * pTo ) final;

protected:
	CSphMatch	m_tData;					// the single precomputed result match
	bool		m_bDataInitialized = false;	// true once m_tData holds a valid match
};
  32. int FastBaseSorter_c::Flatten ( CSphMatch * pTo )
  33. {
  34. assert ( m_bDataInitialized );
  35. Swap ( *pTo, m_tData );
  36. m_iTotal = 0;
  37. m_bDataInitialized = false;
  38. return 1;
  39. }
// fast count distinct sorter
// works by using precalculated count distinct taken from secondary indexes
class FastCountDistinctSorter_c final : public FastBaseSorter_c
{
public:
			FastCountDistinctSorter_c ( int64_t iCountDistinct, const CSphString & sAttr, const CSphGroupSorterSettings & tSettings );

	bool	Push ( const CSphMatch & tEntry ) final { return PushEx(tEntry); }
	void	Push ( const VecTraits_T<const CSphMatch> & dMatches ) final { assert ( 0 && "Not supported in grouping"); }
	bool	PushGrouped ( const CSphMatch & tEntry, bool ) final { return PushEx(tEntry); }
	// report which attribute's value came from the precalc ("Precalc") iterator
	void	AddDesc ( CSphVector<IteratorDesc_t> & dDesc ) const final { dDesc.Add ( { m_sAttr, GetPrecalcSorterName() } ); }

private:
	int64_t		m_iCountDistinct = 0;	// precalculated COUNT(DISTINCT attr) value
	CSphString	m_sAttr;				// attribute the distinct count was calculated over

	bool	PushEx ( const CSphMatch & tEntry );
};
// iCountDistinct is the ready-made COUNT(DISTINCT) value (taken from secondary indexes);
// sAttr is the attribute it was computed for, used only for iterator descriptions
FastCountDistinctSorter_c::FastCountDistinctSorter_c ( int64_t iCountDistinct, const CSphString & sAttr, const CSphGroupSorterSettings & tSettings )
	: FastBaseSorter_c ( tSettings )
	, m_iCountDistinct ( iCountDistinct )
	, m_sAttr ( sAttr )
{}
  60. FORCE_INLINE bool FastCountDistinctSorter_c::PushEx ( const CSphMatch & tEntry )
  61. {
  62. if ( m_bDataInitialized )
  63. return true; // always return true, otherwise in RT indexes we won't be able to hit cutoff in disk chunks after the first one
  64. m_pSchema->CloneMatch ( m_tData, tEntry );
  65. m_tData.SetAttr ( m_tLocGroupby, 1 ); // fake group number
  66. m_tData.SetAttr ( m_tLocCount, 1 );
  67. m_tData.SetAttr ( m_tLocDistinct, m_iCountDistinct );
  68. m_bDataInitialized = true;
  69. ++m_iTotal;
  70. return true;
  71. }
  72. // fast count sorter
  73. // works by using precalculated count taken from secondary indexes
  74. class FastCountFilterSorter_c final : public FastBaseSorter_c
  75. {
  76. public:
  77. FastCountFilterSorter_c ( int64_t iCount, const CSphString & sAttr, const CSphGroupSorterSettings & tSettings );
  78. bool Push ( const CSphMatch & tEntry ) final { return PushEx(tEntry); }
  79. void Push ( const VecTraits_T<const CSphMatch> & dMatches ) final { assert ( 0 && "Not supported in grouping"); }
  80. bool PushGrouped ( const CSphMatch & tEntry, bool ) final { return PushEx(tEntry); }
  81. void AddDesc ( CSphVector<IteratorDesc_t> & dDesc ) const final { dDesc.Add ( { m_sAttr, GetPrecalcSorterName() } ); }
  82. private:
  83. int64_t m_iCount;
  84. CSphString m_sAttr;
  85. bool PushEx ( const CSphMatch & tEntry );
  86. };
// iCount is the ready-made COUNT value (taken from secondary indexes);
// sAttr is the attribute it was computed for, used only for iterator descriptions
FastCountFilterSorter_c::FastCountFilterSorter_c ( int64_t iCount, const CSphString & sAttr, const CSphGroupSorterSettings & tSettings )
	: FastBaseSorter_c ( tSettings )
	, m_iCount ( iCount )
	, m_sAttr ( sAttr )
{}
  92. FORCE_INLINE bool FastCountFilterSorter_c::PushEx ( const CSphMatch & tEntry )
  93. {
  94. if ( m_bDataInitialized )
  95. return true; // always return true, otherwise in RT indexes we won't be able to hit cutoff in disk chunks after the first one
  96. m_pSchema->CloneMatch ( m_tData, tEntry );
  97. m_tData.SetAttr ( m_tLocGroupby, 1 ); // fake group number
  98. m_tData.SetAttr ( m_tLocCount, m_iCount );
  99. m_bDataInitialized = true;
  100. m_iTotal++;
  101. return true;
  102. }
  103. ///////////////////////////////////////////////////////////////////////////////
  104. ISphMatchSorter * CreatePrecalcSorter ( const PrecalculatedSorterResults_t & tPrecalc, const CSphGroupSorterSettings & tSettings )
  105. {
  106. if ( tPrecalc.m_iCountDistinct!=-1 )
  107. return new FastCountDistinctSorter_c ( tPrecalc.m_iCountDistinct, tPrecalc.m_sAttr, tSettings );
  108. if ( tPrecalc.m_iCountFilter!=-1 )
  109. return new FastCountFilterSorter_c ( tPrecalc.m_iCountFilter, tPrecalc.m_sAttr, tSettings );
  110. if ( tPrecalc.m_iCount!=-1 )
  111. return new FastCountFilterSorter_c ( tPrecalc.m_iCount, "count(*)", tSettings );
  112. return nullptr;
  113. }