Merge pull request #29 from VictorDenisov/master

Retrieve another batch for next in the case of an unlimited request
Greg Weber committed on 2015-09-30 04:13:55 -07:00 (commit 1e84ac0d3c)
2 changed files with 14 additions and 2 deletions


@@ -49,7 +49,7 @@ import Prelude hiding (lookup)
 import Control.Exception (Exception, throwIO)
 import Control.Monad (unless, replicateM, liftM)
 import Data.Int (Int32)
-import Data.Maybe (listToMaybe, catMaybes)
+import Data.Maybe (listToMaybe, catMaybes, isNothing)
 import Data.Word (Word32)
 #if !MIN_VERSION_base(4,8,0)
 import Data.Monoid (mappend)
@@ -628,7 +628,7 @@ next (Cursor fcol batchSize var) = modifyMVar var nextState where
             let newLimit = do
                       limit <- mLimit
                       return $ limit - 1
-            dBatch' <- if null docs' && cid /= 0 && (newLimit > (Just 0))
+            dBatch' <- if null docs' && cid /= 0 && ((newLimit > (Just 0)) || (isNothing newLimit))
                 then nextBatch' fcol batchSize newLimit cid
                 else return $ return (Batch newLimit cid docs')
             when (newLimit == (Just 0)) $ unless (cid == 0) $ send [KillCursors [cid]]
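
The guard change above is the substance of the fix. When a query has no limit, mLimit is Nothing, so newLimit is also Nothing, and under the Ord instance for Maybe, Nothing sorts below Just 0. The old condition newLimit > (Just 0) was therefore False for unlimited cursors, and next stopped requesting batches as soon as the first one was drained. A minimal standalone sketch (not part of the patch; the two guard conditions are pulled out as hypothetical helper functions):

import Data.Maybe (isNothing)

-- Old guard: an unlimited cursor (newLimit = Nothing) never fetched again.
shouldFetchOld :: Maybe Int -> Bool
shouldFetchOld newLimit = newLimit > Just 0

-- New guard: Nothing (no limit) also keeps fetching.
shouldFetchNew :: Maybe Int -> Bool
shouldFetchNew newLimit = newLimit > Just 0 || isNothing newLimit

main :: IO ()
main = do
  print (shouldFetchOld Nothing)   -- False: Nothing < Just 0 under Ord Maybe
  print (shouldFetchNew Nothing)   -- True: unlimited cursor requests the next batch
  print (shouldFetchNew (Just 0))  -- False: limit exhausted, cursor is done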


@@ -135,6 +135,18 @@ spec = around withCleanDatabase $ do
         liftIO $ (length returnedDocs) `shouldBe` 100000
+  describe "rest" $ do
+    it "returns all documents from the collection" $ do
+      let docs = (flip map) [0..6000] $ \i ->
+            ["name" =: (T.pack $ "name " ++ (show i))]
+          collectionName = "smallCollection"
+      db $ insertAll_ collectionName docs
+      db $ do
+        cur <- find $ (select [] collectionName)
+        returnedDocs <- rest cur
+        liftIO $ (length returnedDocs) `shouldBe` 6001
   describe "aggregate" $ do
     it "aggregates to normalize and sort documents" $ do
       db $ insertAll_ "users" [ ["_id" =: "jane", "joined" =: parseDate "2011-03-02", "likes" =: ["golf", "racquetball"]]
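
The new spec exercises that path end to end: 6001 documents is more than a single server batch, so rest on an unlimited cursor has to issue at least one follow-up fetch to return them all. For reference, a hedged standalone sketch of the same scenario outside hspec (it assumes a mongod on the default local port, and uses the Query record's batchSize field to force small batches and hence several round trips):

{-# LANGUAGE OverloadedStrings #-}
import Database.MongoDB
import qualified Data.Text as T

main :: IO ()
main = do
  pipe <- connect (host "127.0.0.1")            -- assumes a local mongod
  n <- access pipe master "testdb" $ do
    insertAll_ "smallCollection" [ ["name" =: T.pack ("name " ++ show i)] | i <- [0 .. 6000 :: Int] ]
    -- a small batchSize forces rest/next to pull several batches from the server
    cur <- find (select [] "smallCollection") {batchSize = 100}
    docs <- rest cur
    return (length docs)
  close pipe
  print n  -- expected: 6001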