Add comment re magic number of 1000 for chunking
bartelink committed Nov 26, 2021
1 parent 4344ab0 commit b0f8b15
Showing 1 changed file with 2 additions and 0 deletions.
2 changes: 2 additions & 0 deletions feed-consumer/Ingester.fs
@@ -42,6 +42,8 @@ module PipelineEvent =
 let handle maxDop (stream, span) = async {
     match stream, span with
     | PipelineEvent.ItemsForFc (fc, items) ->
+        // Take chunks of max 1000 to make handler latency less 'lumpy'
+        // What constitutes a good chunk size will vary depending on the workload in question
         let ticketIds = seq { for x in items -> x.id } |> Seq.truncate 1000 |> Seq.toArray
         let maybeAccept = Seq.distinct ticketIds |> Seq.mapi (fun i _x -> async {
             do! Async.Sleep(TimeSpan.FromSeconds 1.)
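
For context, the sketch below (a standalone F# script, not part of the commit) illustrates the effect of the Seq.truncate call above: only the first 1000 items of a larger batch are materialized per handler invocation, which bounds the work, and hence the latency, of any single call; how the remaining items get picked up is determined by the surrounding pipeline and is not shown here. The item count and names are illustrative assumptions, not taken from the repository.

// Minimal sketch: truncating an incoming batch to a fixed chunk limit
let chunkLimit = 1000                                  // the 'magic number' the commit's comment documents
let incoming = seq { for i in 1 .. 2500 -> i }         // stand-in for the handler's `items`
// Only the first `chunkLimit` elements are materialized; the remainder is not enumerated here
let firstChunk = incoming |> Seq.truncate chunkLimit |> Seq.toArray
printfn "Handling %d of %d items in this invocation" firstChunk.Length (Seq.length incoming)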
