Skip to content

Commit

Permalink
modern sync: chunk bookmark update log entries into segments
Browse files Browse the repository at this point in the history
Summary: Repos like opsfiles or fbsource have a huge blobimport at the beginning of their bookmark update log, handling millions of commits at a time is just a recipe for disaster, so grabbing 5000 (for now) at a time.

Reviewed By: andreacampi

Differential Revision: D68563304

fbshipit-source-id: 16e3646550d462b984a3b8581735f4517ecf7523
  • Loading branch information
lmvasquezg authored and facebook-github-bot committed Jan 24, 2025
1 parent 5d28edf commit 36f7589
Showing 1 changed file with 22 additions and 17 deletions.
39 changes: 22 additions & 17 deletions eden/mononoke/modern_sync/src/sync.rs
Original file line number Diff line number Diff line change
Expand Up @@ -187,27 +187,32 @@ pub async fn sync(

let commits = repo
.commit_graph()
.ancestors_difference(ctx, to, from)
.ancestors_difference_segment_slices(ctx, to, from, 5000)
.await?;

let mut missing_changesets =
sender.filter_existing_commits(commits).await?;

missing_changesets.reverse();

stream::iter(missing_changesets.into_iter().map(Ok))
.try_for_each(|cs_id| {
commits
.try_for_each(|chunk| {
cloned!(ctx, repo, logger, sender);
async move {
process_one_changeset(
&cs_id,
&ctx,
repo,
&logger,
sender,
app_args.log_to_ods,
)
.await
let missing_changesets =
sender.filter_existing_commits(chunk).await?;
stream::iter(missing_changesets.into_iter().map(Ok))
.try_for_each(|cs_id| {
cloned!(ctx, repo, logger, sender);
async move {
process_one_changeset(
&cs_id,
&ctx,
repo,
&logger,
sender,
app_args.log_to_ods,
)
.await
}
})
.await?;
Ok(())
}
})
.await?;
Expand Down

0 comments on commit 36f7589

Please sign in to comment.