Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion __tests__/integration/organization.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ describe('organization', () => {
'Starting synchronization for organization: test-org'
);
expect(getLogger().info).toHaveBeenCalledWith(
'Found 0 repositories to synchronize'
expect.stringMatching(/Found 0 repositories to synchronize/)
);
expect(getLogger().info).toHaveBeenCalledWith(
expect.stringContaining('Completed synchronization for organization: test-org')
Expand Down
39 changes: 33 additions & 6 deletions src/organization.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,31 @@ async function checkOrganizationMembership(octokit, org, username) {
}
}

/**
* Bounded-concurrency map. Runs `fn(item)` over `items` with at most
* `concurrency` calls in flight. Resolves to an array of results in the
* same order as `items`. No new dependency.
*
* Used by `synchronizeAllRepositories` to avoid the previous fully-serial
* loop (~30-60 s for 13 repos) without going fully parallel (which would
* blow the GitHub 5000/h rate budget on a 100-repo org).
*/
/**
 * Bounded-concurrency map. Runs `fn(item, index)` over `items` with at most
 * `concurrency` calls in flight at once. Resolves to an array of results in
 * the same order as `items`. No new dependency.
 *
 * Used by `synchronizeAllRepositories` to avoid the previous fully-serial
 * loop (~30-60 s for 13 repos) without going fully parallel (which would
 * blow the GitHub 5000/h rate budget on a 100-repo org).
 *
 * Rejects fail-fast: if any `fn` call rejects, the returned promise rejects
 * with that error (workers already past the shared cursor may still be
 * mid-flight, but their results are discarded).
 *
 * @param {Array} items - Input values; may be empty.
 * @param {number} concurrency - Maximum simultaneous `fn` calls; must be a positive integer.
 * @param {(item: *, index: number) => Promise<*>} fn - Async mapper.
 * @returns {Promise<Array>} Results, index-aligned with `items`.
 * @throws {TypeError} If `concurrency` is not a positive integer — previously
 *   a concurrency of 0 (or NaN) spawned zero workers and silently resolved
 *   with an array of holes without ever calling `fn`.
 */
async function pLimitMap(items, concurrency, fn) {
  if (!Number.isInteger(concurrency) || concurrency < 1) {
    throw new TypeError(`concurrency must be a positive integer, got: ${concurrency}`);
  }
  const results = new Array(items.length);
  // Shared cursor: JS is single-threaded, so `cursor++` between awaits is
  // race-free; each worker claims the next unprocessed index.
  let cursor = 0;
  const workers = Array.from({ length: Math.min(concurrency, items.length) }, async () => {
    let i = cursor++;
    while (i < items.length) {
      results[i] = await fn(items[i], i);
      i = cursor++;
    }
  });
  await Promise.all(workers);
  return results;
}

const SYNC_CONCURRENCY = 3;

async function synchronizeAllRepositories(octokit, org) {
try {
getLogger().info(`Starting synchronization for organization: ${org}`);
Expand All @@ -29,27 +54,29 @@ async function synchronizeAllRepositories(octokit, org) {
per_page: 100
});

getLogger().info(`Found ${repos.length} repositories to synchronize`);
getLogger().info(`Found ${repos.length} repositories to synchronize (concurrency=${SYNC_CONCURRENCY})`);

for (const repo of repos) {
const todo = repos.filter((repo) => {
if (repo.archived) {
getLogger().info(`Skipping archived repository: ${repo.full_name}`);
continue;
return false;
}

if (isControlSurfaceRepo(repo.full_name)) {
getLogger().info(`Skipping control-surface repository: ${repo.full_name}`);
continue;
return false;
}
return true;
});

await pLimitMap(todo, SYNC_CONCURRENCY, async (repo) => {
getLogger().info(`Processing repository: ${repo.full_name}`);
const configResult = await configureRepository(octokit, repo);
if (!configResult.success) {
getLogger().warn(`⚠️ Failed to configure ${repo.full_name}: ${configResult.error}`);
} else {
getLogger().info(`✅ Synchronized ${repo.full_name}`);
}
}
});

getLogger().info(`✅ Completed synchronization for organization: ${org}`);
return { success: true, repositoriesProcessed: repos.length };
Expand Down
Loading