-
Notifications
You must be signed in to change notification settings - Fork 149
feat: implement hybrid cache architecture #1246
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 13 commits
956f388
cc49057
d6413ce
cf76665
e998d07
734621c
452eb18
168d9b0
08116ba
992c862
d5e1b5b
6d5f886
6bc0ddc
627137b
235e152
c762b5e
ab28d78
88bbce8
1737bfd
d35d109
0a54773
133e5e6
f03d686
bc0be9f
d07ed9c
2acaee7
49695ff
b5bc3d8
d110463
b073eb3
928846d
5b94ec9
7c05bfb
a42bd2e
fd23676
bd14e59
e67ff36
7e97d9e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
jescalada marked this conversation as resolved.
Show resolved
Hide resolved
|
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
| @@ -0,0 +1,181 @@ | ||||||
| import fs from 'fs'; | ||||||
| import path from 'path'; | ||||||
| import { getCacheConfig } from '../../../config'; | ||||||
|
|
||||||
| export interface CacheStats { | ||||||
| totalRepositories: number; | ||||||
| totalSizeMB: number; | ||||||
fabiovincenzi marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||||||
| repositories: Array<{ | ||||||
| name: string; | ||||||
| sizeMB: number; | ||||||
| lastAccessed: Date; | ||||||
| }>; | ||||||
| } | ||||||
|
|
||||||
| export class CacheManager { | ||||||
| private cacheDir: string; | ||||||
fabiovincenzi marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||||||
| private maxSizeGB: number; | ||||||
| private maxRepositories: number; | ||||||
|
|
||||||
| constructor( | ||||||
| cacheDir: string = './.remote/cache', | ||||||
| maxSizeGB: number = 2, | ||||||
| maxRepositories: number = 50, | ||||||
| ) { | ||||||
| this.cacheDir = cacheDir; | ||||||
| this.maxSizeGB = maxSizeGB; | ||||||
| this.maxRepositories = maxRepositories; | ||||||
| } | ||||||
|
|
||||||
| /** | ||||||
| * Update access time for repository (for LRU purposes) | ||||||
| */ | ||||||
| touchRepository(repoName: string): void { | ||||||
| const repoPath = path.join(this.cacheDir, repoName); | ||||||
| if (fs.existsSync(repoPath)) { | ||||||
| const now = new Date(); | ||||||
| fs.utimesSync(repoPath, now, now); | ||||||
| } | ||||||
| } | ||||||
|
|
||||||
| /** | ||||||
| * Get cache statistics | ||||||
| */ | ||||||
| getCacheStats(): CacheStats { | ||||||
| if (!fs.existsSync(this.cacheDir)) { | ||||||
| return { | ||||||
| totalRepositories: 0, | ||||||
| totalSizeMB: 0, | ||||||
| repositories: [], | ||||||
| }; | ||||||
| } | ||||||
|
|
||||||
| const repositories: Array<{ name: string; sizeMB: number; lastAccessed: Date }> = []; | ||||||
| let totalSizeMB = 0; | ||||||
|
|
||||||
| const entries = fs.readdirSync(this.cacheDir, { withFileTypes: true }); | ||||||
|
|
||||||
| for (const entry of entries) { | ||||||
| if (entry.isDirectory()) { | ||||||
| const repoPath = path.join(this.cacheDir, entry.name); | ||||||
| const sizeMB = this.getDirectorySize(repoPath); | ||||||
| const stats = fs.statSync(repoPath); | ||||||
|
|
||||||
| repositories.push({ | ||||||
| name: entry.name, | ||||||
| sizeMB, | ||||||
| lastAccessed: stats.atime, | ||||||
| }); | ||||||
|
|
||||||
| totalSizeMB += sizeMB; | ||||||
| } | ||||||
| } | ||||||
|
|
||||||
| // Sort by last accessed (newest first) | ||||||
| repositories.sort((a, b) => b.lastAccessed.getTime() - a.lastAccessed.getTime()); | ||||||
|
|
||||||
| return { | ||||||
| totalRepositories: repositories.length, | ||||||
| totalSizeMB, | ||||||
| repositories, | ||||||
| }; | ||||||
| } | ||||||
|
|
||||||
| /** | ||||||
| * Enforce cache limits using LRU eviction | ||||||
| */ | ||||||
| enforceLimits(): { removedRepos: string[]; freedMB: number } { | ||||||
| const stats = this.getCacheStats(); | ||||||
| const removedRepos: string[] = []; | ||||||
| let freedMB = 0; | ||||||
|
|
||||||
| // Sort repositories by last accessed (oldest first for removal) | ||||||
| const reposToEvaluate = [...stats.repositories].sort( | ||||||
fabiovincenzi marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||||||
| (a, b) => a.lastAccessed.getTime() - b.lastAccessed.getTime(), | ||||||
| ); | ||||||
|
||||||
|
|
||||||
| // Check size limit | ||||||
| let currentSizeMB = stats.totalSizeMB; | ||||||
| const maxSizeMB = this.maxSizeGB * 1024; | ||||||
|
|
||||||
| for (const repo of reposToEvaluate) { | ||||||
| const shouldRemove = | ||||||
| currentSizeMB > maxSizeMB || // Over size limit | ||||||
| stats.totalRepositories - removedRepos.length > this.maxRepositories; // Over count limit | ||||||
|
|
||||||
| if (shouldRemove) { | ||||||
| this.removeRepository(repo.name); | ||||||
| removedRepos.push(repo.name); | ||||||
| freedMB += repo.sizeMB; | ||||||
| currentSizeMB -= repo.sizeMB; | ||||||
| } else { | ||||||
| break; // We've cleaned enough | ||||||
| } | ||||||
| } | ||||||
|
|
||||||
| return { removedRepos, freedMB }; | ||||||
| } | ||||||
|
|
||||||
| /** | ||||||
| * Remove specific repository from cache | ||||||
| */ | ||||||
| private removeRepository(repoName: string): void { | ||||||
| const repoPath = path.join(this.cacheDir, repoName); | ||||||
| if (fs.existsSync(repoPath)) { | ||||||
| fs.rmSync(repoPath, { recursive: true, force: true }); | ||||||
| } | ||||||
| } | ||||||
|
|
||||||
| /** | ||||||
| * Calculate directory size in MB | ||||||
| */ | ||||||
| private getDirectorySize(dirPath: string): number { | ||||||
fabiovincenzi marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
| let totalBytes = 0; | ||||||
|
|
||||||
| const calculateSize = (currentPath: string) => { | ||||||
| const items = fs.readdirSync(currentPath, { withFileTypes: true }); | ||||||
|
|
||||||
| for (const item of items) { | ||||||
| const itemPath = path.join(currentPath, item.name); | ||||||
|
|
||||||
| if (item.isDirectory()) { | ||||||
| calculateSize(itemPath); | ||||||
| } else { | ||||||
| try { | ||||||
| const stats = fs.statSync(itemPath); | ||||||
| totalBytes += stats.size; | ||||||
| } catch (error) { | ||||||
| // Skip files that can't be read | ||||||
| } | ||||||
| } | ||||||
| } | ||||||
| }; | ||||||
|
|
||||||
| try { | ||||||
| calculateSize(dirPath); | ||||||
| } catch (error) { | ||||||
| return 0; | ||||||
| } | ||||||
|
|
||||||
| return Math.round(totalBytes / (1024 * 1024)); // Convert to MB | ||||||
|
||||||
| return Math.round(totalBytes / (1024 * 1024)); // Convert to MB | |
| return Math.ceil(totalBytes / (1024 * 1024)); // Convert to MB |
Math.round rounds to nearest, so any fraction under 0.5 MB is dropped — up to 512 KB per repository could go unaccounted. Not the end of the world, but it is safest to always round up rather than be short-changed.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
There's no conversion anymore since we are using bytes everywhere
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,16 +1,41 @@ | ||
| import { Action, Step } from '../../actions'; | ||
| import fs from 'node:fs'; | ||
|
|
||
| const WORK_DIR = './.remote/work'; | ||
|
|
||
| const exec = async (req: any, action: Action): Promise<Action> => { | ||
| const step = new Step('clearBareClone'); | ||
|
|
||
| // Recursively remove the contents of ./.remote and ignore exceptions | ||
| fs.rm('./.remote', { recursive: true, force: true }, (err) => { | ||
| if (err) { | ||
| throw err; | ||
| // In test environment, clean up EVERYTHING to prevent memory leaks | ||
| if (process.env.NODE_ENV === 'test') { | ||
| // TEST: Full cleanup (bare cache + all working copies) | ||
| try { | ||
| if (fs.existsSync('./.remote')) { | ||
| fs.rmSync('./.remote', { recursive: true, force: true }); | ||
| step.log('Test environment: Full .remote directory cleaned'); | ||
| } else { | ||
| step.log('Test environment: .remote directory already clean'); | ||
| } | ||
| } catch (err) { | ||
| step.log(`Warning: Could not clean .remote directory: ${err}`); | ||
| } | ||
| } else { | ||
|
||
| // PRODUCTION: Delete ONLY this push's working copy | ||
| const workCopy = `${WORK_DIR}/${action.id}`; | ||
|
|
||
| if (fs.existsSync(workCopy)) { | ||
| try { | ||
| fs.rmSync(workCopy, { recursive: true, force: true }); | ||
| step.log(`Cleaned working copy for push ${action.id}`); | ||
| } catch (err) { | ||
| step.log(`Warning: Could not clean working copy ${workCopy}: ${err}`); | ||
| } | ||
| } else { | ||
| step.log(`Working copy ${workCopy} not found (may have been already cleaned)`); | ||
| } | ||
| console.log(`.remote is deleted!`); | ||
| }); | ||
|
|
||
| step.log('Bare cache preserved for reuse'); | ||
| } | ||
|
|
||
| action.addStep(step); | ||
| return action; | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.