From 956f3883cfc61f3bf00f6a67ecf7789ac3bc37d6 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Fri, 10 Oct 2025 16:18:20 +0200 Subject: [PATCH 01/26] feat: implement hybrid cache architecture for repository cloning --- .../processors/push-action/cache-manager.ts | 181 ++++++++++++++++++ .../processors/push-action/clearBareClone.ts | 37 +++- .../processors/push-action/git-operations.ts | 122 ++++++++++++ src/proxy/processors/push-action/metrics.ts | 37 ++++ .../processors/push-action/pullRemote.ts | 107 ++++++++--- 5 files changed, 453 insertions(+), 31 deletions(-) create mode 100644 src/proxy/processors/push-action/cache-manager.ts create mode 100644 src/proxy/processors/push-action/git-operations.ts create mode 100644 src/proxy/processors/push-action/metrics.ts diff --git a/src/proxy/processors/push-action/cache-manager.ts b/src/proxy/processors/push-action/cache-manager.ts new file mode 100644 index 000000000..8cab9a4c7 --- /dev/null +++ b/src/proxy/processors/push-action/cache-manager.ts @@ -0,0 +1,181 @@ +import fs from 'fs'; +import path from 'path'; +import { getCacheConfig } from '../../../config'; + +export interface CacheStats { + totalRepositories: number; + totalSizeMB: number; + repositories: Array<{ + name: string; + sizeMB: number; + lastAccessed: Date; + }>; +} + +export class CacheManager { + private cacheDir: string; + private maxSizeGB: number; + private maxRepositories: number; + + constructor( + cacheDir: string = './.remote/cache', + maxSizeGB: number = 2, + maxRepositories: number = 50, + ) { + this.cacheDir = cacheDir; + this.maxSizeGB = maxSizeGB; + this.maxRepositories = maxRepositories; + } + + /** + * Update access time for repository (for LRU purposes) + */ + touchRepository(repoName: string): void { + const repoPath = path.join(this.cacheDir, repoName); + if (fs.existsSync(repoPath)) { + const now = new Date(); + fs.utimesSync(repoPath, now, now); + } + } + + /** + * Get cache statistics + */ + getCacheStats(): CacheStats { + if 
(!fs.existsSync(this.cacheDir)) { + return { + totalRepositories: 0, + totalSizeMB: 0, + repositories: [], + }; + } + + const repositories: Array<{ name: string; sizeMB: number; lastAccessed: Date }> = []; + let totalSizeMB = 0; + + const entries = fs.readdirSync(this.cacheDir, { withFileTypes: true }); + + for (const entry of entries) { + if (entry.isDirectory()) { + const repoPath = path.join(this.cacheDir, entry.name); + const sizeMB = this.getDirectorySize(repoPath); + const stats = fs.statSync(repoPath); + + repositories.push({ + name: entry.name, + sizeMB, + lastAccessed: stats.atime, + }); + + totalSizeMB += sizeMB; + } + } + + // Sort by last accessed (newest first) + repositories.sort((a, b) => b.lastAccessed.getTime() - a.lastAccessed.getTime()); + + return { + totalRepositories: repositories.length, + totalSizeMB, + repositories, + }; + } + + /** + * Enforce cache limits using LRU eviction + */ + enforceLimits(): { removedRepos: string[]; freedMB: number } { + const stats = this.getCacheStats(); + const removedRepos: string[] = []; + let freedMB = 0; + + // Sort repositories by last accessed (oldest first for removal) + const reposToEvaluate = [...stats.repositories].sort( + (a, b) => a.lastAccessed.getTime() - b.lastAccessed.getTime(), + ); + + // Check size limit + let currentSizeMB = stats.totalSizeMB; + const maxSizeMB = this.maxSizeGB * 1024; + + for (const repo of reposToEvaluate) { + const shouldRemove = + currentSizeMB > maxSizeMB || // Over size limit + stats.totalRepositories - removedRepos.length > this.maxRepositories; // Over count limit + + if (shouldRemove) { + this.removeRepository(repo.name); + removedRepos.push(repo.name); + freedMB += repo.sizeMB; + currentSizeMB -= repo.sizeMB; + } else { + break; // We've cleaned enough + } + } + + return { removedRepos, freedMB }; + } + + /** + * Remove specific repository from cache + */ + private removeRepository(repoName: string): void { + const repoPath = path.join(this.cacheDir, repoName); + if 
(fs.existsSync(repoPath)) { + fs.rmSync(repoPath, { recursive: true, force: true }); + } + } + + /** + * Calculate directory size in MB + */ + private getDirectorySize(dirPath: string): number { + let totalBytes = 0; + + const calculateSize = (currentPath: string) => { + const items = fs.readdirSync(currentPath, { withFileTypes: true }); + + for (const item of items) { + const itemPath = path.join(currentPath, item.name); + + if (item.isDirectory()) { + calculateSize(itemPath); + } else { + try { + const stats = fs.statSync(itemPath); + totalBytes += stats.size; + } catch (error) { + // Skip files that can't be read + } + } + } + }; + + try { + calculateSize(dirPath); + } catch (error) { + return 0; + } + + return Math.round(totalBytes / (1024 * 1024)); // Convert to MB + } + + /** + * Get cache configuration + */ + getConfig() { + return { + maxSizeGB: this.maxSizeGB, + maxRepositories: this.maxRepositories, + cacheDir: this.cacheDir, + }; + } +} + +// Global instance initialized with config +const config = getCacheConfig(); +export const cacheManager = new CacheManager( + config.cacheDir, + config.maxSizeGB, + config.maxRepositories, +); diff --git a/src/proxy/processors/push-action/clearBareClone.ts b/src/proxy/processors/push-action/clearBareClone.ts index 91f7f5b22..143dd3d39 100644 --- a/src/proxy/processors/push-action/clearBareClone.ts +++ b/src/proxy/processors/push-action/clearBareClone.ts @@ -1,16 +1,41 @@ import { Action, Step } from '../../actions'; import fs from 'node:fs'; +const WORK_DIR = './.remote/work'; + const exec = async (req: any, action: Action): Promise => { const step = new Step('clearBareClone'); - // Recursively remove the contents of ./.remote and ignore exceptions - fs.rm('./.remote', { recursive: true, force: true }, (err) => { - if (err) { - throw err; + // In test environment, clean up EVERYTHING to prevent memory leaks + if (process.env.NODE_ENV === 'test') { + // TEST: Full cleanup (bare cache + all working copies) + try { + if 
(fs.existsSync('./.remote')) { + fs.rmSync('./.remote', { recursive: true, force: true }); + step.log('Test environment: Full .remote directory cleaned'); + } else { + step.log('Test environment: .remote directory already clean'); + } + } catch (err) { + step.log(`Warning: Could not clean .remote directory: ${err}`); + } + } else { + // PRODUCTION: Delete ONLY this push's working copy + const workCopy = `${WORK_DIR}/${action.id}`; + + if (fs.existsSync(workCopy)) { + try { + fs.rmSync(workCopy, { recursive: true, force: true }); + step.log(`Cleaned working copy for push ${action.id}`); + } catch (err) { + step.log(`Warning: Could not clean working copy ${workCopy}: ${err}`); + } + } else { + step.log(`Working copy ${workCopy} not found (may have been already cleaned)`); } - console.log(`.remote is deleted!`); - }); + + step.log('Bare cache preserved for reuse'); + } action.addStep(step); return action; diff --git a/src/proxy/processors/push-action/git-operations.ts b/src/proxy/processors/push-action/git-operations.ts new file mode 100644 index 000000000..262609c09 --- /dev/null +++ b/src/proxy/processors/push-action/git-operations.ts @@ -0,0 +1,122 @@ +import { Step } from '../../actions'; +import { cacheManager } from './cache-manager'; + +/** + * Git Operations for Hybrid Cache + */ + +/** + * Execute a git command with credentials sanitization + */ +async function execGitCommand( + command: string, + step: Step, + maxBuffer: number = 50 * 1024 * 1024, +): Promise<{ stdout: string; stderr: string }> { + const { exec } = await import('child_process'); + const { promisify } = await import('util'); + const execAsync = promisify(exec); + + const { stdout, stderr } = await execAsync(command, { maxBuffer }); + + if (stdout) step.log(stdout.trim()); + if (stderr) step.log(stderr.trim()); + + return { stdout, stderr }; +} + +/** + * Build URL with embedded credentials + */ +function buildUrlWithCredentials(url: string, username: string, password: string): string { + 
return url.replace('://', `://${encodeURIComponent(username)}:${encodeURIComponent(password)}@`); +} + +/** + * Remove credentials from bare repository config + */ +async function sanitizeRepositoryConfig(bareRepo: string, cleanUrl: string): Promise { + const { exec } = await import('child_process'); + const { promisify } = await import('util'); + const execAsync = promisify(exec); + + // Remove any URL with credentials + await execAsync(`cd "${bareRepo}" && git config --unset remote.origin.url 2>/dev/null || true`); + // Set clean URL without credentials + await execAsync(`cd "${bareRepo}" && git config remote.origin.url "${cleanUrl}"`); +} + +/** + * Clone working copy from bare repository using native git + */ +export async function cloneWorkingCopy( + bareRepo: string, + workCopyPath: string, + step: Step, +): Promise { + try { + await execGitCommand(`git clone "${bareRepo}" "${workCopyPath}"`, step); + step.log(`Working copy created at ${workCopyPath}`); + } catch (error: any) { + step.log(`Failed to create working copy: ${error.message}`); + throw error; + } +} + +/** + * Fetch updates in bare repository using native git command + */ +export async function fetchBareRepository( + bareRepo: string, + url: string, + username: string, + password: string, + step: Step, +): Promise { + const urlWithCreds = buildUrlWithCredentials(url, username, password); + + try { + // Fetch all branches with depth=1 + await execGitCommand( + `cd "${bareRepo}" && git fetch --depth=1 "${urlWithCreds}" "+refs/heads/*:refs/heads/*"`, + step, + ); + + // SECURITY: Remove credentials from config + await sanitizeRepositoryConfig(bareRepo, url); + + step.log(`Bare repository updated (credentials removed)`); + } catch (error: any) { + step.log(`Failed to fetch bare repository: ${error.message}`); + throw error; + } +} + +/** + * Clone bare repository using native git command + */ +export async function cloneBareRepository( + bareRepo: string, + url: string, + username: string, + password: 
string, + step: Step, +): Promise { + const urlWithCreds = buildUrlWithCredentials(url, username, password); + + try { + await execGitCommand(`git clone --bare --depth=1 "${urlWithCreds}" "${bareRepo}"`, step); + + // SECURITY: Remove credentials from config immediately after clone + await sanitizeRepositoryConfig(bareRepo, url); + + step.log(`Bare repository created at ${bareRepo} (credentials sanitized)`); + + // Update access time for LRU after successful clone + const repoName = bareRepo.split('/').pop() || ''; + cacheManager.touchRepository(repoName); + } catch (error: any) { + step.log(`Failed to clone bare repository: ${error.message}`); + throw error; + } +} diff --git a/src/proxy/processors/push-action/metrics.ts b/src/proxy/processors/push-action/metrics.ts new file mode 100644 index 000000000..f7e32c077 --- /dev/null +++ b/src/proxy/processors/push-action/metrics.ts @@ -0,0 +1,37 @@ +import { Step } from '../../actions'; + +/** + * Performance Timer + * + * Logs basic timing info for operations + */ +export class PerformanceTimer { + private step: Step; + private startTime: number = 0; + private operation: string = ''; + + constructor(step: Step) { + this.step = step; + } + + start(operation: string): void { + this.operation = operation; + this.startTime = Date.now(); + this.step.log(`🚀 ${operation} started`); + } + + mark(message: string): void { + if (this.startTime > 0) { + const elapsed = Date.now() - this.startTime; + this.step.log(`⚡ ${message}: ${elapsed}ms`); + } + } + + end(): void { + if (this.startTime > 0) { + const totalTime = Date.now() - this.startTime; + this.step.log(`✅ ${this.operation} completed: ${totalTime}ms`); + this.startTime = 0; + } + } +} diff --git a/src/proxy/processors/push-action/pullRemote.ts b/src/proxy/processors/push-action/pullRemote.ts index 73b8981ec..e21b10208 100644 --- a/src/proxy/processors/push-action/pullRemote.ts +++ b/src/proxy/processors/push-action/pullRemote.ts @@ -1,45 +1,102 @@ import { Action, Step } 
from '../../actions'; import fs from 'fs'; -import git from 'isomorphic-git'; -import gitHttpClient from 'isomorphic-git/http/node'; +import { PerformanceTimer } from './metrics'; +import { cacheManager } from './cache-manager'; +import { cloneWorkingCopy, fetchBareRepository, cloneBareRepository } from './git-operations'; -const dir = './.remote'; +const BARE_CACHE = './.remote/cache'; +const WORK_DIR = './.remote/work'; const exec = async (req: any, action: Action): Promise => { const step = new Step('pullRemote'); + const timer = new PerformanceTimer(step); try { - action.proxyGitPath = `${dir}/${action.id}`; + // Paths for hybrid architecture + // Ensure repoName ends with .git for bare repository convention + const repoNameWithGit = action.repoName.endsWith('.git') + ? action.repoName + : `${action.repoName}.git`; + const bareRepo = `${BARE_CACHE}/${repoNameWithGit}`; + const workCopy = `${WORK_DIR}/${action.id}`; - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir); - } + // Check if bare cache exists + const bareExists = fs.existsSync(bareRepo); - if (!fs.existsSync(action.proxyGitPath)) { - step.log(`Creating folder ${action.proxyGitPath}`); - fs.mkdirSync(action.proxyGitPath, 0o755); - } + step.log(`Bare cache: ${bareExists ? 'EXISTS' : 'MISSING'}`); + step.log(`Strategy: ${bareExists ? 'FETCH + LOCAL_CLONE' : 'BARE_CLONE + LOCAL_CLONE'}`); + + // Start timing + const strategy = bareExists ? 
'CACHED' : 'CLONE'; + timer.start(`${strategy} ${action.repoName}`); - const cmd = `git clone ${action.url}`; - step.log(`Executing ${cmd}`); + if (!fs.existsSync(BARE_CACHE)) { + fs.mkdirSync(BARE_CACHE, { recursive: true }); + } + if (!fs.existsSync(WORK_DIR)) { + fs.mkdirSync(WORK_DIR, { recursive: true }); + } + timer.mark('Setup complete'); const authHeader = req.headers?.authorization; const [username, password] = Buffer.from(authHeader.split(' ')[1], 'base64') .toString() .split(':'); - await git.clone({ - fs, - http: gitHttpClient, - url: action.url, - dir: `${action.proxyGitPath}/${action.repoName}`, - onAuth: () => ({ username, password }), - singleBranch: true, - depth: 1, - }); - - step.log(`Completed ${cmd}`); - step.setContent(`Completed ${cmd}`); + // PHASE 1: Bare Cache (persistent, shared) === + if (bareExists) { + // CACHE HIT: Fetch updates in bare repository + step.log(`Fetching updates in bare cache...`); + + try { + await fetchBareRepository(bareRepo, action.url, username, password, step); + + // Update access time for LRU + cacheManager.touchRepository(`${action.repoName}.git`); + timer.mark('Fetch complete'); + } catch (fetchError) { + step.log(`Fetch failed, rebuilding bare cache: ${fetchError}`); + // Remove broken cache and re-clone + if (fs.existsSync(bareRepo)) { + fs.rmSync(bareRepo, { recursive: true, force: true }); + } + await cloneBareRepository(bareRepo, action.url, username, password, step); + timer.mark('Bare clone complete (fallback)'); + } + } else { + // CACHE MISS: Clone bare repository + step.log(`Cloning bare repository to cache...`); + await cloneBareRepository(bareRepo, action.url, username, password, step); + timer.mark('Bare clone complete'); + } + + // PHASE 2: Working Copy (temporary, isolated) === + step.log(`Creating isolated working copy for push ${action.id}...`); + + await cloneWorkingCopy(bareRepo, `${workCopy}/${action.repoName}`, step); + + timer.mark('Working copy ready'); + + // Set action path to working 
copy + action.proxyGitPath = workCopy; + + const completedMsg = bareExists + ? `Completed fetch + local clone (hybrid cache)` + : `Completed bare clone + local clone (hybrid cache)`; + + step.log(completedMsg); + step.setContent(completedMsg); + + // End timing + timer.end(); + + // Enforce cache limits (LRU eviction on bare cache) + const evictionResult = cacheManager.enforceLimits(); + if (evictionResult.removedRepos.length > 0) { + step.log( + `LRU evicted ${evictionResult.removedRepos.length} bare repos, freed ${evictionResult.freedMB}MB`, + ); + } } catch (e: any) { step.setError(e.toString('utf-8')); throw e; From cc49057bff0c87348f8fc639f2bd19045d6f6636 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Fri, 10 Oct 2025 16:18:46 +0200 Subject: [PATCH 02/26] feat: add configurable cache limits via proxy.config.json --- config.schema.json | 20 ++++++++++++++++++++ proxy.config.json | 5 +++++ src/config/generated/config.ts | 31 +++++++++++++++++++++++++++++++ src/config/index.ts | 15 +++++++++++++++ src/config/types.ts | 7 +++++++ 5 files changed, 78 insertions(+) diff --git a/config.schema.json b/config.schema.json index 4bb48a186..991675b4b 100644 --- a/config.schema.json +++ b/config.schema.json @@ -198,6 +198,26 @@ } } } + }, + "cache": { + "description": "Configuration for bare repository cache (hybrid cache system)", + "type": "object", + "properties": { + "maxSizeGB": { + "type": "number", + "description": "Maximum cache size in gigabytes (default 2GB)" + }, + "maxRepositories": { + "type": "number", + "description": "Maximum number of repositories in cache (default 50)" + }, + "cacheDir": { + "type": "string", + "description": "Directory path for bare repository cache (default ./.remote/cache)" + } + }, + "required": ["maxSizeGB", "maxRepositories", "cacheDir"], + "additionalProperties": false } }, "definitions": { diff --git a/proxy.config.json b/proxy.config.json index bdaedff4f..428f9c801 100644 --- a/proxy.config.json +++ b/proxy.config.json @@ 
-182,5 +182,10 @@ "loginRequired": true } ] + }, + "cache": { + "maxSizeGB": 2, + "maxRepositories": 50, + "cacheDir": "./.remote/cache" } } diff --git a/src/config/generated/config.ts b/src/config/generated/config.ts index 151269286..a4db4851f 100644 --- a/src/config/generated/config.ts +++ b/src/config/generated/config.ts @@ -35,6 +35,10 @@ export interface GitProxyConfig { * List of repositories that are authorised to be pushed to through the proxy. */ authorisedList?: AuthorisedRepo[]; + /** + * Configuration for bare repository cache (hybrid cache system) + */ + cache?: Cache; /** * Enforce rules and patterns on commits including e-mail and message */ @@ -264,6 +268,24 @@ export interface AuthorisedRepo { [property: string]: any; } +/** + * Configuration for bare repository cache (hybrid cache system) + */ +export interface Cache { + /** + * Directory path for bare repository cache (default ./.remote/cache) + */ + cacheDir: string; + /** + * Maximum number of repositories in cache (default 50) + */ + maxRepositories: number; + /** + * Maximum cache size in gigabytes (default 2GB) + */ + maxSizeGB: number; +} + /** * API Rate limiting configuration. 
*/ @@ -530,6 +552,7 @@ const typeMap: any = { typ: u(undefined, a(r('AuthenticationElement'))), }, { json: 'authorisedList', js: 'authorisedList', typ: u(undefined, a(r('AuthorisedRepo'))) }, + { json: 'cache', js: 'cache', typ: u(undefined, r('Cache')) }, { json: 'commitConfig', js: 'commitConfig', typ: u(undefined, m('any')) }, { json: 'configurationSources', js: 'configurationSources', typ: u(undefined, 'any') }, { json: 'contactEmail', js: 'contactEmail', typ: u(undefined, '') }, @@ -617,6 +640,14 @@ const typeMap: any = { ], 'any', ), + Cache: o( + [ + { json: 'cacheDir', js: 'cacheDir', typ: '' }, + { json: 'maxRepositories', js: 'maxRepositories', typ: 3.14 }, + { json: 'maxSizeGB', js: 'maxSizeGB', typ: 3.14 }, + ], + false, + ), RateLimit: o( [ { json: 'limit', js: 'limit', typ: 3.14 }, diff --git a/src/config/index.ts b/src/config/index.ts index 436a8a5b2..fad8cbab2 100644 --- a/src/config/index.ts +++ b/src/config/index.ts @@ -103,6 +103,9 @@ function mergeConfigurations( commitConfig: { ...defaultConfig.commitConfig, ...userSettings.commitConfig }, attestationConfig: { ...defaultConfig.attestationConfig, ...userSettings.attestationConfig }, rateLimit: userSettings.rateLimit || defaultConfig.rateLimit, + cache: userSettings.cache + ? 
{ ...defaultConfig.cache, ...userSettings.cache } + : defaultConfig.cache, tls: tlsConfig, tempPassword: { ...defaultConfig.tempPassword, ...userSettings.tempPassword }, // Preserve legacy SSL fields @@ -196,6 +199,7 @@ export const logConfiguration = () => { console.log(`data sink = ${JSON.stringify(getDatabase())}`); console.log(`authentication = ${JSON.stringify(getAuthMethods())}`); console.log(`rateLimit = ${JSON.stringify(getRateLimit())}`); + console.log(`cache = ${JSON.stringify(getCacheConfig())}`); }; export const getAPIs = () => { @@ -285,6 +289,17 @@ export const getRateLimit = () => { return config.rateLimit; }; +export const getCacheConfig = () => { + const config = loadFullConfiguration(); + return ( + config.cache || { + maxSizeGB: 2, + maxRepositories: 50, + cacheDir: './.remote/cache', + } + ); +}; + // Function to handle configuration updates const handleConfigUpdate = async (newConfig: Configuration) => { console.log('Configuration updated from external source'); diff --git a/src/config/types.ts b/src/config/types.ts index 291de4081..af71421df 100644 --- a/src/config/types.ts +++ b/src/config/types.ts @@ -23,6 +23,7 @@ export interface UserSettings { csrfProtection: boolean; domains: Record; rateLimit: RateLimitConfig; + cache: CacheConfig; } export interface TLSConfig { @@ -59,3 +60,9 @@ export interface TempPasswordConfig { export type RateLimitConfig = Partial< Pick >; + +export interface CacheConfig { + maxSizeGB: number; + maxRepositories: number; + cacheDir: string; +} From d6413ce1b22a50355cf96175a95ce5c94ad3630f Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Fri, 10 Oct 2025 16:19:15 +0200 Subject: [PATCH 03/26] test: update clearBareClone tests for hybrid cache structure --- test/processors/clearBareClone.test.js | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/test/processors/clearBareClone.test.js b/test/processors/clearBareClone.test.js index c58460913..611a895f2 100644 --- 
a/test/processors/clearBareClone.test.js +++ b/test/processors/clearBareClone.test.js @@ -11,7 +11,7 @@ const actionId = '123__456'; const timestamp = Date.now(); describe('clear bare and local clones', async () => { - it('pull remote generates a local .remote folder', async () => { + it('pull remote generates a local .remote folder with hybrid cache structure', async () => { const action = new Action(actionId, 'type', 'get', timestamp, 'finos/git-proxy.git'); action.url = 'https://github.com/finos/git-proxy.git'; @@ -26,14 +26,16 @@ describe('clear bare and local clones', async () => { action, ); - expect(fs.existsSync(`./.remote/${actionId}`)).to.be.true; + // Hybrid cache creates: .remote/cache (bare repos) and .remote/work (working copies) + expect(fs.existsSync(`./.remote/work/${actionId}`)).to.be.true; + expect(fs.existsSync(`./.remote/cache/git-proxy.git`)).to.be.true; }).timeout(20000); - it('clear bare clone function purges .remote folder and specific clone folder', async () => { + it('clear bare clone function purges .remote folder in test environment', async () => { const action = new Action(actionId, 'type', 'get', timestamp, 'finos/git-proxy.git'); await clearBareClone(null, action); - expect(fs.existsSync(`./.remote`)).to.throw; - expect(fs.existsSync(`./.remote/${actionId}`)).to.throw; + // In test environment, clearBareClone removes the entire .remote directory + expect(fs.existsSync(`./.remote`)).to.be.false; }); afterEach(() => { From cf76665bc94fc0710ef83dbf60514be125984775 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Fri, 10 Oct 2025 16:26:09 +0200 Subject: [PATCH 04/26] chore: fix metric logging --- src/proxy/processors/push-action/metrics.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/proxy/processors/push-action/metrics.ts b/src/proxy/processors/push-action/metrics.ts index f7e32c077..081d84ebc 100644 --- a/src/proxy/processors/push-action/metrics.ts +++ b/src/proxy/processors/push-action/metrics.ts @@ 
-17,20 +17,20 @@ export class PerformanceTimer { start(operation: string): void { this.operation = operation; this.startTime = Date.now(); - this.step.log(`🚀 ${operation} started`); + this.step.log(`${operation} started`); } mark(message: string): void { if (this.startTime > 0) { const elapsed = Date.now() - this.startTime; - this.step.log(`⚡ ${message}: ${elapsed}ms`); + this.step.log(`${message}: ${elapsed}ms`); } } end(): void { if (this.startTime > 0) { const totalTime = Date.now() - this.startTime; - this.step.log(`✅ ${this.operation} completed: ${totalTime}ms`); + this.step.log(`${this.operation} completed: ${totalTime}ms`); this.startTime = 0; } } From 734621cb8b28af46d48f276e1572dd6bff555bbd Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Mon, 20 Oct 2025 09:41:45 +0200 Subject: [PATCH 05/26] Merge branch 'main' into feature/hybrid-cache --- .gitignore | 6 + config.schema.json | 199 ++++- eslint.config.mjs | 1 - package-lock.json | 725 +++++++++++------- package.json | 47 +- packages/git-proxy-cli/{index.js => index.ts} | 138 ++-- packages/git-proxy-cli/package.json | 10 +- .../test/{testCli.test.js => testCli.test.ts} | 299 ++++---- .../test/{testCliUtils.js => testCliUtils.ts} | 119 +-- packages/git-proxy-cli/tsconfig.json | 21 +- proxy.config.json | 6 +- src/config/generated/config.ts | 268 ++++++- src/config/index.ts | 4 +- src/config/types.ts | 68 -- src/db/file/helper.ts | 1 + src/db/file/index.ts | 3 + src/db/file/pushes.ts | 5 +- src/db/file/repo.ts | 7 +- src/db/file/users.ts | 12 +- src/db/index.ts | 16 +- src/db/mongo/pushes.ts | 7 +- src/db/mongo/users.ts | 6 +- src/db/types.ts | 31 +- src/proxy/actions/Step.ts | 13 +- .../push-action/checkAuthorEmails.ts | 4 +- .../push-action/checkCommitMessages.ts | 4 +- src/proxy/processors/push-action/gitleaks.ts | 2 +- src/proxy/processors/push-action/scanDiff.ts | 10 +- src/service/emailSender.js | 20 - src/service/index.js | 142 ---- src/service/index.ts | 116 +++ ...{activeDirectory.js => 
activeDirectory.ts} | 54 +- src/service/passport/index.js | 36 - src/service/passport/index.ts | 39 + src/service/passport/jwtAuthHandler.js | 57 -- src/service/passport/jwtAuthHandler.ts | 81 ++ src/service/passport/jwtUtils.js | 99 --- src/service/passport/jwtUtils.ts | 103 +++ src/service/passport/ldaphelper.js | 51 -- src/service/passport/ldaphelper.ts | 64 ++ src/service/passport/local.js | 59 -- src/service/passport/local.ts | 70 ++ src/service/passport/oidc.js | 125 --- src/service/passport/oidc.ts | 130 ++++ src/service/passport/types.ts | 55 ++ src/service/routes/{auth.js => auth.ts} | 98 ++- src/service/routes/config.js | 22 - src/service/routes/config.ts | 22 + src/service/routes/healthcheck.js | 10 - src/service/routes/healthcheck.ts | 11 + src/service/routes/home.js | 14 - src/service/routes/home.ts | 15 + src/service/routes/index.js | 23 - src/service/routes/index.ts | 23 + .../routes/{publicApi.js => publicApi.ts} | 4 +- src/service/routes/push.js | 181 ----- src/service/routes/push.ts | 206 +++++ src/service/routes/{repo.js => repo.ts} | 67 +- src/service/routes/users.js | 19 - src/service/routes/users.ts | 25 + src/service/routes/utils.ts | 10 + src/service/urls.js | 20 - src/service/urls.ts | 20 + src/types/models.ts | 5 + src/types/passport-activedirectory.d.ts | 7 + test/1.test.js | 2 +- test/generated-config.test.js | 9 +- test/processors/gitLeaks.test.js | 2 +- test/processors/scanDiff.test.js | 5 +- test/services/routes/auth.test.js | 6 +- test/services/routes/users.test.js | 2 +- test/testJwtAuthHandler.test.js | 8 +- test/testLogin.test.js | 2 +- test/testOidc.test.js | 176 +++++ test/testProxyRoute.test.js | 2 +- test/testPush.test.js | 47 +- test/testRepoApi.test.js | 2 +- tsconfig.publish.json | 3 +- website/docs/configuration/reference.mdx | 553 +++++++++++-- 79 files changed, 3210 insertions(+), 1744 deletions(-) rename packages/git-proxy-cli/{index.js => index.ts} (80%) mode change 100755 => 100644 rename 
packages/git-proxy-cli/test/{testCli.test.js => testCli.test.ts} (69%) rename packages/git-proxy-cli/test/{testCliUtils.js => testCliUtils.ts} (74%) delete mode 100644 src/config/types.ts create mode 100644 src/db/file/helper.ts delete mode 100644 src/service/emailSender.js delete mode 100644 src/service/index.js create mode 100644 src/service/index.ts rename src/service/passport/{activeDirectory.js => activeDirectory.ts} (63%) delete mode 100644 src/service/passport/index.js create mode 100644 src/service/passport/index.ts delete mode 100644 src/service/passport/jwtAuthHandler.js create mode 100644 src/service/passport/jwtAuthHandler.ts delete mode 100644 src/service/passport/jwtUtils.js create mode 100644 src/service/passport/jwtUtils.ts delete mode 100644 src/service/passport/ldaphelper.js create mode 100644 src/service/passport/ldaphelper.ts delete mode 100644 src/service/passport/local.js create mode 100644 src/service/passport/local.ts delete mode 100644 src/service/passport/oidc.js create mode 100644 src/service/passport/oidc.ts create mode 100644 src/service/passport/types.ts rename src/service/routes/{auth.js => auth.ts} (69%) delete mode 100644 src/service/routes/config.js create mode 100644 src/service/routes/config.ts delete mode 100644 src/service/routes/healthcheck.js create mode 100644 src/service/routes/healthcheck.ts delete mode 100644 src/service/routes/home.js create mode 100644 src/service/routes/home.ts delete mode 100644 src/service/routes/index.js create mode 100644 src/service/routes/index.ts rename src/service/routes/{publicApi.js => publicApi.ts} (72%) delete mode 100644 src/service/routes/push.js create mode 100644 src/service/routes/push.ts rename src/service/routes/{repo.js => repo.ts} (75%) delete mode 100644 src/service/routes/users.js create mode 100644 src/service/routes/users.ts create mode 100644 src/service/routes/utils.ts delete mode 100644 src/service/urls.js create mode 100644 src/service/urls.ts create mode 100644 
src/types/passport-activedirectory.d.ts create mode 100644 test/testOidc.test.js diff --git a/.gitignore b/.gitignore index ea4f36546..c6076f1af 100644 --- a/.gitignore +++ b/.gitignore @@ -269,3 +269,9 @@ website/.docusaurus # Jetbrains IDE .idea + +# VS COde IDE +.vscode/settings.json + +# Generated from testing +/test/fixtures/test-package/package-lock.json diff --git a/config.schema.json b/config.schema.json index 991675b4b..f299eb284 100644 --- a/config.schema.json +++ b/config.schema.json @@ -30,19 +30,6 @@ }, "additionalProperties": false }, - "github": { - "type": "object", - "description": "Deprecated: Defunct property that was used to provide the API URL for GitHub. No longer referenced in the codebase.", - "properties": { - "baseUrl": { - "type": "string", - "format": "uri", - "examples": ["https://api.github.com"], - "deprecated": true - } - }, - "additionalProperties": false - }, "gitleaks": { "type": "object", "description": "Configuration for the gitleaks (https://github.com/gitleaks/gitleaks) plugin", @@ -57,16 +44,188 @@ "additionalProperties": false }, "commitConfig": { - "description": "Enforce rules and patterns on commits including e-mail and message", - "type": "object" + "title": "CommitConfig", + "description": "Block commits based on rules defined over author/committer e-mail addresses, commit message content and diff content", + "type": "object", + "additionalProperties": false, + "properties": { + "author": { + "title": "Author", + "description": "Rules applied to commit authors", + "type": "object", + "additionalProperties": false, + "properties": { + "email": { + "title": "Email", + "description": "Rules applied to author email addresses", + "type": "object", + "additionalProperties": false, + "properties": { + "local": { + "title": "Local", + "description": "Rules applied to the local portion of the email address (i.e. 
section before the @ symbol)", + "type": "object", + "additionalProperties": false, + "properties": { + "block": { + "title": "Block", + "description": "Block commits with author email addresses where the first part matches this regular expression", + "type": "string" + } + }, + "required": [] + }, + "domain": { + "title": "Domain", + "description": "Rules applied to the domain portion of the email address (i.e. section after the @ symbol)", + "type": "object", + "additionalProperties": false, + "properties": { + "allow": { + "title": "Allow", + "description": "Allow only commits where the domain part of the email address matches this regular expression", + "type": "string" + } + }, + "required": [] + } + }, + "required": [] + } + }, + "required": [] + }, + "message": { + "title": "Message", + "description": "Rules applied to commit messages", + "type": "object", + "additionalProperties": false, + "properties": { + "block": { + "title": "MessageBlock", + "description": "Block commits where the commit message matches any of the given patterns", + "type": "object", + "additionalProperties": false, + "properties": { + "literals": { + "title": "MessageBlockLiteral", + "description": "Block commits where the commit message contains any of the given string literals", + "type": "array", + "items": { "type": "string" } + }, + "patterns": { + "title": "MessageBlockLiteral", + "description": "Block commits where the commit message matches any of the given regular expressions", + "type": "array", + "items": { "type": "string" } + } + }, + "required": [] + } + }, + "required": [] + }, + "diff": { + "title": "Diff", + "description": "Rules applied to commit diff content", + "type": "object", + "additionalProperties": false, + "properties": { + "block": { + "title": "DiffBlock", + "description": "Block commits where the commit diff matches any of the given patterns", + "type": "object", + "additionalProperties": false, + "properties": { + "literals": { + "title": 
"DiffBlockLiteral", + "description": "Block commits where the commit diff content contains any of the given string literals", + "type": "array", + "items": { + "type": "string" + } + }, + "patterns": { + "title": "MessageBlockPatterns", + "description": "Block commits where the commit diff content matches any of the given regular expressions", + "type": "array", + "items": {} + }, + "providers": { + "title": "MessageBlockProviders", + "description": "Block commits where the commit diff content matches any of the given regular expressions, except where the repository path (project/organisation) matches one of the listed privateOrganisations. The keys in this array are listed as the block type in logs.", + "type": "object", + "additionalProperties": { "type": "string" } + } + }, + "required": [] + } + }, + "required": [] + } + }, + "required": [] }, "attestationConfig": { - "description": "Customisable questions to add to attestation form", - "type": "object" + "title": "AttestationConfig", + "description": "Configuration for the attestation form displayed to reviewers. 
Reviewers will need to check the box next to each question in order to complete the review attestation.", + "type": "object", + "additionalProperties": false, + "properties": { + "questions": { + "title": "AttestationQuestions", + "description": "Customisable attestation questions to add to attestation form.", + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "properties": { + "label": { + "title": "QuestionLabel", + "description": "The text of the question that will be displayed to the reviewer", + "type": "string" + }, + "tooltip": { + "title": "QuestionTooltip", + "description": "A tooltip and optional set of links that will be displayed on mouseover of the question and used to provide additional guidance to the reviewer.", + "type": "object", + "additionalProperties": false, + "properties": { + "text": { + "type": "string" + }, + "links": { + "type": "array", + "items": { "type": "string", "format": "url" } + } + }, + "required": ["text"] + } + }, + "required": ["label", "tooltip"], + "title": "Question" + } + } + }, + "required": [] }, "domains": { - "description": "Provide domains to use alternative to the defaults", - "type": "object" + "description": "Provide custom URLs for the git proxy interfaces in case it cannot determine its own URL", + "type": "object", + "properties": { + "proxy": { + "title": "ProxyUrl", + "description": "Override for the default proxy URL, should include the protocol", + "type": "string", + "format": "url" + }, + "service": { + "title": "Service UI URL", + "description": "Override for the service UI URL, should include the protocol", + "type": "string", + "format": "url" + } + } }, "rateLimit": { "description": "API Rate limiting configuration.", @@ -93,7 +252,7 @@ "additionalProperties": false }, "privateOrganizations": { - "description": "Pattern searches for listed private organizations are disabled", + "description": "Provider searches for listed private organizations are disabled, see 
commitConfig.diff.block.providers", "type": "array" }, "urlShortener": { diff --git a/eslint.config.mjs b/eslint.config.mjs index d57683053..284e94b91 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -7,7 +7,6 @@ import js from '@eslint/js'; import ts from 'typescript-eslint'; import react from 'eslint-plugin-react'; import json from '@eslint/json'; -// @ts-expect-error import cypress from 'eslint-plugin-cypress'; import prettierConfig from 'eslint-config-prettier/flat'; diff --git a/package-lock.json b/package-lock.json index 47fa7bfe4..4e2549c3c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@finos/git-proxy", - "version": "2.0.0-rc.2", + "version": "2.0.0-rc.3", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@finos/git-proxy", - "version": "2.0.0-rc.2", + "version": "2.0.0-rc.3", "license": "Apache-2.0", "workspaces": [ "./packages/git-proxy-cli" @@ -14,7 +14,7 @@ "dependencies": { "@material-ui/core": "^4.12.4", "@material-ui/icons": "4.11.3", - "@primer/octicons-react": "^19.18.0", + "@primer/octicons-react": "^19.19.0", "@seald-io/nedb": "^4.1.2", "axios": "^1.12.2", "bcryptjs": "^3.0.2", @@ -24,12 +24,13 @@ "cors": "^2.8.5", "diff2html": "^3.4.52", "env-paths": "^3.0.0", + "escape-string-regexp": "^5.0.0", "express": "^4.21.2", "express-http-proxy": "^2.1.2", "express-rate-limit": "^8.1.0", "express-session": "^1.18.2", "history": "5.3.0", - "isomorphic-git": "^1.33.1", + "isomorphic-git": "^1.34.0", "jsonwebtoken": "^9.0.2", "jwk-to-pem": "^2.0.7", "load-plugin": "^6.0.3", @@ -37,8 +38,7 @@ "lusca": "^1.7.0", "moment": "^2.30.1", "mongodb": "^5.9.2", - "nodemailer": "^6.10.1", - "openid-client": "^6.8.0", + "openid-client": "^6.8.1", "parse-diff": "^0.11.1", "passport": "^0.7.0", "passport-activedirectory": "^1.4.0", @@ -64,14 +64,22 @@ "@commitlint/cli": "^19.8.1", "@commitlint/config-conventional": "^19.8.1", "@eslint/compat": "^1.4.0", - "@eslint/js": "^9.36.0", + "@eslint/js": 
"^9.37.0", "@eslint/json": "^0.13.2", + "@types/activedirectory2": "^1.2.6", + "@types/cors": "^2.8.19", "@types/domutils": "^1.7.8", "@types/express": "^5.0.3", "@types/express-http-proxy": "^1.6.7", + "@types/express-session": "^1.18.2", + "@types/jsonwebtoken": "^9.0.10", + "@types/jwk-to-pem": "^2.0.3", "@types/lodash": "^4.17.20", + "@types/lusca": "^1.7.5", "@types/mocha": "^10.0.10", - "@types/node": "^22.18.6", + "@types/node": "^22.18.10", + "@types/passport": "^1.0.17", + "@types/passport-local": "^1.0.38", "@types/react-dom": "^17.0.26", "@types/react-html-parser": "^2.0.7", "@types/sinon": "^17.0.4", @@ -80,15 +88,15 @@ "@vitejs/plugin-react": "^4.7.0", "chai": "^4.5.0", "chai-http": "^4.4.0", - "cypress": "^15.3.0", - "eslint": "^9.36.0", + "cypress": "^15.4.0", + "eslint": "^9.37.0", "eslint-config-prettier": "^10.1.8", - "eslint-plugin-cypress": "^5.1.1", + "eslint-plugin-cypress": "^5.2.0", "eslint-plugin-react": "^7.37.5", "fast-check": "^4.3.0", "globals": "^16.4.0", "husky": "^9.1.7", - "lint-staged": "^16.2.0", + "lint-staged": "^16.2.4", "mocha": "^10.8.2", "nyc": "^17.1.0", "prettier": "^3.6.2", @@ -98,9 +106,9 @@ "sinon-chai": "^3.7.0", "ts-mocha": "^11.1.0", "ts-node": "^10.9.2", - "tsx": "^4.20.5", - "typescript": "^5.9.2", - "typescript-eslint": "^8.44.1", + "tsx": "^4.20.6", + "typescript": "^5.9.3", + "typescript-eslint": "^8.46.1", "vite": "^4.5.14", "vite-tsconfig-paths": "^5.1.4" }, @@ -108,10 +116,10 @@ "node": ">=20.19.2" }, "optionalDependencies": { - "@esbuild/darwin-arm64": "^0.25.10", - "@esbuild/darwin-x64": "^0.25.10", - "@esbuild/linux-x64": "0.25.10", - "@esbuild/win32-x64": "0.25.10" + "@esbuild/darwin-arm64": "^0.25.11", + "@esbuild/darwin-x64": "^0.25.11", + "@esbuild/linux-x64": "0.25.11", + "@esbuild/win32-x64": "0.25.11" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -927,9 +935,9 @@ "license": "MIT" }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.9", - "resolved": 
"https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.9.tgz", - "integrity": "sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.10.tgz", + "integrity": "sha512-0NFWnA+7l41irNuaSVlLfgNT12caWJVLzp5eAVhZ0z1qpxbockccEt3s+149rE64VUI3Ml2zt8Nv5JVc4QXTsw==", "cpu": [ "ppc64" ], @@ -995,9 +1003,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.10", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.10.tgz", - "integrity": "sha512-JC74bdXcQEpW9KkV326WpZZjLguSZ3DfS8wrrvPMHgQOIEIG/sPXEN/V8IssoJhbefLRcRqw6RQH2NnpdprtMA==", + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.11.tgz", + "integrity": "sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w==", "cpu": [ "arm64" ], @@ -1011,9 +1019,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.10", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.10.tgz", - "integrity": "sha512-tguWg1olF6DGqzws97pKZ8G2L7Ig1vjDmGTwcTuYHbuU6TTjJe5FXbgs5C1BBzHbJ2bo1m3WkQDbWO2PvamRcg==", + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.11.tgz", + "integrity": "sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ==", "cpu": [ "x64" ], @@ -1197,9 +1205,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.25.10", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.10.tgz", - "integrity": "sha512-QSX81KhFoZGwenVyPoberggdW1nrQZSvfVDAIUXr3WqLRZGZqWk/P4T8p2SP+de2Sr5HPcvjhcJzEiulKgnxtA==", + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.11.tgz", + "integrity": 
"sha512-HSFAT4+WYjIhrHxKBwGmOOSpphjYkcswF449j6EjsjbinTZbp8PJtjsVK1XFJStdzXdy/jaddAep2FGY+wyFAQ==", "cpu": [ "x64" ], @@ -1213,9 +1221,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.9.tgz", - "integrity": "sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.10.tgz", + "integrity": "sha512-AKQM3gfYfSW8XRk8DdMCzaLUFB15dTrZfnX8WXQoOUpUBQ+NaAFCP1kPS/ykbbGYz7rxn0WS48/81l9hFl3u4A==", "cpu": [ "arm64" ], @@ -1247,9 +1255,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.9.tgz", - "integrity": "sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.10.tgz", + "integrity": "sha512-5Se0VM9Wtq797YFn+dLimf2Zx6McttsH2olUBsDml+lm0GOCRVebRWUvDtkY4BWYv/3NgzS8b/UM3jQNh5hYyw==", "cpu": [ "arm64" ], @@ -1281,9 +1289,9 @@ } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.9.tgz", - "integrity": "sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.10.tgz", + "integrity": "sha512-AVTSBhTX8Y/Fz6OmIVBip9tJzZEUcY8WLh7I59+upa5/GPhh2/aM6bvOMQySspnCCHvFi79kMtdJS1w0DXAeag==", "cpu": [ "arm64" ], @@ -1349,9 +1357,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.10", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.10.tgz", - "integrity": 
"sha512-9KpxSVFCu0iK1owoez6aC/s/EdUQLDN3adTxGCqxMVhrPDj6bt5dbrHDXUuq+Bs2vATFBBrQS5vdQ/Ed2P+nbw==", + "version": "0.25.11", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.11.tgz", + "integrity": "sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA==", "cpu": [ "x64" ], @@ -1448,9 +1456,27 @@ } }, "node_modules/@eslint/config-helpers": { - "version": "0.3.1", + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.0.tgz", + "integrity": "sha512-WUFvV4WoIwW8Bv0KeKCIIEgdSiFOsulyN0xrMu+7z43q/hkOLXjvb5u7UC9jDxvRzcrbEmuZBX5yJZz1741jog==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.16.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers/node_modules/@eslint/core": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.16.0.tgz", + "integrity": "sha512-nmC8/totwobIiFcGkDza3GIKfAw1+hLiYVrh3I1nIomQ8PEr5cxg34jnkmGawul/ep52wGRAcyeDCNtWKSOj4Q==", "dev": true, "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } @@ -1520,9 +1546,9 @@ "license": "MIT" }, "node_modules/@eslint/js": { - "version": "9.36.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.36.0.tgz", - "integrity": "sha512-uhCbYtYynH30iZErszX78U+nR3pJU3RHGQ57NXy5QupD4SBVwDeU8TNBy+MjMngc1UyIW9noKqsRqfjQTBU2dw==", + "version": "9.37.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.37.0.tgz", + "integrity": "sha512-jaS+NJ+hximswBG6pjNX0uEJZkrT0zwpVi3BA3vX22aFGjJjmgSTSmPpZCRKmoBL5VY/M6p0xsSJx7rk7sy5gg==", "dev": true, "license": "MIT", "engines": { @@ -2222,9 +2248,9 @@ } }, "node_modules/@primer/octicons-react": { - "version": "19.18.0", - "resolved": "https://registry.npmjs.org/@primer/octicons-react/-/octicons-react-19.18.0.tgz", - "integrity": 
"sha512-nLFlLmWfz3McbTiOUKVO+iwB15ALYQC9rHeP8K3qM1pyJ8svGaPjGR72BQSEM8ThyQUUodq/Re1n94tO5NNhzQ==", + "version": "19.19.0", + "resolved": "https://registry.npmjs.org/@primer/octicons-react/-/octicons-react-19.19.0.tgz", + "integrity": "sha512-dTO3khy50yS7XC0FB5L7Wwg+aEjI7mrdiZ+FeZGKiNSpkpcRDn7HTidLdtKgo0cJp6QKpqtUHGHRRpa+wrc6Bg==", "license": "MIT", "engines": { "node": ">=8" @@ -2311,6 +2337,16 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/activedirectory2": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@types/activedirectory2/-/activedirectory2-1.2.6.tgz", + "integrity": "sha512-mJsoOWf9LRpYBkExOWstWe6g6TQnZyZjVULNrX8otcCJgVliesk9T/+W+1ahrx2zaevxsp28sSKOwo/b7TOnSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/ldapjs": "*" + } + }, "node_modules/@types/babel__core": { "version": "7.20.5", "dev": true, @@ -2383,6 +2419,16 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/cors": { + "version": "2.8.19", + "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.19.tgz", + "integrity": "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/domhandler": { "version": "2.4.5", "dev": true, @@ -2430,6 +2476,16 @@ "@types/send": "*" } }, + "node_modules/@types/express-session": { + "version": "1.18.2", + "resolved": "https://registry.npmjs.org/@types/express-session/-/express-session-1.18.2.tgz", + "integrity": "sha512-k+I0BxwVXsnEU2hV77cCobC08kIsn4y44C3gC0b46uxZVMaXA04lSPgRLR/bSL2w0t0ShJiG8o4jPzRG/nscFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express": "*" + } + }, "node_modules/@types/htmlparser2": { "version": "3.10.7", "dev": true, @@ -2451,11 +2507,49 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/jsonwebtoken": { + "version": "9.0.10", + "resolved": 
"https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.10.tgz", + "integrity": "sha512-asx5hIG9Qmf/1oStypjanR7iKTv0gXQ1Ov/jfrX6kS/EO0OFni8orbmGCn0672NHR3kXHwpAwR+B368ZGN/2rA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/ms": "*", + "@types/node": "*" + } + }, + "node_modules/@types/jwk-to-pem": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/jwk-to-pem/-/jwk-to-pem-2.0.3.tgz", + "integrity": "sha512-I/WFyFgk5GrNbkpmt14auGO3yFK1Wt4jXzkLuI+fDBNtO5ZI2rbymyGd6bKzfSBEuyRdM64ZUwxU1+eDcPSOEQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/ldapjs": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/ldapjs/-/ldapjs-3.0.6.tgz", + "integrity": "sha512-E2Tn1ltJDYBsidOT9QG4engaQeQzRQ9aYNxVmjCkD33F7cIeLPgrRDXAYs0O35mK2YDU20c/+ZkNjeAPRGLM0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/lodash": { "version": "4.17.20", "dev": true, "license": "MIT" }, + "node_modules/@types/lusca": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/@types/lusca/-/lusca-1.7.5.tgz", + "integrity": "sha512-l49gAf8pu2iMzbKejLcz6Pqj+51H2na6BgORv1ElnE8ByPFcBdh/eZ0WNR1Va/6ZuNSZa01Hoy1DTZ3IZ+y+kA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express": "*" + } + }, "node_modules/@types/mime": { "version": "1.3.5", "dev": true, @@ -2466,15 +2560,55 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/node": { - "version": "22.18.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.6.tgz", - "integrity": "sha512-r8uszLPpeIWbNKtvWRt/DbVi5zbqZyj1PTmhRMqBMvDnaz1QpmSKujUtJLrqGZeoM8v72MfYggDceY4K1itzWQ==", + "version": 
"22.18.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.10.tgz", + "integrity": "sha512-anNG/V/Efn/YZY4pRzbACnKxNKoBng2VTFydVu8RRs5hQjikP8CQfaeAV59VFSCzKNp90mXiVXW2QzV56rwMrg==", "license": "MIT", "dependencies": { "undici-types": "~6.21.0" } }, + "node_modules/@types/passport": { + "version": "1.0.17", + "resolved": "https://registry.npmjs.org/@types/passport/-/passport-1.0.17.tgz", + "integrity": "sha512-aciLyx+wDwT2t2/kJGJR2AEeBz0nJU4WuRX04Wu9Dqc5lSUtwu0WERPHYsLhF9PtseiAMPBGNUOtFjxZ56prsg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express": "*" + } + }, + "node_modules/@types/passport-local": { + "version": "1.0.38", + "resolved": "https://registry.npmjs.org/@types/passport-local/-/passport-local-1.0.38.tgz", + "integrity": "sha512-nsrW4A963lYE7lNTv9cr5WmiUD1ibYJvWrpE13oxApFsRt77b0RdtZvKbCdNIY4v/QZ6TRQWaDDEwV1kCTmcXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express": "*", + "@types/passport": "*", + "@types/passport-strategy": "*" + } + }, + "node_modules/@types/passport-strategy": { + "version": "0.2.38", + "resolved": "https://registry.npmjs.org/@types/passport-strategy/-/passport-strategy-0.2.38.tgz", + "integrity": "sha512-GC6eMqqojOooq993Tmnmp7AUTbbQSgilyvpCYQjT+H6JfG/g6RGc7nXEniZlp0zyKJ0WUdOiZWLBZft9Yug1uA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express": "*", + "@types/passport": "*" + } + }, "node_modules/@types/prop-types": { "version": "15.7.11", "license": "MIT" @@ -2627,17 +2761,17 @@ } }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.44.1.tgz", - "integrity": "sha512-molgphGqOBT7t4YKCSkbasmu1tb1MgrZ2szGzHbclF7PNmOkSTQVHy+2jXOSnxvR3+Xe1yySHFZoqMpz3TfQsw==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.46.1.tgz", + "integrity": 
"sha512-rUsLh8PXmBjdiPY+Emjz9NX2yHvhS11v0SR6xNJkm5GM1MO9ea/1GoDKlHHZGrOJclL/cZ2i/vRUYVtjRhrHVQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.44.1", - "@typescript-eslint/type-utils": "8.44.1", - "@typescript-eslint/utils": "8.44.1", - "@typescript-eslint/visitor-keys": "8.44.1", + "@typescript-eslint/scope-manager": "8.46.1", + "@typescript-eslint/type-utils": "8.46.1", + "@typescript-eslint/utils": "8.46.1", + "@typescript-eslint/visitor-keys": "8.46.1", "graphemer": "^1.4.0", "ignore": "^7.0.0", "natural-compare": "^1.4.0", @@ -2651,7 +2785,7 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.44.1", + "@typescript-eslint/parser": "^8.46.1", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <6.0.0" } @@ -2665,16 +2799,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.44.1.tgz", - "integrity": "sha512-EHrrEsyhOhxYt8MTg4zTF+DJMuNBzWwgvvOYNj/zm1vnaD/IC5zCXFehZv94Piqa2cRFfXrTFxIvO95L7Qc/cw==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.46.1.tgz", + "integrity": "sha512-6JSSaBZmsKvEkbRUkf7Zj7dru/8ZCrJxAqArcLaVMee5907JdtEbKGsZ7zNiIm/UAkpGUkaSMZEXShnN2D1HZA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.44.1", - "@typescript-eslint/types": "8.44.1", - "@typescript-eslint/typescript-estree": "8.44.1", - "@typescript-eslint/visitor-keys": "8.44.1", + "@typescript-eslint/scope-manager": "8.46.1", + "@typescript-eslint/types": "8.46.1", + "@typescript-eslint/typescript-estree": "8.46.1", + "@typescript-eslint/visitor-keys": "8.46.1", "debug": "^4.3.4" }, "engines": { @@ -2690,14 +2824,14 @@ } }, "node_modules/@typescript-eslint/project-service": { - "version": "8.44.1", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.44.1.tgz", - "integrity": "sha512-ycSa60eGg8GWAkVsKV4E6Nz33h+HjTXbsDT4FILyL8Obk5/mx4tbvCNsLf9zret3ipSumAOG89UcCs/KRaKYrA==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.46.1.tgz", + "integrity": "sha512-FOIaFVMHzRskXr5J4Jp8lFVV0gz5ngv3RHmn+E4HYxSJ3DgDzU7fVI1/M7Ijh1zf6S7HIoaIOtln1H5y8V+9Zg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.44.1", - "@typescript-eslint/types": "^8.44.1", + "@typescript-eslint/tsconfig-utils": "^8.46.1", + "@typescript-eslint/types": "^8.46.1", "debug": "^4.3.4" }, "engines": { @@ -2712,14 +2846,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.44.1.tgz", - "integrity": "sha512-NdhWHgmynpSvyhchGLXh+w12OMT308Gm25JoRIyTZqEbApiBiQHD/8xgb6LqCWCFcxFtWwaVdFsLPQI3jvhywg==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.1.tgz", + "integrity": "sha512-weL9Gg3/5F0pVQKiF8eOXFZp8emqWzZsOJuWRUNtHT+UNV2xSJegmpCNQHy37aEQIbToTq7RHKhWvOsmbM680A==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.44.1", - "@typescript-eslint/visitor-keys": "8.44.1" + "@typescript-eslint/types": "8.46.1", + "@typescript-eslint/visitor-keys": "8.46.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2730,9 +2864,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.44.1.tgz", - "integrity": "sha512-B5OyACouEjuIvof3o86lRMvyDsFwZm+4fBOqFHccIctYgBjqR3qT39FBYGN87khcgf0ExpdCBeGKpKRhSFTjKQ==", + "version": "8.46.1", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.46.1.tgz", + "integrity": "sha512-X88+J/CwFvlJB+mK09VFqx5FE4H5cXD+H/Bdza2aEWkSb8hnWIQorNcscRl4IEo1Cz9VI/+/r/jnGWkbWPx54g==", "dev": true, "license": "MIT", "engines": { @@ -2747,15 +2881,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.44.1.tgz", - "integrity": "sha512-KdEerZqHWXsRNKjF9NYswNISnFzXfXNDfPxoTh7tqohU/PRIbwTmsjGK6V9/RTYWau7NZvfo52lgVk+sJh0K3g==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.46.1.tgz", + "integrity": "sha512-+BlmiHIiqufBxkVnOtFwjah/vrkF4MtKKvpXrKSPLCkCtAp8H01/VV43sfqA98Od7nJpDcFnkwgyfQbOG0AMvw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.44.1", - "@typescript-eslint/typescript-estree": "8.44.1", - "@typescript-eslint/utils": "8.44.1", + "@typescript-eslint/types": "8.46.1", + "@typescript-eslint/typescript-estree": "8.46.1", + "@typescript-eslint/utils": "8.46.1", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, @@ -2772,9 +2906,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.44.1.tgz", - "integrity": "sha512-Lk7uj7y9uQUOEguiDIDLYLJOrYHQa7oBiURYVFqIpGxclAFQ78f6VUOM8lI2XEuNOKNB7XuvM2+2cMXAoq4ALQ==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.1.tgz", + "integrity": "sha512-C+soprGBHwWBdkDpbaRC4paGBrkIXxVlNohadL5o0kfhsXqOC6GYH2S/Obmig+I0HTDl8wMaRySwrfrXVP8/pQ==", "dev": true, "license": "MIT", "engines": { @@ -2786,16 +2920,16 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.44.1.tgz", - "integrity": 
"sha512-qnQJ+mVa7szevdEyvfItbO5Vo+GfZ4/GZWWDRRLjrxYPkhM+6zYB2vRYwCsoJLzqFCdZT4mEqyJoyzkunsZ96A==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.1.tgz", + "integrity": "sha512-uIifjT4s8cQKFQ8ZBXXyoUODtRoAd7F7+G8MKmtzj17+1UbdzFl52AzRyZRyKqPHhgzvXunnSckVu36flGy8cg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.44.1", - "@typescript-eslint/tsconfig-utils": "8.44.1", - "@typescript-eslint/types": "8.44.1", - "@typescript-eslint/visitor-keys": "8.44.1", + "@typescript-eslint/project-service": "8.46.1", + "@typescript-eslint/tsconfig-utils": "8.46.1", + "@typescript-eslint/types": "8.46.1", + "@typescript-eslint/visitor-keys": "8.46.1", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", @@ -2841,9 +2975,9 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", "dev": true, "license": "ISC", "bin": { @@ -2854,16 +2988,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.44.1.tgz", - "integrity": "sha512-DpX5Fp6edTlocMCwA+mHY8Mra+pPjRZ0TfHkXI8QFelIKcbADQz1LUPNtzOFUriBB2UYqw4Pi9+xV4w9ZczHFg==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.1.tgz", + "integrity": "sha512-vkYUy6LdZS7q1v/Gxb2Zs7zziuXN0wxqsetJdeZdRe/f5dwJFglmuvZBfTUivCtjH725C1jWCDfpadadD95EDQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": 
"^4.7.0", - "@typescript-eslint/scope-manager": "8.44.1", - "@typescript-eslint/types": "8.44.1", - "@typescript-eslint/typescript-estree": "8.44.1" + "@typescript-eslint/scope-manager": "8.46.1", + "@typescript-eslint/types": "8.46.1", + "@typescript-eslint/typescript-estree": "8.46.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2878,13 +3012,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.44.1.tgz", - "integrity": "sha512-576+u0QD+Jp3tZzvfRfxon0EA2lzcDt3lhUbsC6Lgzy9x2VR4E+JUiNyGHi5T8vk0TV+fpJ5GLG1JsJuWCaKhw==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.1.tgz", + "integrity": "sha512-ptkmIf2iDkNUjdeu2bQqhFPV1m6qTnFFjg7PPDjxKWaMaP0Z6I9l30Jr3g5QqbZGdw8YdYvLp+XnqnWWZOg/NA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.44.1", + "@typescript-eslint/types": "8.46.1", "eslint-visitor-keys": "^4.2.1" }, "engines": { @@ -4320,9 +4454,9 @@ "license": "MIT" }, "node_modules/cypress": { - "version": "15.3.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-15.3.0.tgz", - "integrity": "sha512-g9rDhoK9y8wW4Vx3Ppr8dtfvThXxPL3mJsV5e98fG+6EerrhXKmeRT2sL86cvNRtEZouXJfsuVL1lqiMuGNGcg==", + "version": "15.4.0", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-15.4.0.tgz", + "integrity": "sha512-+GC/Y/LXAcaMCzfuM7vRx5okRmonceZbr0ORUAoOrZt/5n2eGK8yh04bok1bWSjZ32wRHrZESqkswQ6biArN5w==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -4386,9 +4520,9 @@ "license": "MIT" }, "node_modules/cypress/node_modules/semver": { - "version": "7.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", - "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "version": "7.7.3", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", "dev": true, "license": "ISC", "bin": { @@ -5112,28 +5246,30 @@ "license": "MIT" }, "node_modules/escape-string-regexp": { - "version": "1.0.5", - "dev": true, + "version": "5.0.0", "license": "MIT", "engines": { - "node": ">=0.8.0" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/eslint": { - "version": "9.36.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.36.0.tgz", - "integrity": "sha512-hB4FIzXovouYzwzECDcUkJ4OcfOEkXTv2zRY6B9bkwjx/cprAq0uvm1nl7zvQ0/TsUk0zQiN4uPfJpB9m+rPMQ==", + "version": "9.37.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.37.0.tgz", + "integrity": "sha512-XyLmROnACWqSxiGYArdef1fItQd47weqB7iwtfr9JHwRrqIXZdcFMvvEcL9xHCmL0SNsOvF0c42lWyM1U5dgig==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.21.0", - "@eslint/config-helpers": "^0.3.1", - "@eslint/core": "^0.15.2", + "@eslint/config-helpers": "^0.4.0", + "@eslint/core": "^0.16.0", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.36.0", - "@eslint/plugin-kit": "^0.3.5", + "@eslint/js": "9.37.0", + "@eslint/plugin-kit": "^0.4.0", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", @@ -5195,7 +5331,9 @@ } }, "node_modules/eslint-plugin-cypress": { - "version": "5.1.1", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-cypress/-/eslint-plugin-cypress-5.2.0.tgz", + "integrity": "sha512-vuCUBQloUSILxtJrUWV39vNIQPlbg0L7cTunEAzvaUzv9LFZZym+KFLH18n9j2cZuFPdlxOqTubCvg5se0DyGw==", "dev": true, "license": "MIT", "dependencies": { @@ -5266,6 +5404,33 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/eslint/node_modules/@eslint/core": { + 
"version": "0.16.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.16.0.tgz", + "integrity": "sha512-nmC8/totwobIiFcGkDza3GIKfAw1+hLiYVrh3I1nIomQ8PEr5cxg34jnkmGawul/ep52wGRAcyeDCNtWKSOj4Q==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/eslint/node_modules/@eslint/plugin-kit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.0.tgz", + "integrity": "sha512-sB5uyeq+dwCWyPi31B2gQlVlo+j5brPlWx4yZBrEaRo/nhdDE8Xke1gsGgtiBdaBTxuTkceLVuVt/pclrasb0A==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.16.0", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, "node_modules/eslint/node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -5756,6 +5921,14 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/figures/node_modules/escape-string-regexp": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/file-entry-cache": { "version": "8.0.0", "dev": true, @@ -6029,6 +6202,21 @@ "dev": true, "license": "ISC" }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/function-bind": { "version": "1.1.2", "license": "MIT", @@ -6929,6 +7117,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-git-ref-name-valid": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/is-git-ref-name-valid/-/is-git-ref-name-valid-1.0.0.tgz", + "integrity": "sha512-2hLTg+7IqMSP9nNp/EVCxzvAOJGsAn0f/cKtF8JaBeivjH5UgE/XZo3iJ0AvibdE7KSF1f/7JbjBTB8Wqgbn/w==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/is-glob": { "version": "4.0.3", "dev": true, @@ -7227,7 +7424,9 @@ "license": "ISC" }, "node_modules/isomorphic-git": { - "version": "1.33.1", + "version": "1.34.0", + "resolved": "https://registry.npmjs.org/isomorphic-git/-/isomorphic-git-1.34.0.tgz", + "integrity": "sha512-J82yRa/4wm9VuOWSlI37I9Sa+n1gWaSWuKQk8zhpo6RqTW+ZTcK5c/KubLMcuVU3Btc+maRCa3YlRKqqY9q7qQ==", "license": "MIT", "dependencies": { "async-lock": "^1.4.1", @@ -7235,6 +7434,7 @@ "crc-32": "^1.2.0", "diff3": "0.0.3", "ignore": "^5.1.4", + "is-git-ref-name-valid": "^1.0.0", "minimisted": "^2.0.0", "pako": "^1.0.10", "path-browserify": "^1.0.1", @@ -7252,6 +7452,8 @@ }, "node_modules/isomorphic-git/node_modules/pify": { "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", "license": "MIT", "engines": { "node": ">=6" @@ -7844,19 +8046,19 @@ "license": "MIT" }, "node_modules/lint-staged": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-16.2.0.tgz", - "integrity": "sha512-spdYSOCQ2MdZ9CM1/bu/kDmaYGsrpNOeu1InFFV8uhv14x6YIubGxbCpSmGILFoxkiheNQPDXSg5Sbb5ZuVnug==", + "version": "16.2.4", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-16.2.4.tgz", + "integrity": "sha512-Pkyr/wd90oAyXk98i/2KwfkIhoYQUMtss769FIT9hFM5ogYZwrk+GRE46yKXSg2ZGhcJ1p38Gf5gmI5Ohjg2yg==", "dev": true, "license": "MIT", "dependencies": { - "commander": "14.0.1", - "listr2": "9.0.4", - "micromatch": "4.0.8", - "nano-spawn": "1.0.3", - "pidtree": "0.6.0", - "string-argv": "0.3.2", - "yaml": "2.8.1" + "commander": "^14.0.1", + "listr2": "^9.0.4", + "micromatch": 
"^4.0.8", + "nano-spawn": "^2.0.0", + "pidtree": "^0.6.0", + "string-argv": "^0.3.2", + "yaml": "^2.8.1" }, "bin": { "lint-staged": "bin/lint-staged.js" @@ -7954,9 +8156,9 @@ } }, "node_modules/lint-staged/node_modules/emoji-regex": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.5.0.tgz", - "integrity": "sha512-lb49vf1Xzfx080OKA0o6l8DQQpV+6Vg95zyCJX9VB/BqKYlhG7N4wgROUUHRA+ZPUefLnteQOad7z1kT2bV7bg==", + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", + "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==", "dev": true, "license": "MIT" }, @@ -8874,9 +9076,9 @@ "license": "MIT" }, "node_modules/nano-spawn": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/nano-spawn/-/nano-spawn-1.0.3.tgz", - "integrity": "sha512-jtpsQDetTnvS2Ts1fiRdci5rx0VYws5jGyC+4IYOTnIQ/wwdf6JdomlHBwqC3bJYOvaKu0C2GSZ1A60anrYpaA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/nano-spawn/-/nano-spawn-2.0.0.tgz", + "integrity": "sha512-tacvGzUY5o2D8CBh2rrwxyNojUsZNU2zjNTzKQrkgGJQTbGAfArVWXSKMBokBeeg6C7OLRGUEyoFlYbfeWQIqw==", "dev": true, "license": "MIT", "engines": { @@ -8977,13 +9179,6 @@ "dev": true, "license": "MIT" }, - "node_modules/nodemailer": { - "version": "6.10.1", - "license": "MIT-0", - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/nopt": { "version": "1.0.10", "license": "MIT", @@ -9212,9 +9407,9 @@ } }, "node_modules/oauth4webapi": { - "version": "3.8.1", - "resolved": "https://registry.npmjs.org/oauth4webapi/-/oauth4webapi-3.8.1.tgz", - "integrity": "sha512-olkZDELNycOWQf9LrsELFq8n05LwJgV8UkrS0cburk6FOwf8GvLam+YB+Uj5Qvryee+vwWOfQVeI5Vm0MVg7SA==", + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/oauth4webapi/-/oauth4webapi-3.8.2.tgz", + "integrity": "sha512-FzZZ+bht5X0FKe7Mwz3DAVAmlH1BV5blSak/lHMBKz0/EBMhX6B10GlQYI51+oRp8ObJaX0g6pXrAxZh5s8rjw==", "license": "MIT", 
"funding": { "url": "https://github.com/sponsors/panva" @@ -9351,13 +9546,13 @@ } }, "node_modules/openid-client": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/openid-client/-/openid-client-6.8.0.tgz", - "integrity": "sha512-oG1d1nAVhIIE+JSjLS+7E9wY1QOJpZltkzlJdbZ7kEn7Hp3hqur2TEeQ8gLOHoHkhbRAGZJKoOnEQcLOQJuIyg==", + "version": "6.8.1", + "resolved": "https://registry.npmjs.org/openid-client/-/openid-client-6.8.1.tgz", + "integrity": "sha512-VoYT6enBo6Vj2j3Q5Ec0AezS+9YGzQo1f5Xc42lreMGlfP4ljiXPKVDvCADh+XHCV/bqPu/wWSiCVXbJKvrODw==", "license": "MIT", "dependencies": { "jose": "^6.1.0", - "oauth4webapi": "^3.8.1" + "oauth4webapi": "^3.8.2" }, "funding": { "url": "https://github.com/sponsors/panva" @@ -11736,7 +11931,9 @@ } }, "node_modules/tsx": { - "version": "4.20.5", + "version": "4.20.6", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.20.6.tgz", + "integrity": "sha512-ytQKuwgmrrkDTFP4LjR0ToE2nqgy886GpvRSpU0JAnrdBYppuY5rLkRUYPU1yCryb24SsKBTL/hlDQAEFVwtZg==", "dev": true, "license": "MIT", "dependencies": { @@ -11754,9 +11951,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/android-arm": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.9.tgz", - "integrity": "sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.10.tgz", + "integrity": "sha512-dQAxF1dW1C3zpeCDc5KqIYuZ1tgAdRXNoZP7vkBIRtKZPYe2xVr/d3SkirklCHudW1B45tGiUlz2pUWDfbDD4w==", "cpu": [ "arm" ], @@ -11771,9 +11968,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/android-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.9.tgz", - "integrity": "sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==", + "version": "0.25.10", + "resolved": 
"https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.10.tgz", + "integrity": "sha512-LSQa7eDahypv/VO6WKohZGPSJDq5OVOo3UoFR1E4t4Gj1W7zEQMUhI+lo81H+DtB+kP+tDgBp+M4oNCwp6kffg==", "cpu": [ "arm64" ], @@ -11788,9 +11985,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/android-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.9.tgz", - "integrity": "sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.10.tgz", + "integrity": "sha512-MiC9CWdPrfhibcXwr39p9ha1x0lZJ9KaVfvzA0Wxwz9ETX4v5CHfF09bx935nHlhi+MxhA63dKRRQLiVgSUtEg==", "cpu": [ "x64" ], @@ -11805,9 +12002,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/darwin-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.9.tgz", - "integrity": "sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.10.tgz", + "integrity": "sha512-JC74bdXcQEpW9KkV326WpZZjLguSZ3DfS8wrrvPMHgQOIEIG/sPXEN/V8IssoJhbefLRcRqw6RQH2NnpdprtMA==", "cpu": [ "arm64" ], @@ -11822,9 +12019,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/darwin-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.9.tgz", - "integrity": "sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.10.tgz", + "integrity": "sha512-tguWg1olF6DGqzws97pKZ8G2L7Ig1vjDmGTwcTuYHbuU6TTjJe5FXbgs5C1BBzHbJ2bo1m3WkQDbWO2PvamRcg==", "cpu": [ "x64" ], @@ -11839,9 +12036,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": { - "version": 
"0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.9.tgz", - "integrity": "sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.10.tgz", + "integrity": "sha512-3ZioSQSg1HT2N05YxeJWYR+Libe3bREVSdWhEEgExWaDtyFbbXWb49QgPvFH8u03vUPX10JhJPcz7s9t9+boWg==", "cpu": [ "arm64" ], @@ -11856,9 +12053,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/freebsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.9.tgz", - "integrity": "sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.10.tgz", + "integrity": "sha512-LLgJfHJk014Aa4anGDbh8bmI5Lk+QidDmGzuC2D+vP7mv/GeSN+H39zOf7pN5N8p059FcOfs2bVlrRr4SK9WxA==", "cpu": [ "x64" ], @@ -11873,9 +12070,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-arm": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.9.tgz", - "integrity": "sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.10.tgz", + "integrity": "sha512-oR31GtBTFYCqEBALI9r6WxoU/ZofZl962pouZRTEYECvNF/dtXKku8YXcJkhgK/beU+zedXfIzHijSRapJY3vg==", "cpu": [ "arm" ], @@ -11890,9 +12087,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.9.tgz", - "integrity": "sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==", + "version": "0.25.10", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.10.tgz", + "integrity": "sha512-5luJWN6YKBsawd5f9i4+c+geYiVEw20FVW5x0v1kEMWNq8UctFjDiMATBxLvmmHA4bf7F6hTRaJgtghFr9iziQ==", "cpu": [ "arm64" ], @@ -11907,9 +12104,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-ia32": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.9.tgz", - "integrity": "sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.10.tgz", + "integrity": "sha512-NrSCx2Kim3EnnWgS4Txn0QGt0Xipoumb6z6sUtl5bOEZIVKhzfyp/Lyw4C1DIYvzeW/5mWYPBFJU3a/8Yr75DQ==", "cpu": [ "ia32" ], @@ -11924,9 +12121,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-loong64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.9.tgz", - "integrity": "sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.10.tgz", + "integrity": "sha512-xoSphrd4AZda8+rUDDfD9J6FUMjrkTz8itpTITM4/xgerAZZcFW7Dv+sun7333IfKxGG8gAq+3NbfEMJfiY+Eg==", "cpu": [ "loong64" ], @@ -11941,9 +12138,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-mips64el": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.9.tgz", - "integrity": "sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.10.tgz", + "integrity": "sha512-ab6eiuCwoMmYDyTnyptoKkVS3k8fy/1Uvq7Dj5czXI6DF2GqD2ToInBI0SHOp5/X1BdZ26RKc5+qjQNGRBelRA==", "cpu": [ "mips64el" ], @@ -11958,9 +12155,9 @@ } }, 
"node_modules/tsx/node_modules/@esbuild/linux-ppc64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.9.tgz", - "integrity": "sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.10.tgz", + "integrity": "sha512-NLinzzOgZQsGpsTkEbdJTCanwA5/wozN9dSgEl12haXJBzMTpssebuXR42bthOF3z7zXFWH1AmvWunUCkBE4EA==", "cpu": [ "ppc64" ], @@ -11975,9 +12172,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-riscv64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.9.tgz", - "integrity": "sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.10.tgz", + "integrity": "sha512-FE557XdZDrtX8NMIeA8LBJX3dC2M8VGXwfrQWU7LB5SLOajfJIxmSdyL/gU1m64Zs9CBKvm4UAuBp5aJ8OgnrA==", "cpu": [ "riscv64" ], @@ -11992,9 +12189,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-s390x": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.9.tgz", - "integrity": "sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.10.tgz", + "integrity": "sha512-3BBSbgzuB9ajLoVZk0mGu+EHlBwkusRmeNYdqmznmMc9zGASFjSsxgkNsqmXugpPk00gJ0JNKh/97nxmjctdew==", "cpu": [ "s390x" ], @@ -12009,9 +12206,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.9.tgz", - "integrity": "sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==", + "version": "0.25.10", + 
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.10.tgz", + "integrity": "sha512-QSX81KhFoZGwenVyPoberggdW1nrQZSvfVDAIUXr3WqLRZGZqWk/P4T8p2SP+de2Sr5HPcvjhcJzEiulKgnxtA==", "cpu": [ "x64" ], @@ -12026,9 +12223,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/netbsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.9.tgz", - "integrity": "sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.10.tgz", + "integrity": "sha512-7RTytDPGU6fek/hWuN9qQpeGPBZFfB4zZgcz2VK2Z5VpdUxEI8JKYsg3JfO0n/Z1E/6l05n0unDCNc4HnhQGig==", "cpu": [ "x64" ], @@ -12043,9 +12240,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/openbsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.9.tgz", - "integrity": "sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.10.tgz", + "integrity": "sha512-XkA4frq1TLj4bEMB+2HnI0+4RnjbuGZfet2gs/LNs5Hc7D89ZQBHQ0gL2ND6Lzu1+QVkjp3x1gIcPKzRNP8bXw==", "cpu": [ "x64" ], @@ -12060,9 +12257,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/sunos-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.9.tgz", - "integrity": "sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.10.tgz", + "integrity": "sha512-fswk3XT0Uf2pGJmOpDB7yknqhVkJQkAQOcW/ccVOtfx05LkbWOaRAtn5SaqXypeKQra1QaEa841PgrSL9ubSPQ==", "cpu": [ "x64" ], @@ -12077,9 +12274,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/win32-arm64": { - "version": "0.25.9", - 
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.9.tgz", - "integrity": "sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.10.tgz", + "integrity": "sha512-ah+9b59KDTSfpaCg6VdJoOQvKjI33nTaQr4UluQwW7aEwZQsbMCfTmfEO4VyewOxx4RaDT/xCy9ra2GPWmO7Kw==", "cpu": [ "arm64" ], @@ -12094,9 +12291,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/win32-ia32": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.9.tgz", - "integrity": "sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.10.tgz", + "integrity": "sha512-QHPDbKkrGO8/cz9LKVnJU22HOi4pxZnZhhA2HYHez5Pz4JeffhDjf85E57Oyco163GnzNCVkZK0b/n4Y0UHcSw==", "cpu": [ "ia32" ], @@ -12111,9 +12308,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/win32-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.9.tgz", - "integrity": "sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.10.tgz", + "integrity": "sha512-9KpxSVFCu0iK1owoez6aC/s/EdUQLDN3adTxGCqxMVhrPDj6bt5dbrHDXUuq+Bs2vATFBBrQS5vdQ/Ed2P+nbw==", "cpu": [ "x64" ], @@ -12128,7 +12325,9 @@ } }, "node_modules/tsx/node_modules/esbuild": { - "version": "0.25.9", + "version": "0.25.10", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.10.tgz", + "integrity": "sha512-9RiGKvCwaqxO2owP61uQ4BgNborAQskMR6QusfWzQqv7AZOg5oGehdY2pRJMTKuwxd1IDBP4rSbI5lHzU7SMsQ==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -12139,32 +12338,32 @@ "node": ">=18" }, "optionalDependencies": { - 
"@esbuild/aix-ppc64": "0.25.9", - "@esbuild/android-arm": "0.25.9", - "@esbuild/android-arm64": "0.25.9", - "@esbuild/android-x64": "0.25.9", - "@esbuild/darwin-arm64": "0.25.9", - "@esbuild/darwin-x64": "0.25.9", - "@esbuild/freebsd-arm64": "0.25.9", - "@esbuild/freebsd-x64": "0.25.9", - "@esbuild/linux-arm": "0.25.9", - "@esbuild/linux-arm64": "0.25.9", - "@esbuild/linux-ia32": "0.25.9", - "@esbuild/linux-loong64": "0.25.9", - "@esbuild/linux-mips64el": "0.25.9", - "@esbuild/linux-ppc64": "0.25.9", - "@esbuild/linux-riscv64": "0.25.9", - "@esbuild/linux-s390x": "0.25.9", - "@esbuild/linux-x64": "0.25.9", - "@esbuild/netbsd-arm64": "0.25.9", - "@esbuild/netbsd-x64": "0.25.9", - "@esbuild/openbsd-arm64": "0.25.9", - "@esbuild/openbsd-x64": "0.25.9", - "@esbuild/openharmony-arm64": "0.25.9", - "@esbuild/sunos-x64": "0.25.9", - "@esbuild/win32-arm64": "0.25.9", - "@esbuild/win32-ia32": "0.25.9", - "@esbuild/win32-x64": "0.25.9" + "@esbuild/aix-ppc64": "0.25.10", + "@esbuild/android-arm": "0.25.10", + "@esbuild/android-arm64": "0.25.10", + "@esbuild/android-x64": "0.25.10", + "@esbuild/darwin-arm64": "0.25.10", + "@esbuild/darwin-x64": "0.25.10", + "@esbuild/freebsd-arm64": "0.25.10", + "@esbuild/freebsd-x64": "0.25.10", + "@esbuild/linux-arm": "0.25.10", + "@esbuild/linux-arm64": "0.25.10", + "@esbuild/linux-ia32": "0.25.10", + "@esbuild/linux-loong64": "0.25.10", + "@esbuild/linux-mips64el": "0.25.10", + "@esbuild/linux-ppc64": "0.25.10", + "@esbuild/linux-riscv64": "0.25.10", + "@esbuild/linux-s390x": "0.25.10", + "@esbuild/linux-x64": "0.25.10", + "@esbuild/netbsd-arm64": "0.25.10", + "@esbuild/netbsd-x64": "0.25.10", + "@esbuild/openbsd-arm64": "0.25.10", + "@esbuild/openbsd-x64": "0.25.10", + "@esbuild/openharmony-arm64": "0.25.10", + "@esbuild/sunos-x64": "0.25.10", + "@esbuild/win32-arm64": "0.25.10", + "@esbuild/win32-ia32": "0.25.10", + "@esbuild/win32-x64": "0.25.10" } }, "node_modules/tunnel-agent": { @@ -12291,7 +12490,9 @@ } }, "node_modules/typescript": 
{ - "version": "5.9.2", + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "dev": true, "license": "Apache-2.0", "bin": { @@ -12303,16 +12504,16 @@ } }, "node_modules/typescript-eslint": { - "version": "8.44.1", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.44.1.tgz", - "integrity": "sha512-0ws8uWGrUVTjEeN2OM4K1pLKHK/4NiNP/vz6ns+LjT/6sqpaYzIVFajZb1fj/IDwpsrrHb3Jy0Qm5u9CPcKaeg==", + "version": "8.46.1", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.46.1.tgz", + "integrity": "sha512-VHgijW803JafdSsDO8I761r3SHrgk4T00IdyQ+/UsthtgPRsBWQLqoSxOolxTpxRKi1kGXK0bSz4CoAc9ObqJA==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.44.1", - "@typescript-eslint/parser": "8.44.1", - "@typescript-eslint/typescript-estree": "8.44.1", - "@typescript-eslint/utils": "8.44.1" + "@typescript-eslint/eslint-plugin": "8.46.1", + "@typescript-eslint/parser": "8.46.1", + "@typescript-eslint/typescript-estree": "8.46.1", + "@typescript-eslint/utils": "8.46.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -13013,7 +13214,7 @@ }, "packages/git-proxy-cli": { "name": "@finos/git-proxy-cli", - "version": "0.1.0", + "version": "2.0.0-rc.3", "license": "Apache-2.0", "dependencies": { "@finos/git-proxy": "file:../..", @@ -13021,7 +13222,7 @@ "yargs": "^17.7.2" }, "bin": { - "git-proxy-cli": "index.js" + "git-proxy-cli": "dist/index.js" }, "devDependencies": { "chai": "^4.5.0" diff --git a/package.json b/package.json index dfade78bc..6b4e9dbf9 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,8 @@ "version": "2.0.0-rc.3", "description": "Deploy custom push protections and policies on top of Git.", "scripts": { - "cli": "node ./packages/git-proxy-cli/index.js", + "cli": "tsx ./packages/git-proxy-cli/index.ts", + 
"cli:js": "node ./packages/git-proxy-cli/dist/index.js", "client": "vite --config vite.config.ts", "clientinstall": "npm install --prefix client", "server": "tsx index.ts", @@ -44,7 +45,7 @@ "dependencies": { "@material-ui/core": "^4.12.4", "@material-ui/icons": "4.11.3", - "@primer/octicons-react": "^19.18.0", + "@primer/octicons-react": "^19.19.0", "@seald-io/nedb": "^4.1.2", "axios": "^1.12.2", "bcryptjs": "^3.0.2", @@ -54,12 +55,13 @@ "cors": "^2.8.5", "diff2html": "^3.4.52", "env-paths": "^3.0.0", + "escape-string-regexp": "^5.0.0", "express": "^4.21.2", "express-http-proxy": "^2.1.2", "express-rate-limit": "^8.1.0", "express-session": "^1.18.2", "history": "5.3.0", - "isomorphic-git": "^1.33.1", + "isomorphic-git": "^1.34.0", "jsonwebtoken": "^9.0.2", "jwk-to-pem": "^2.0.7", "load-plugin": "^6.0.3", @@ -67,8 +69,7 @@ "lusca": "^1.7.0", "moment": "^2.30.1", "mongodb": "^5.9.2", - "nodemailer": "^6.10.1", - "openid-client": "^6.8.0", + "openid-client": "^6.8.1", "parse-diff": "^0.11.1", "passport": "^0.7.0", "passport-activedirectory": "^1.4.0", @@ -90,31 +91,39 @@ "@commitlint/cli": "^19.8.1", "@commitlint/config-conventional": "^19.8.1", "@eslint/compat": "^1.4.0", - "@eslint/js": "^9.36.0", + "@eslint/js": "^9.37.0", "@eslint/json": "^0.13.2", + "@types/activedirectory2": "^1.2.6", + "@types/cors": "^2.8.19", "@types/domutils": "^1.7.8", "@types/express": "^5.0.3", "@types/express-http-proxy": "^1.6.7", + "@types/express-session": "^1.18.2", + "@types/jsonwebtoken": "^9.0.10", + "@types/jwk-to-pem": "^2.0.3", "@types/lodash": "^4.17.20", + "@types/lusca": "^1.7.5", "@types/mocha": "^10.0.10", - "@types/node": "^22.18.6", + "@types/node": "^22.18.10", + "@types/passport": "^1.0.17", + "@types/passport-local": "^1.0.38", "@types/react-dom": "^17.0.26", "@types/react-html-parser": "^2.0.7", - "@types/validator": "^13.15.3", "@types/sinon": "^17.0.4", + "@types/validator": "^13.15.3", "@types/yargs": "^17.0.33", "@vitejs/plugin-react": "^4.7.0", "chai": 
"^4.5.0", "chai-http": "^4.4.0", - "cypress": "^15.3.0", - "eslint": "^9.36.0", + "cypress": "^15.4.0", + "eslint": "^9.37.0", "eslint-config-prettier": "^10.1.8", - "eslint-plugin-cypress": "^5.1.1", + "eslint-plugin-cypress": "^5.2.0", "eslint-plugin-react": "^7.37.5", "fast-check": "^4.3.0", "globals": "^16.4.0", "husky": "^9.1.7", - "lint-staged": "^16.2.0", + "lint-staged": "^16.2.4", "mocha": "^10.8.2", "nyc": "^17.1.0", "prettier": "^3.6.2", @@ -124,17 +133,17 @@ "sinon-chai": "^3.7.0", "ts-mocha": "^11.1.0", "ts-node": "^10.9.2", - "tsx": "^4.20.5", - "typescript": "^5.9.2", - "typescript-eslint": "^8.44.1", + "tsx": "^4.20.6", + "typescript": "^5.9.3", + "typescript-eslint": "^8.46.1", "vite": "^4.5.14", "vite-tsconfig-paths": "^5.1.4" }, "optionalDependencies": { - "@esbuild/darwin-arm64": "^0.25.10", - "@esbuild/darwin-x64": "^0.25.10", - "@esbuild/linux-x64": "0.25.10", - "@esbuild/win32-x64": "0.25.10" + "@esbuild/darwin-arm64": "^0.25.11", + "@esbuild/darwin-x64": "^0.25.11", + "@esbuild/linux-x64": "0.25.11", + "@esbuild/win32-x64": "0.25.11" }, "browserslist": { "production": [ diff --git a/packages/git-proxy-cli/index.js b/packages/git-proxy-cli/index.ts old mode 100755 new mode 100644 similarity index 80% rename from packages/git-proxy-cli/index.js rename to packages/git-proxy-cli/index.ts index 614104d6a..5a95e5cf0 --- a/packages/git-proxy-cli/index.js +++ b/packages/git-proxy-cli/index.ts @@ -1,9 +1,12 @@ #!/usr/bin/env node -const axios = require('axios'); -const yargs = require('yargs/yargs'); -const { hideBin } = require('yargs/helpers'); -const fs = require('fs'); -const util = require('util'); +import axios from 'axios'; +import yargs from 'yargs/yargs'; +import { hideBin } from 'yargs/helpers'; +import fs from 'fs'; +import util from 'util'; + +import { CommitData, PushData } from '@finos/git-proxy/src/types/models'; +import { PushQuery } from '@finos/git-proxy/src/db/types'; const GIT_PROXY_COOKIE_FILE = 'git-proxy-cookie'; // GitProxy UI 
HOST and PORT (configurable via environment variable) @@ -19,7 +22,7 @@ axios.defaults.timeout = 30000; * @param {string} username The user name to login with * @param {string} password The password to use for the login */ -async function login(username, password) { +async function login(username: string, password: string) { try { let response = await axios.post( `${baseUrl}/api/auth/login`, @@ -44,7 +47,7 @@ async function login(username, password) { const user = `"${response.data.username}" <${response.data.email}>`; const isAdmin = response.data.admin ? ' (admin)' : ''; console.log(`Login ${user}${isAdmin}: OK`); - } catch (error) { + } catch (error: any) { if (error.response) { console.error(`Error: Login '${username}': '${error.response.status}'`); process.exitCode = 1; @@ -61,7 +64,7 @@ async function login(username, password) { * the push is allowed, authorised, blocked, canceled, encountered an error, * or was rejected. * - * @param {Object} filters - An object containing filter criteria for Git + * @param {Partial} filters - An object containing filter criteria for Git * pushes. * @param {boolean} filters.allowPush - If not null, filters for pushes with * given attribute and status. @@ -76,7 +79,7 @@ async function login(username, password) { * @param {boolean} filters.rejected - If not null, filters for pushes with * given attribute and status. 
*/ -async function getGitPushes(filters) { +async function getGitPushes(filters: Partial) { if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { console.error('Error: List: Authentication required'); process.exitCode = 1; @@ -91,40 +94,64 @@ async function getGitPushes(filters) { params: filters, }); - const records = []; - response.data?.forEach((push) => { - const record = {}; - record.id = push.id; - record.timestamp = push.timestamp; - record.url = push.url; - record.allowPush = push.allowPush; - record.authorised = push.authorised; - record.blocked = push.blocked; - record.canceled = push.canceled; - record.error = push.error; - record.rejected = push.rejected; - - record.lastStep = { - stepName: push.lastStep?.stepName, - error: push.lastStep?.error, - errorMessage: push.lastStep?.errorMessage, - blocked: push.lastStep?.blocked, - blockedMessage: push.lastStep?.blockedMessage, + const records: PushData[] = []; + response.data.forEach((push: PushData) => { + const record: PushData = { + id: push.id, + repo: push.repo, + branch: push.branch, + commitFrom: push.commitFrom, + commitTo: push.commitTo, + commitData: push.commitData, + diff: push.diff, + error: push.error, + canceled: push.canceled, + rejected: push.rejected, + blocked: push.blocked, + authorised: push.authorised, + attestation: push.attestation, + autoApproved: push.autoApproved, + timestamp: push.timestamp, + url: push.url, + allowPush: push.allowPush, }; - record.commitData = []; - push.commitData?.forEach((pushCommitDataRecord) => { - record.commitData.push({ - message: pushCommitDataRecord.message, - committer: pushCommitDataRecord.committer, + if (push.lastStep) { + record.lastStep = { + id: push.lastStep?.id, + content: push.lastStep?.content, + logs: push.lastStep?.logs, + stepName: push.lastStep?.stepName, + error: push.lastStep?.error, + errorMessage: push.lastStep?.errorMessage, + blocked: push.lastStep?.blocked, + blockedMessage: push.lastStep?.blockedMessage, + }; + } + + if (push.commitData) { 
+ const commitData: CommitData[] = []; + push.commitData.forEach((pushCommitDataRecord: CommitData) => { + commitData.push({ + message: pushCommitDataRecord.message, + committer: pushCommitDataRecord.committer, + committerEmail: pushCommitDataRecord.committerEmail, + author: pushCommitDataRecord.author, + authorEmail: pushCommitDataRecord.authorEmail, + commitTimestamp: pushCommitDataRecord.commitTimestamp, + tree: pushCommitDataRecord.tree, + parent: pushCommitDataRecord.parent, + commitTs: pushCommitDataRecord.commitTs, + }); }); - }); + record.commitData = commitData; + } records.push(record); }); console.log(`${util.inspect(records, false, null, false)}`); - } catch (error) { + } catch (error: any) { // default error const errorMessage = `Error: List: '${error.message}'`; process.exitCode = 2; @@ -136,7 +163,7 @@ async function getGitPushes(filters) { * Authorise git push by ID * @param {string} id The ID of the git push to authorise */ -async function authoriseGitPush(id) { +async function authoriseGitPush(id: string) { if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { console.error('Error: Authorise: Authentication required'); process.exitCode = 1; @@ -168,7 +195,7 @@ async function authoriseGitPush(id) { ); console.log(`Authorise: ID: '${id}': OK`); - } catch (error) { + } catch (error: any) { // default error let errorMessage = `Error: Authorise: '${error.message}'`; process.exitCode = 2; @@ -176,8 +203,7 @@ async function authoriseGitPush(id) { if (error.response) { switch (error.response.status) { case 401: - errorMessage = - 'Error: Authorise: Authentication required (401): ' + error?.response?.data?.message; + errorMessage = 'Error: Authorise: Authentication required'; process.exitCode = 3; break; case 404: @@ -193,7 +219,7 @@ async function authoriseGitPush(id) { * Reject git push by ID * @param {string} id The ID of the git push to reject */ -async function rejectGitPush(id) { +async function rejectGitPush(id: string) { if 
(!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { console.error('Error: Reject: Authentication required'); process.exitCode = 1; @@ -216,7 +242,7 @@ async function rejectGitPush(id) { ); console.log(`Reject: ID: '${id}': OK`); - } catch (error) { + } catch (error: any) { // default error let errorMessage = `Error: Reject: '${error.message}'`; process.exitCode = 2; @@ -224,8 +250,7 @@ async function rejectGitPush(id) { if (error.response) { switch (error.response.status) { case 401: - errorMessage = - 'Error: Reject: Authentication required (401): ' + error?.response?.data?.message; + errorMessage = 'Error: Reject: Authentication required'; process.exitCode = 3; break; case 404: @@ -241,7 +266,7 @@ async function rejectGitPush(id) { * Cancel git push by ID * @param {string} id The ID of the git push to cancel */ -async function cancelGitPush(id) { +async function cancelGitPush(id: string) { if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { console.error('Error: Cancel: Authentication required'); process.exitCode = 1; @@ -264,7 +289,7 @@ async function cancelGitPush(id) { ); console.log(`Cancel: ID: '${id}': OK`); - } catch (error) { + } catch (error: any) { // default error let errorMessage = `Error: Cancel: '${error.message}'`; process.exitCode = 2; @@ -272,8 +297,7 @@ async function cancelGitPush(id) { if (error.response) { switch (error.response.status) { case 401: - errorMessage = - 'Error: Cancel: Authentication required (401): ' + error?.response?.data?.message; + errorMessage = 'Error: Cancel: Authentication required'; process.exitCode = 3; break; case 404: @@ -302,7 +326,7 @@ async function logout() { headers: { Cookie: cookies }, }, ); - } catch (error) { + } catch (error: any) { console.log(`Warning: Logout: '${error.message}'`); } } @@ -326,7 +350,7 @@ async function reloadConfig() { await axios.post(`${baseUrl}/api/v1/admin/reload-config`, {}, { headers: { Cookie: cookies } }); console.log('Configuration reloaded successfully'); - } catch (error) { + } catch (error: 
any) { const errorMessage = `Error: Reload config: '${error.message}'`; process.exitCode = 2; console.error(errorMessage); @@ -341,7 +365,13 @@ async function reloadConfig() { * @param {string} gitAccount The git account for the new user * @param {boolean} [admin=false] Whether the user should be an admin (optional) */ -async function createUser(username, password, email, gitAccount, admin = false) { +async function createUser( + username: string, + password: string, + email: string, + gitAccount: string, + admin: boolean = false, +) { if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { console.error('Error: Create User: Authentication required'); process.exitCode = 1; @@ -366,7 +396,7 @@ async function createUser(username, password, email, gitAccount, admin = false) ); console.log(`User '${username}' created successfully`); - } catch (error) { + } catch (error: any) { let errorMessage = `Error: Create User: '${error.message}'`; process.exitCode = 2; @@ -518,8 +548,10 @@ yargs(hideBin(process.argv)) // eslint-disable-line @typescript-eslint/no-unused }) .command({ command: 'reload-config', - description: 'Reload GitProxy configuration without restarting', - action: reloadConfig, + describe: 'Reload GitProxy configuration without restarting', + handler() { + reloadConfig(); + }, }) .command({ command: 'create-user', diff --git a/packages/git-proxy-cli/package.json b/packages/git-proxy-cli/package.json index f425c1408..fd8543dfe 100644 --- a/packages/git-proxy-cli/package.json +++ b/packages/git-proxy-cli/package.json @@ -2,7 +2,9 @@ "name": "@finos/git-proxy-cli", "version": "2.0.0-rc.3", "description": "Command line interface tool for FINOS GitProxy.", - "bin": "./index.js", + "bin": { + "git-proxy-cli": "./dist/index.js" + }, "dependencies": { "axios": "^1.12.2", "yargs": "^17.7.2", @@ -12,8 +14,10 @@ "chai": "^4.5.0" }, "scripts": { - "lint": "eslint --fix . 
--ext .js,.jsx", - "test": "NODE_ENV=test ts-mocha --exit --timeout 10000", + "build": "tsc", + "lint": "eslint \"./*.ts\" --fix", + "test:dev": "NODE_ENV=test ts-mocha test/*.ts --exit --timeout 10000", + "test": "npm run build && NODE_ENV=test ts-mocha test/*.ts --exit --timeout 10000", "test-coverage": "nyc npm run test", "test-coverage-ci": "nyc --reporter=lcovonly --reporter=text --reporter=html npm run test" }, diff --git a/packages/git-proxy-cli/test/testCli.test.js b/packages/git-proxy-cli/test/testCli.test.ts similarity index 69% rename from packages/git-proxy-cli/test/testCli.test.js rename to packages/git-proxy-cli/test/testCli.test.ts index 626fed667..98b7ae01a 100644 --- a/packages/git-proxy-cli/test/testCli.test.js +++ b/packages/git-proxy-cli/test/testCli.test.ts @@ -1,14 +1,11 @@ -const helper = require('./testCliUtils'); +import * as helper from './testCliUtils'; +import path from 'path'; -const path = require('path'); +import { setConfigFile } from '../../../src/config/file'; -// set test proxy config file path *before* loading the proxy -require('../../../src/config/file').configFile = path.join( - process.cwd(), - 'test', - 'testCli.proxy.config.json', -); -const service = require('../../../src/service'); +import { Repo } from '../../../src/db/types'; + +setConfigFile(path.join(process.cwd(), 'test', 'testCli.proxy.config.json')); /* test constants */ // push ID which does not exist @@ -27,12 +24,14 @@ const TEST_PASSWORD = 'testpassword'; const TEST_EMAIL = 'jane.doe@email.com'; const TEST_GIT_ACCOUNT = 'testGitAccount'; +const CLI_PATH = 'npx git-proxy-cli'; + describe('test git-proxy-cli', function () { // *** help *** describe(`test git-proxy-cli :: help`, function () { it(`print help if no command or option is given`, async function () { - const cli = `npx -- @finos/git-proxy-cli`; + const cli = `${CLI_PATH}`; const expectedExitCode = 1; const expectedMessages = null; const expectedErrorMessages = [ @@ -44,7 +43,7 @@ describe('test 
git-proxy-cli', function () { }); it(`print help if invalid command or option is given`, async function () { - const cli = `npx -- @finos/git-proxy-cli invalid --invalid`; + const cli = `${CLI_PATH} invalid --invalid`; const expectedExitCode = 1; const expectedMessages = null; const expectedErrorMessages = [ @@ -56,7 +55,7 @@ describe('test git-proxy-cli', function () { }); it(`print help if "--help" option is given`, async function () { - const cli = `npx -- @finos/git-proxy-cli invalid --help`; + const cli = `${CLI_PATH} invalid --help`; const expectedExitCode = 0; const expectedMessages = ['Commands:', 'Options:']; const expectedErrorMessages = null; @@ -68,7 +67,7 @@ describe('test git-proxy-cli', function () { describe(`test git-proxy-cli :: version`, function () { it(`"--version" option prints version details `, async function () { - const cli = `npx -- @finos/git-proxy-cli --version`; + const cli = `${CLI_PATH} --version`; const expectedExitCode = 0; const packageJson = require('../../../package.json'); const version = packageJson.version; @@ -82,7 +81,7 @@ describe('test git-proxy-cli', function () { describe('test git-proxy-cli :: configuration', function () { it(`"config" command prints configuration details`, async function () { - const cli = `npx -- @finos/git-proxy-cli config`; + const cli = `${CLI_PATH} config`; const expectedExitCode = 0; const expectedMessages = ['GitProxy URL:']; const expectedErrorMessages = null; @@ -104,7 +103,7 @@ describe('test git-proxy-cli', function () { it('login should fail when server is down', async function () { const username = 'admin'; const password = 'admin'; - const cli = `npx -- @finos/git-proxy-cli login --username ${username} --password ${password}`; + const cli = `${CLI_PATH} login --username ${username} --password ${password}`; const expectedExitCode = 2; const expectedMessages = null; const expectedErrorMessages = [`Error: Login '${username}':`]; @@ -114,43 +113,43 @@ describe('test git-proxy-cli', function 
() { it('login should fail with invalid credentials', async function () { const username = 'unkn0wn'; const password = 'p4ssw0rd'; - const cli = `npx -- @finos/git-proxy-cli login --username ${username} --password ${password}`; + const cli = `${CLI_PATH} login --username ${username} --password ${password}`; const expectedExitCode = 1; const expectedMessages = null; const expectedErrorMessages = [`Error: Login '${username}': '401'`]; try { - await helper.startServer(service); + await helper.startServer(); await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('login shoud be successful with valid credentials (admin)', async function () { const username = 'admin'; const password = 'admin'; - const cli = `npx -- @finos/git-proxy-cli login --username ${username} --password ${password}`; + const cli = `${CLI_PATH} login --username ${username} --password ${password}`; const expectedExitCode = 0; const expectedMessages = [`Login "${username}" (admin): OK`]; const expectedErrorMessages = null; try { - await helper.startServer(service); + await helper.startServer(); await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('login shoud be successful with valid credentials (non-admin)', async function () { - const cli = `npx -- @finos/git-proxy-cli login --username ${TEST_USER} --password ${TEST_PASSWORD}`; + const cli = `${CLI_PATH} login --username ${TEST_USER} --password ${TEST_PASSWORD}`; const expectedExitCode = 0; const expectedMessages = [`Login "${TEST_USER}" <${TEST_EMAIL}>: OK`]; const expectedErrorMessages = null; try { - await helper.startServer(service); + await helper.startServer(); await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await 
helper.closeServer(service.httpServer); + await helper.closeServer(); } }); }); @@ -161,7 +160,7 @@ describe('test git-proxy-cli', function () { it('logout shoud succeed when server is down (and not logged in before)', async function () { await helper.removeCookiesFile(); - const cli = `npx -- @finos/git-proxy-cli logout`; + const cli = `${CLI_PATH} logout`; const expectedExitCode = 0; const expectedMessages = [`Logout: OK`]; const expectedErrorMessages = null; @@ -170,13 +169,13 @@ describe('test git-proxy-cli', function () { it('logout should succeed when server is down (but logged in before)', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } - const cli = `npx -- @finos/git-proxy-cli logout`; + const cli = `${CLI_PATH} logout`; const expectedExitCode = 0; const expectedMessages = [`Logout: OK`]; const expectedErrorMessages = null; @@ -187,29 +186,29 @@ describe('test git-proxy-cli', function () { try { await helper.createCookiesFileWithExpiredCookie(); - const cli = `npx -- @finos/git-proxy-cli logout`; + const cli = `${CLI_PATH} logout`; const expectedExitCode = 0; const expectedMessages = [`Logout: OK`]; const expectedErrorMessages = null; - await helper.startServer(service); + await helper.startServer(); await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('logout shoud be successful when authenticated (server is up)', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await 
helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli logout`; + const cli = `${CLI_PATH} logout`; const expectedExitCode = 0; const expectedMessages = [`Logout: OK`]; const expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); }); @@ -220,7 +219,7 @@ describe('test git-proxy-cli', function () { const pushId = `auth000000000000000000000000000000000000__${Date.now()}`; before(async function () { - await helper.addRepoToDb(TEST_REPO_CONFIG); + await helper.addRepoToDb(TEST_REPO_CONFIG as Repo); await helper.addUserToDb(TEST_USER, TEST_PASSWORD, TEST_EMAIL, TEST_GIT_ACCOUNT); await helper.addGitPushToDb(pushId, TEST_REPO_CONFIG.url, TEST_USER, TEST_EMAIL); }); @@ -234,14 +233,14 @@ describe('test git-proxy-cli', function () { it('attempt to authorise should fail when server is down', async function () { try { // start server -> login -> stop server - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } const id = GHOST_PUSH_ID; - const cli = `npx -- @finos/git-proxy-cli authorise --id ${id}`; + const cli = `${CLI_PATH} authorise --id ${id}`; const expectedExitCode = 2; const expectedMessages = null; const expectedErrorMessages = ['Error: Authorise:']; @@ -252,7 +251,7 @@ describe('test git-proxy-cli', function () { await helper.removeCookiesFile(); const id = GHOST_PUSH_ID; - const cli = `npx -- @finos/git-proxy-cli authorise --id ${id}`; + const cli = `${CLI_PATH} authorise --id ${id}`; const expectedExitCode = 1; const expectedMessages = null; const expectedErrorMessages = ['Error: 
Authorise: Authentication required']; @@ -262,31 +261,31 @@ describe('test git-proxy-cli', function () { it('attempt to authorise should fail when not authenticated (server restarted)', async function () { try { await helper.createCookiesFileWithExpiredCookie(); - await helper.startServer(service); + await helper.startServer(); const id = pushId; - const cli = `npx -- @finos/git-proxy-cli authorise --id ${id}`; + const cli = `${CLI_PATH} authorise --id ${id}`; const expectedExitCode = 3; const expectedMessages = null; const expectedErrorMessages = ['Error: Authorise: Authentication required']; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('attempt to authorise should fail when git push ID not found', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); const id = GHOST_PUSH_ID; - const cli = `npx -- @finos/git-proxy-cli authorise --id ${id}`; + const cli = `${CLI_PATH} authorise --id ${id}`; const expectedExitCode = 4; const expectedMessages = null; const expectedErrorMessages = [`Error: Authorise: ID: '${id}': Not Found`]; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); }); @@ -297,7 +296,7 @@ describe('test git-proxy-cli', function () { const pushId = `cancel0000000000000000000000000000000000__${Date.now()}`; before(async function () { - await helper.addRepoToDb(TEST_REPO_CONFIG); + await helper.addRepoToDb(TEST_REPO_CONFIG as Repo); await helper.addUserToDb(TEST_USER, TEST_PASSWORD, TEST_EMAIL, TEST_GIT_ACCOUNT); await helper.addGitPushToDb(pushId, TEST_USER, TEST_EMAIL, TEST_REPO); }); 
@@ -311,14 +310,14 @@ describe('test git-proxy-cli', function () { it('attempt to cancel should fail when server is down', async function () { try { // start server -> login -> stop server - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } const id = GHOST_PUSH_ID; - const cli = `npx -- @finos/git-proxy-cli cancel --id ${id}`; + const cli = `${CLI_PATH} cancel --id ${id}`; const expectedExitCode = 2; const expectedMessages = null; const expectedErrorMessages = ['Error: Cancel:']; @@ -329,7 +328,7 @@ describe('test git-proxy-cli', function () { await helper.removeCookiesFile(); const id = GHOST_PUSH_ID; - const cli = `npx -- @finos/git-proxy-cli cancel --id ${id}`; + const cli = `${CLI_PATH} cancel --id ${id}`; const expectedExitCode = 1; const expectedMessages = null; const expectedErrorMessages = ['Error: Cancel: Authentication required']; @@ -339,32 +338,32 @@ describe('test git-proxy-cli', function () { it('attempt to cancel should fail when not authenticated (server restarted)', async function () { try { await helper.createCookiesFileWithExpiredCookie(); - await helper.startServer(service); + await helper.startServer(); const id = pushId; - const cli = `npx -- @finos/git-proxy-cli cancel --id ${id}`; + const cli = `${CLI_PATH} cancel --id ${id}`; const expectedExitCode = 3; const expectedMessages = null; const expectedErrorMessages = ['Error: Cancel: Authentication required']; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); // }); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('attempt to cancel should fail when git push ID not found', async function () { try { - await helper.startServer(service); - 
await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); const id = GHOST_PUSH_ID; - const cli = `npx -- @finos/git-proxy-cli cancel --id ${id}`; + const cli = `${CLI_PATH} cancel --id ${id}`; const expectedExitCode = 4; const expectedMessages = null; const expectedErrorMessages = [`Error: Cancel: ID: '${id}': Not Found`]; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); }); @@ -375,13 +374,13 @@ describe('test git-proxy-cli', function () { it('attempt to ls should fail when server is down', async function () { try { // start server -> login -> stop server - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } - const cli = `npx -- @finos/git-proxy-cli ls`; + const cli = `${CLI_PATH} ls`; const expectedExitCode = 2; const expectedMessages = null; const expectedErrorMessages = ['Error: List:']; @@ -391,7 +390,7 @@ describe('test git-proxy-cli', function () { it('attempt to ls should fail when not authenticated', async function () { await helper.removeCookiesFile(); - const cli = `npx -- @finos/git-proxy-cli ls`; + const cli = `${CLI_PATH} ls`; const expectedExitCode = 1; const expectedMessages = null; const expectedErrorMessages = ['Error: List: Authentication required']; @@ -400,16 +399,16 @@ describe('test git-proxy-cli', function () { it('attempt to ls should fail when invalid option given', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username 
admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli ls --invalid`; + const cli = `${CLI_PATH} ls --invalid`; const expectedExitCode = 1; const expectedMessages = null; const expectedErrorMessages = ['Options:', 'Unknown argument: invalid']; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); }); @@ -420,7 +419,7 @@ describe('test git-proxy-cli', function () { const pushId = `reject0000000000000000000000000000000000__${Date.now()}`; before(async function () { - await helper.addRepoToDb(TEST_REPO_CONFIG); + await helper.addRepoToDb(TEST_REPO_CONFIG as Repo); await helper.addUserToDb(TEST_USER, TEST_PASSWORD, TEST_EMAIL, TEST_GIT_ACCOUNT); await helper.addGitPushToDb(pushId, TEST_REPO_CONFIG.url, TEST_USER, TEST_EMAIL); }); @@ -434,14 +433,14 @@ describe('test git-proxy-cli', function () { it('attempt to reject should fail when server is down', async function () { try { // start server -> login -> stop server - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } const id = GHOST_PUSH_ID; - const cli = `npx -- @finos/git-proxy-cli reject --id ${id}`; + const cli = `${CLI_PATH} reject --id ${id}`; const expectedExitCode = 2; const expectedMessages = null; const expectedErrorMessages = ['Error: Reject:']; @@ -452,7 +451,7 @@ describe('test git-proxy-cli', function () { await helper.removeCookiesFile(); const id = GHOST_PUSH_ID; - const cli = `npx -- @finos/git-proxy-cli reject --id ${id}`; + const cli = `${CLI_PATH} reject --id ${id}`; const 
expectedExitCode = 1; const expectedMessages = null; const expectedErrorMessages = ['Error: Reject: Authentication required']; @@ -462,31 +461,31 @@ describe('test git-proxy-cli', function () { it('attempt to reject should fail when not authenticated (server restarted)', async function () { try { await helper.createCookiesFileWithExpiredCookie(); - await helper.startServer(service); + await helper.startServer(); const id = pushId; - const cli = `npx -- @finos/git-proxy-cli reject --id ${id}`; + const cli = `${CLI_PATH} reject --id ${id}`; const expectedExitCode = 3; const expectedMessages = null; const expectedErrorMessages = ['Error: Reject: Authentication required']; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('attempt to reject should fail when git push ID not found', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); const id = GHOST_PUSH_ID; - const cli = `npx -- @finos/git-proxy-cli reject --id ${id}`; + const cli = `${CLI_PATH} reject --id ${id}`; const expectedExitCode = 4; const expectedMessages = null; const expectedErrorMessages = [`Error: Reject: ID: '${id}': Not Found`]; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); }); @@ -505,13 +504,13 @@ describe('test git-proxy-cli', function () { it('attempt to create user should fail when server is down', async function () { try { // start server -> login -> stop server - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + 
await helper.runCli(`${CLI_PATH} login --username admin --password admin`); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } - const cli = `npx -- @finos/git-proxy-cli create-user --username newuser --password newpass --email new@email.com --gitAccount newgit`; + const cli = `${CLI_PATH} create-user --username newuser --password newpass --email new@email.com --gitAccount newgit`; const expectedExitCode = 2; const expectedMessages = null; const expectedErrorMessages = ['Error: Create User:']; @@ -521,7 +520,7 @@ describe('test git-proxy-cli', function () { it('attempt to create user should fail when not authenticated', async function () { await helper.removeCookiesFile(); - const cli = `npx -- @finos/git-proxy-cli create-user --username newuser --password newpass --email new@email.com --gitAccount newgit`; + const cli = `${CLI_PATH} create-user --username newuser --password newpass --email new@email.com --gitAccount newgit`; const expectedExitCode = 1; const expectedMessages = null; const expectedErrorMessages = ['Error: Create User: Authentication required']; @@ -530,43 +529,41 @@ describe('test git-proxy-cli', function () { it('attempt to create user should fail when not admin', async function () { try { - await helper.startServer(service); - await helper.runCli( - `npx -- @finos/git-proxy-cli login --username testuser --password testpassword`, - ); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username testuser --password testpassword`); - const cli = `npx -- @finos/git-proxy-cli create-user --username newuser --password newpass --email new@email.com --gitAccount newgit`; + const cli = `${CLI_PATH} create-user --username newuser --password newpass --email new@email.com --gitAccount newgit`; const expectedExitCode = 3; const expectedMessages = null; const expectedErrorMessages = ['Error: Create User: Authentication required']; await helper.runCli(cli, expectedExitCode, expectedMessages, 
expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('attempt to create user should fail with missing required fields', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli create-user --username newuser --password "" --email new@email.com --gitAccount newgit`; + const cli = `${CLI_PATH} create-user --username newuser --password "" --email new@email.com --gitAccount newgit`; const expectedExitCode = 4; const expectedMessages = null; const expectedErrorMessages = ['Error: Create User: Missing required fields']; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('should successfully create a new user', async function () { const uniqueUsername = `newuser_${Date.now()}`; try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli create-user --username ${uniqueUsername} --password newpass --email new@email.com --gitAccount newgit`; + const cli = `${CLI_PATH} create-user --username ${uniqueUsername} --password newpass --email ${uniqueUsername}@email.com --gitAccount newgit`; const expectedExitCode = 0; const expectedMessages = [`User '${uniqueUsername}' created successfully`]; const expectedErrorMessages = null; @@ -574,17 +571,17 @@ describe('test git-proxy-cli', function () { // Verify we can login with the new user await helper.runCli( - `npx -- @finos/git-proxy-cli login --username 
${uniqueUsername} --password newpass`, + `${CLI_PATH} login --username ${uniqueUsername} --password newpass`, 0, - [`Login "${uniqueUsername}" : OK`], + [`Login "${uniqueUsername}" <${uniqueUsername}@email.com>: OK`], null, ); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); // Clean up the created user try { await helper.removeUserFromDb(uniqueUsername); - } catch (error) { + } catch (error: any) { // Ignore cleanup errors } } @@ -593,10 +590,10 @@ describe('test git-proxy-cli', function () { it('should successfully create a new admin user', async function () { const uniqueUsername = `newadmin_${Date.now()}`; try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli create-user --username ${uniqueUsername} --password newpass --email ${uniqueUsername}@email.com --gitAccount newgit --admin`; + const cli = `${CLI_PATH} create-user --username ${uniqueUsername} --password newpass --email ${uniqueUsername}@email.com --gitAccount newgit --admin`; const expectedExitCode = 0; const expectedMessages = [`User '${uniqueUsername}' created successfully`]; const expectedErrorMessages = null; @@ -604,17 +601,17 @@ describe('test git-proxy-cli', function () { // Verify we can login with the new admin user await helper.runCli( - `npx -- @finos/git-proxy-cli login --username ${uniqueUsername} --password newpass`, + `${CLI_PATH} login --username ${uniqueUsername} --password newpass`, 0, [`Login "${uniqueUsername}" <${uniqueUsername}@email.com> (admin): OK`], null, ); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); // Clean up the created user try { await helper.removeUserFromDb(uniqueUsername); - } catch (error) { + } catch (error: any) { console.error('Error cleaning up 
user', error); } } @@ -627,7 +624,7 @@ describe('test git-proxy-cli', function () { const pushId = `0000000000000000000000000000000000000000__${Date.now()}`; before(async function () { - await helper.addRepoToDb(TEST_REPO_CONFIG); + await helper.addRepoToDb(TEST_REPO_CONFIG as Repo); await helper.addUserToDb(TEST_USER, TEST_PASSWORD, TEST_EMAIL, TEST_GIT_ACCOUNT); await helper.addGitPushToDb(pushId, TEST_REPO_CONFIG.url, TEST_USER, TEST_EMAIL); }); @@ -640,10 +637,10 @@ describe('test git-proxy-cli', function () { it('attempt to ls should list existing push', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli ls --authorised false --blocked true --canceled false --rejected false`; + const cli = `${CLI_PATH} ls --authorised false --blocked true --canceled false --rejected false`; const expectedExitCode = 0; const expectedMessages = [ pushId, @@ -657,148 +654,148 @@ describe('test git-proxy-cli', function () { const expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('attempt to ls should not list existing push when filtered for authorised', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli ls --authorised true`; + const cli = `${CLI_PATH} ls --authorised true`; const expectedExitCode = 0; const expectedMessages = ['[]']; const expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, 
expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('attempt to ls should not list existing push when filtered for canceled', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli ls --canceled true`; + const cli = `${CLI_PATH} ls --canceled true`; const expectedExitCode = 0; const expectedMessages = ['[]']; const expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('attempt to ls should not list existing push when filtered for rejected', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli ls --rejected true`; + const cli = `${CLI_PATH} ls --rejected true`; const expectedExitCode = 0; const expectedMessages = ['[]']; const expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('attempt to ls should not list existing push when filtered for non-blocked', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - const cli = `npx -- @finos/git-proxy-cli ls 
--blocked false`; + const cli = `${CLI_PATH} ls --blocked false`; const expectedExitCode = 0; const expectedMessages = ['[]']; const expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('authorise push and test if appears on authorised list', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - let cli = `npx -- @finos/git-proxy-cli ls --authorised true --canceled false --rejected false`; + let cli = `${CLI_PATH} ls --authorised true --canceled false --rejected false`; let expectedExitCode = 0; let expectedMessages = ['[]']; let expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); - cli = `npx -- @finos/git-proxy-cli authorise --id ${pushId}`; + cli = `${CLI_PATH} authorise --id ${pushId}`; expectedExitCode = 0; expectedMessages = [`Authorise: ID: '${pushId}': OK`]; expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); - cli = `npx -- @finos/git-proxy-cli ls --authorised true --canceled false --rejected false`; + cli = `${CLI_PATH} ls --authorised true --canceled false --rejected false`; expectedExitCode = 0; expectedMessages = [pushId, TEST_REPO]; expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('reject push and test if appears on rejected list', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await 
helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - let cli = `npx -- @finos/git-proxy-cli ls --authorised false --canceled false --rejected true`; + let cli = `${CLI_PATH} ls --authorised false --canceled false --rejected true`; let expectedExitCode = 0; let expectedMessages = ['[]']; let expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); - cli = `npx -- @finos/git-proxy-cli reject --id ${pushId}`; + cli = `${CLI_PATH} reject --id ${pushId}`; expectedExitCode = 0; expectedMessages = [`Reject: ID: '${pushId}': OK`]; expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); - cli = `npx -- @finos/git-proxy-cli ls --authorised false --canceled false --rejected true`; + cli = `${CLI_PATH} ls --authorised false --canceled false --rejected true`; expectedExitCode = 0; expectedMessages = [pushId, TEST_REPO]; expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); } }); it('cancel push and test if appears on canceled list', async function () { try { - await helper.startServer(service); - await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + await helper.startServer(); + await helper.runCli(`${CLI_PATH} login --username admin --password admin`); - let cli = `npx -- @finos/git-proxy-cli ls --authorised false --canceled true --rejected false`; + let cli = `${CLI_PATH} ls --authorised false --canceled true --rejected false`; let expectedExitCode = 0; let expectedMessages = ['[]']; let expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); - cli = `npx -- @finos/git-proxy-cli cancel --id ${pushId}`; + cli = `${CLI_PATH} cancel --id ${pushId}`; expectedExitCode = 0; 
expectedMessages = [`Cancel: ID: '${pushId}': OK`]; expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); - cli = `npx -- @finos/git-proxy-cli ls --authorised false --canceled true --rejected false`; + cli = `${CLI_PATH} ls --authorised false --canceled true --rejected false`; expectedExitCode = 0; expectedMessages = [pushId, TEST_REPO]; expectedErrorMessages = null; await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); } finally { - await helper.closeServer(service.httpServer); + await helper.closeServer(); await helper.removeCookiesFile(); } }); diff --git a/packages/git-proxy-cli/test/testCliUtils.js b/packages/git-proxy-cli/test/testCliUtils.ts similarity index 74% rename from packages/git-proxy-cli/test/testCliUtils.js rename to packages/git-proxy-cli/test/testCliUtils.ts index 232ec0234..fd733f7e4 100644 --- a/packages/git-proxy-cli/test/testCliUtils.js +++ b/packages/git-proxy-cli/test/testCliUtils.ts @@ -1,13 +1,18 @@ -const fs = require('fs'); -const util = require('util'); -const { exec } = require('child_process'); -const execAsync = util.promisify(exec); -const { expect } = require('chai'); +import fs from 'fs'; +import util from 'util'; +import { exec } from 'child_process'; +import { expect } from 'chai'; + +import Proxy from '../../../src/proxy'; +import { Action } from '../../../src/proxy/actions/Action'; +import { Step } from '../../../src/proxy/actions/Step'; +import { exec as execProcessor } from '../../../src/proxy/processors/push-action/audit'; +import * as db from '../../../src/db'; +import { Server } from 'http'; +import { Repo } from '../../../src/db/types'; +import service from '../../../src/service'; -const actions = require('../../../src/proxy/actions/Action'); -const steps = require('../../../src/proxy/actions/Step'); -const processor = require('../../../src/proxy/processors/push-action/audit'); -const db = require('../../../src/db'); +const 
execAsync = util.promisify(exec); // cookie file name const GIT_PROXY_COOKIE_FILE = 'git-proxy-cookie'; @@ -26,11 +31,11 @@ const GIT_PROXY_COOKIE_FILE = 'git-proxy-cookie'; * match the `expectedExitCode`. */ async function runCli( - cli, - expectedExitCode = 0, - expectedMessages = null, - expectedErrorMessages = null, - debug = true, + cli: string, + expectedExitCode: number = 0, + expectedMessages: string[] | null = null, + expectedErrorMessages: string[] | null = null, + debug: boolean = true, ) { try { console.log(`cli: '${cli}'`); @@ -50,7 +55,7 @@ async function runCli( expect(stderr).to.include(expectedErrorMessage); }); } - } catch (error) { + } catch (error: any) { const exitCode = error.code; if (!exitCode) { // an AssertionError is thrown from failing some of the expectations @@ -81,12 +86,12 @@ async function runCli( /** * Starts the server. - * @param {Object} service - The GitProxy API service to be started. + * @param {*} service - The GitProxy API service to be started. * @return {Promise} A promise that resolves when the service has * successfully started. Does not return any value upon resolution. */ -async function startServer(service) { - await service.start(); +async function startServer() { + await service.start(new Proxy()); } /** @@ -95,7 +100,6 @@ async function startServer(service) { * async/await usage. It ensures the server stops accepting new connections * and terminates existing ones before shutting down. * - * @param {http.Server} server - The `http.Server` instance to close. * @param {number} waitTime - The wait time after close. * @return {Promise} A promise that resolves when the server has been * successfully closed, or rejects if an error occurs during closure. The @@ -104,8 +108,14 @@ async function startServer(service) { * @throws {Error} If the server cannot be closed properly or if an error * occurs during the close operation. 
*/ -async function closeServer(server, waitTime = 0) { - return new Promise((resolve, reject) => { +async function closeServer(waitTime: number = 0) { + const server = service.httpServer; + + if (!server) { + throw new Error('Server not started'); + } + + return new Promise((resolve, reject) => { server.closeAllConnections(); server.close((err) => { if (err) { @@ -146,14 +156,14 @@ async function removeCookiesFile() { * @param {object} newRepo The new repo attributes. * @param {boolean} debug Print debug messages to console if true. */ -async function addRepoToDb(newRepo, debug = false) { +async function addRepoToDb(newRepo: Repo, debug = false) { const repos = await db.getRepos(); const found = repos.find((y) => y.project === newRepo.project && newRepo.name === y.name); if (!found) { await db.createRepo(newRepo); const repo = await db.getRepoByUrl(newRepo.url); - await db.addUserCanPush(repo._id, 'admin'); - await db.addUserCanAuthorise(repo._id, 'admin'); + await db.addUserCanPush(repo?._id || '', 'admin'); + await db.addUserCanAuthorise(repo?._id || '', 'admin'); if (debug) { console.log(`New repo added to database: ${newRepo}`); } @@ -168,9 +178,9 @@ async function addRepoToDb(newRepo, debug = false) { * Removes a repo from the DB. * @param {string} repoUrl The url of the repo to remove. */ -async function removeRepoFromDb(repoUrl) { +async function removeRepoFromDb(repoUrl: string) { const repo = await db.getRepoByUrl(repoUrl); - await db.deleteRepo(repo._id); + await db.deleteRepo(repo?._id || ''); } /** @@ -181,17 +191,23 @@ async function removeRepoFromDb(repoUrl) { * @param {string} userEmail The email of the user who pushed the git push. * @param {boolean} debug Flag to enable logging for debugging. 
*/ -async function addGitPushToDb(id, repoUrl, user = null, userEmail = null, debug = false) { - const action = new actions.Action( +async function addGitPushToDb( + id: string, + repoUrl: string, + user: string | null = null, + userEmail: string | null = null, + debug: boolean = false, +) { + const action = new Action( id, 'push', // type 'get', // method Date.now(), // timestamp repoUrl, ); - action.user = user; - action.userEmail = userEmail; - const step = new steps.Step( + action.user = user || ''; + action.userEmail = userEmail || ''; + const step = new Step( 'authBlock', // stepName false, // error null, // errorMessage @@ -207,10 +223,12 @@ async function addGitPushToDb(id, repoUrl, user = null, userEmail = null, debug committer: 'committer', commitTs: 'commitTs', message: 'message', + authorEmail: 'authorEmail', + committerEmail: 'committerEmail', }); action.commitData = commitData; action.addStep(step); - const result = await processor.exec(null, action); + const result = await execProcessor(null, action); if (debug) { console.log(`New git push added to DB: ${util.inspect(result)}`); } @@ -220,7 +238,7 @@ async function addGitPushToDb(id, repoUrl, user = null, userEmail = null, debug * Removes a push from the DB * @param {string} id */ -async function removeGitPushFromDb(id) { +async function removeGitPushFromDb(id: string) { await db.deletePush(id); } @@ -233,7 +251,14 @@ async function removeGitPushFromDb(id) { * @param {boolean} admin Flag to make the user administrator. * @param {boolean} debug Flag to enable logging for debugging. 
*/ -async function addUserToDb(username, password, email, gitAccount, admin = false, debug = false) { +async function addUserToDb( + username: string, + password: string, + email: string, + gitAccount: string, + admin: boolean = false, + debug: boolean = false, +) { const result = await db.createUser(username, password, email, gitAccount, admin); if (debug) { console.log(`New user added to DB: ${util.inspect(result)}`); @@ -244,20 +269,20 @@ async function addUserToDb(username, password, email, gitAccount, admin = false, * Remove a user record from the database if present. * @param {string} username The user name. */ -async function removeUserFromDb(username) { +async function removeUserFromDb(username: string) { await db.deleteUser(username); } -module.exports = { - runCli: runCli, - startServer: startServer, - closeServer: closeServer, - addRepoToDb: addRepoToDb, - removeRepoFromDb: removeRepoFromDb, - addGitPushToDb: addGitPushToDb, - removeGitPushFromDb: removeGitPushFromDb, - addUserToDb: addUserToDb, - removeUserFromDb: removeUserFromDb, - createCookiesFileWithExpiredCookie: createCookiesFileWithExpiredCookie, - removeCookiesFile: removeCookiesFile, +export { + runCli, + startServer, + closeServer, + addRepoToDb, + removeRepoFromDb, + addGitPushToDb, + removeGitPushFromDb, + addUserToDb, + removeUserFromDb, + createCookiesFileWithExpiredCookie, + removeCookiesFile, }; diff --git a/packages/git-proxy-cli/tsconfig.json b/packages/git-proxy-cli/tsconfig.json index 236bfabc5..1c96fb30f 100644 --- a/packages/git-proxy-cli/tsconfig.json +++ b/packages/git-proxy-cli/tsconfig.json @@ -5,14 +5,25 @@ "allowJs": true, "checkJs": false, "jsx": "react-jsx", - "moduleResolution": "Node", + "moduleResolution": "nodenext", "strict": true, - "noEmit": true, + "declaration": true, "skipLibCheck": true, "isolatedModules": true, - "module": "CommonJS", + "module": "NodeNext", "esModuleInterop": true, - "allowSyntheticDefaultImports": true + "allowSyntheticDefaultImports": true, 
+ "resolveJsonModule": true, + "outDir": "./dist", + "rootDir": "." }, - "include": ["index.js", "test", "coverage"] + "include": ["index.ts", "types.ts"], + "exclude": [ + "src/config/**/*", + "src/db/**/*", + "src/proxy/**/*", + "src/service/**/*", + "src/ui/**/*", + "eslint.config.mjs" + ] } diff --git a/proxy.config.json b/proxy.config.json index 428f9c801..31023e745 100644 --- a/proxy.config.json +++ b/proxy.config.json @@ -67,11 +67,7 @@ } } ], - "api": { - "github": { - "baseUrl": "https://api.github.com" - } - }, + "api": {}, "commitConfig": { "author": { "email": { diff --git a/src/config/generated/config.ts b/src/config/generated/config.ts index a4db4851f..d61df7dcc 100644 --- a/src/config/generated/config.ts +++ b/src/config/generated/config.ts @@ -23,9 +23,10 @@ export interface GitProxyConfig { */ apiAuthentication?: AuthenticationElement[]; /** - * Customisable questions to add to attestation form + * Configuration for the attestation form displayed to reviewers. Reviewers will need to + * check the box next to each question in order to complete the review attestation. */ - attestationConfig?: { [key: string]: any }; + attestationConfig?: AttestationConfig; /** * List of authentication sources. The first source in the configuration with enabled=true * will be used. 
@@ -40,9 +41,10 @@ export interface GitProxyConfig { */ cache?: Cache; /** - * Enforce rules and patterns on commits including e-mail and message + * Block commits based on rules defined over author/committer e-mail addresses, commit + * message content and diff content */ - commitConfig?: { [key: string]: any }; + commitConfig?: CommitConfig; configurationSources?: any; /** * Customisable e-mail address to share in proxy responses and warnings @@ -54,16 +56,17 @@ export interface GitProxyConfig { */ csrfProtection?: boolean; /** - * Provide domains to use alternative to the defaults + * Provide custom URLs for the git proxy interfaces in case it cannot determine its own URL */ - domains?: { [key: string]: any }; + domains?: Domains; /** * List of plugins to integrate on GitProxy's push or pull actions. Each value is either a * file path or a module name. */ plugins?: string[]; /** - * Pattern searches for listed private organizations are disabled + * Provider searches for listed private organizations are disabled, see + * commitConfig.diff.block.providers */ privateOrganizations?: any[]; /** @@ -112,11 +115,6 @@ export interface GitProxyConfig { * Third party APIs */ export interface API { - /** - * Deprecated: Defunct property that was used to provide the API URL for GitHub. No longer - * referenced in the codebase. - */ - github?: Github; /** * Configuration for the gitleaks (https://github.com/gitleaks/gitleaks) plugin */ @@ -129,14 +127,6 @@ export interface API { ls?: Ls; } -/** - * Deprecated: Defunct property that was used to provide the API URL for GitHub. No longer - * referenced in the codebase. - */ -export interface Github { - baseUrl?: string; -} - /** * Configuration for the gitleaks (https://github.com/gitleaks/gitleaks) plugin */ @@ -261,6 +251,38 @@ export enum Type { Openidconnect = 'openidconnect', } +/** + * Configuration for the attestation form displayed to reviewers. 
Reviewers will need to + * check the box next to each question in order to complete the review attestation. + */ +export interface AttestationConfig { + /** + * Customisable attestation questions to add to attestation form. + */ + questions?: Question[]; +} + +export interface Question { + /** + * The text of the question that will be displayed to the reviewer + */ + label: string; + /** + * A tooltip and optional set of links that will be displayed on mouseover of the question + * and used to provide additional guidance to the reviewer. + */ + tooltip: QuestionTooltip; +} + +/** + * A tooltip and optional set of links that will be displayed on mouseover of the question + * and used to provide additional guidance to the reviewer. + */ +export interface QuestionTooltip { + links?: string[]; + text: string; +} + export interface AuthorisedRepo { name: string; project: string; @@ -286,6 +308,140 @@ export interface Cache { maxSizeGB: number; } +/** + * Block commits based on rules defined over author/committer e-mail addresses, commit + * message content and diff content + */ +export interface CommitConfig { + /** + * Rules applied to commit authors + */ + author?: Author; + /** + * Rules applied to commit diff content + */ + diff?: Diff; + /** + * Rules applied to commit messages + */ + message?: Message; +} + +/** + * Rules applied to commit authors + */ +export interface Author { + /** + * Rules applied to author email addresses + */ + email?: Email; +} + +/** + * Rules applied to author email addresses + */ +export interface Email { + /** + * Rules applied to the domain portion of the email address (i.e. section after the @ symbol) + */ + domain?: Domain; + /** + * Rules applied to the local portion of the email address (i.e. section before the @ symbol) + */ + local?: Local; +} + +/** + * Rules applied to the domain portion of the email address (i.e. 
section after the @ symbol) + */ +export interface Domain { + /** + * Allow only commits where the domain part of the email address matches this regular + * expression + */ + allow?: string; +} + +/** + * Rules applied to the local portion of the email address (i.e. section before the @ symbol) + */ +export interface Local { + /** + * Block commits with author email addresses where the first part matches this regular + * expression + */ + block?: string; +} + +/** + * Rules applied to commit diff content + */ +export interface Diff { + /** + * Block commits where the commit diff matches any of the given patterns + */ + block?: DiffBlock; +} + +/** + * Block commits where the commit diff matches any of the given patterns + */ +export interface DiffBlock { + /** + * Block commits where the commit diff content contains any of the given string literals + */ + literals?: string[]; + /** + * Block commits where the commit diff content matches any of the given regular expressions + */ + patterns?: any[]; + /** + * Block commits where the commit diff content matches any of the given regular expressions, + * except where the repository path (project/organisation) matches one of the listed + * privateOrganisations. The keys in this array are listed as the block type in logs. 
+ */ + providers?: { [key: string]: string }; +} + +/** + * Rules applied to commit messages + */ +export interface Message { + /** + * Block commits where the commit message matches any of the given patterns + */ + block?: MessageBlock; +} + +/** + * Block commits where the commit message matches any of the given patterns + */ +export interface MessageBlock { + /** + * Block commits where the commit message contains any of the given string literals + */ + literals?: string[]; + /** + * Block commits where the commit message matches any of the given regular expressions + */ + patterns?: string[]; +} + +/** + * Provide custom URLs for the git proxy interfaces in case it cannot determine its own URL + */ +export interface Domains { + /** + * Override for the default proxy URL, should include the protocol + */ + proxy?: string; + /** + * Override for the service UI URL, should include the protocol + */ + service?: string; + [property: string]: any; +} + /** * API Rate limiting configuration. 
*/ @@ -545,7 +701,11 @@ const typeMap: any = { js: 'apiAuthentication', typ: u(undefined, a(r('AuthenticationElement'))), }, - { json: 'attestationConfig', js: 'attestationConfig', typ: u(undefined, m('any')) }, + { + json: 'attestationConfig', + js: 'attestationConfig', + typ: u(undefined, r('AttestationConfig')), + }, { json: 'authentication', js: 'authentication', @@ -553,12 +713,12 @@ const typeMap: any = { }, { json: 'authorisedList', js: 'authorisedList', typ: u(undefined, a(r('AuthorisedRepo'))) }, { json: 'cache', js: 'cache', typ: u(undefined, r('Cache')) }, - { json: 'commitConfig', js: 'commitConfig', typ: u(undefined, m('any')) }, + { json: 'commitConfig', js: 'commitConfig', typ: u(undefined, r('CommitConfig')) }, { json: 'configurationSources', js: 'configurationSources', typ: u(undefined, 'any') }, { json: 'contactEmail', js: 'contactEmail', typ: u(undefined, '') }, { json: 'cookieSecret', js: 'cookieSecret', typ: u(undefined, '') }, { json: 'csrfProtection', js: 'csrfProtection', typ: u(undefined, true) }, - { json: 'domains', js: 'domains', typ: u(undefined, m('any')) }, + { json: 'domains', js: 'domains', typ: u(undefined, r('Domains')) }, { json: 'plugins', js: 'plugins', typ: u(undefined, a('')) }, { json: 'privateOrganizations', js: 'privateOrganizations', typ: u(undefined, a('any')) }, { json: 'proxyUrl', js: 'proxyUrl', typ: u(undefined, '') }, @@ -576,13 +736,11 @@ const typeMap: any = { ), API: o( [ - { json: 'github', js: 'github', typ: u(undefined, r('Github')) }, { json: 'gitleaks', js: 'gitleaks', typ: u(undefined, r('Gitleaks')) }, { json: 'ls', js: 'ls', typ: u(undefined, r('Ls')) }, ], false, ), - Github: o([{ json: 'baseUrl', js: 'baseUrl', typ: u(undefined, '') }], false), Gitleaks: o( [ { json: 'configPath', js: 'configPath', typ: u(undefined, '') }, @@ -632,6 +790,24 @@ const typeMap: any = { ], 'any', ), + AttestationConfig: o( + [{ json: 'questions', js: 'questions', typ: u(undefined, a(r('Question'))) }], + false, + ), + 
Question: o( + [ + { json: 'label', js: 'label', typ: '' }, + { json: 'tooltip', js: 'tooltip', typ: r('QuestionTooltip') }, + ], + false, + ), + QuestionTooltip: o( + [ + { json: 'links', js: 'links', typ: u(undefined, a('')) }, + { json: 'text', js: 'text', typ: '' }, + ], + false, + ), AuthorisedRepo: o( [ { json: 'name', js: 'name', typ: '' }, @@ -648,6 +824,48 @@ const typeMap: any = { ], false, ), + CommitConfig: o( + [ + { json: 'author', js: 'author', typ: u(undefined, r('Author')) }, + { json: 'diff', js: 'diff', typ: u(undefined, r('Diff')) }, + { json: 'message', js: 'message', typ: u(undefined, r('Message')) }, + ], + false, + ), + Author: o([{ json: 'email', js: 'email', typ: u(undefined, r('Email')) }], false), + Email: o( + [ + { json: 'domain', js: 'domain', typ: u(undefined, r('Domain')) }, + { json: 'local', js: 'local', typ: u(undefined, r('Local')) }, + ], + false, + ), + Domain: o([{ json: 'allow', js: 'allow', typ: u(undefined, '') }], false), + Local: o([{ json: 'block', js: 'block', typ: u(undefined, '') }], false), + Diff: o([{ json: 'block', js: 'block', typ: u(undefined, r('DiffBlock')) }], false), + DiffBlock: o( + [ + { json: 'literals', js: 'literals', typ: u(undefined, a('')) }, + { json: 'patterns', js: 'patterns', typ: u(undefined, a('any')) }, + { json: 'providers', js: 'providers', typ: u(undefined, m('')) }, + ], + false, + ), + Message: o([{ json: 'block', js: 'block', typ: u(undefined, r('MessageBlock')) }], false), + MessageBlock: o( + [ + { json: 'literals', js: 'literals', typ: u(undefined, a('')) }, + { json: 'patterns', js: 'patterns', typ: u(undefined, a('')) }, + ], + false, + ), + Domains: o( + [ + { json: 'proxy', js: 'proxy', typ: u(undefined, '') }, + { json: 'service', js: 'service', typ: u(undefined, '') }, + ], + 'any', + ), RateLimit: o( [ { json: 'limit', js: 'limit', typ: 3.14 }, diff --git a/src/config/index.ts b/src/config/index.ts index fad8cbab2..84ba8fafd 100644 --- a/src/config/index.ts +++ 
b/src/config/index.ts @@ -159,7 +159,7 @@ export const getDatabase = () => { * Get the list of enabled authentication methods * * At least one authentication method must be enabled. - * @return {Authentication[]} List of enabled authentication methods + * @return List of enabled authentication methods */ export const getAuthMethods = () => { const config = loadFullConfiguration(); @@ -178,7 +178,7 @@ export const getAuthMethods = () => { * Get the list of enabled authentication methods for API endpoints * * If no API authentication methods are enabled, all endpoints are public. - * @return {Authentication[]} List of enabled authentication methods + * @return List of enabled authentication methods */ export const getAPIAuthMethods = () => { const config = loadFullConfiguration(); diff --git a/src/config/types.ts b/src/config/types.ts deleted file mode 100644 index af71421df..000000000 --- a/src/config/types.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { Options as RateLimitOptions } from 'express-rate-limit'; - -export interface UserSettings { - uiRouteAuth: Record; - authorisedList: AuthorisedRepo[]; - sink: Database[]; - authentication: Authentication[]; - apiAuthentication: Authentication[]; - tempPassword?: TempPasswordConfig; - proxyUrl: string; - api: Record; - cookieSecret: string; - sessionMaxAgeHours: number; - tls?: TLSConfig; - sslCertPemPath?: string; // deprecated - sslKeyPemPath?: string; // deprecated - plugins: any[]; - commitConfig: Record; - attestationConfig: Record; - privateOrganizations: any[]; - urlShortener: string; - contactEmail: string; - csrfProtection: boolean; - domains: Record; - rateLimit: RateLimitConfig; - cache: CacheConfig; -} - -export interface TLSConfig { - enabled?: boolean; - cert?: string; - key?: string; -} - -export interface AuthorisedRepo { - project: string; - name: string; - url: string; -} - -export interface Database { - type: string; - enabled: boolean; - connectionString?: string; - params?: Record; - options?: 
Record; -} - -export interface Authentication { - type: string; - enabled: boolean; - options?: Record; -} - -export interface TempPasswordConfig { - sendEmail: boolean; - emailConfig: Record; -} - -export type RateLimitConfig = Partial< - Pick ->; - -export interface CacheConfig { - maxSizeGB: number; - maxRepositories: number; - cacheDir: string; -} diff --git a/src/db/file/helper.ts b/src/db/file/helper.ts new file mode 100644 index 000000000..281853242 --- /dev/null +++ b/src/db/file/helper.ts @@ -0,0 +1 @@ +export const getSessionStore = (): undefined => undefined; diff --git a/src/db/file/index.ts b/src/db/file/index.ts index c41227b84..3f746dcff 100644 --- a/src/db/file/index.ts +++ b/src/db/file/index.ts @@ -1,6 +1,9 @@ import * as users from './users'; import * as repo from './repo'; import * as pushes from './pushes'; +import * as helper from './helper'; + +export const { getSessionStore } = helper; export const { getPushes, writeAudit, getPush, deletePush, authorise, cancel, reject } = pushes; diff --git a/src/db/file/pushes.ts b/src/db/file/pushes.ts index 10cc2a4fd..2875b87f1 100644 --- a/src/db/file/pushes.ts +++ b/src/db/file/pushes.ts @@ -24,14 +24,15 @@ try { } db.setAutocompactionInterval(COMPACTION_INTERVAL); -const defaultPushQuery: PushQuery = { +const defaultPushQuery: Partial = { error: false, blocked: true, allowPush: false, authorised: false, + type: 'push', }; -export const getPushes = (query: PushQuery): Promise => { +export const getPushes = (query: Partial): Promise => { if (!query) query = defaultPushQuery; return new Promise((resolve, reject) => { db.find(query, (err: Error, docs: Action[]) => { diff --git a/src/db/file/repo.ts b/src/db/file/repo.ts index 11c3d775f..79027c490 100644 --- a/src/db/file/repo.ts +++ b/src/db/file/repo.ts @@ -1,9 +1,10 @@ import fs from 'fs'; import Datastore from '@seald-io/nedb'; -import { Repo } from '../types'; -import { toClass } from '../helper'; import _ from 'lodash'; +import { Repo, RepoQuery } 
from '../types'; +import { toClass } from '../helper'; + const COMPACTION_INTERVAL = 1000 * 60 * 60 * 24; // once per day // these don't get coverage in tests as they have already been run once before the test @@ -27,7 +28,7 @@ try { db.ensureIndex({ fieldName: 'name', unique: false }); db.setAutocompactionInterval(COMPACTION_INTERVAL); -export const getRepos = async (query: any = {}): Promise => { +export const getRepos = async (query: Partial = {}): Promise => { if (query?.name) { query.name = query.name.toLowerCase(); } diff --git a/src/db/file/users.ts b/src/db/file/users.ts index e449f7ff2..7bab7c1b1 100644 --- a/src/db/file/users.ts +++ b/src/db/file/users.ts @@ -1,6 +1,7 @@ import fs from 'fs'; import Datastore from '@seald-io/nedb'; -import { User } from '../types'; + +import { User, UserQuery } from '../types'; const COMPACTION_INTERVAL = 1000 * 60 * 60 * 24; // once per day @@ -115,11 +116,14 @@ export const deleteUser = (username: string): Promise => { }); }; -export const updateUser = (user: User): Promise => { - user.username = user.username.toLowerCase(); +export const updateUser = (user: Partial): Promise => { + if (user.username) { + user.username = user.username.toLowerCase(); + } if (user.email) { user.email = user.email.toLowerCase(); } + return new Promise((resolve, reject) => { // The mongo db adaptor adds fields to existing documents, where this adaptor replaces the document // hence, retrieve and merge documents to avoid dropping fields (such as the gitaccount) @@ -153,7 +157,7 @@ export const updateUser = (user: User): Promise => { }); }; -export const getUsers = (query: any = {}): Promise => { +export const getUsers = (query: Partial = {}): Promise => { if (query.username) { query.username = query.username.toLowerCase(); } diff --git a/src/db/index.ts b/src/db/index.ts index d743663b7..e978a1d20 100644 --- a/src/db/index.ts +++ b/src/db/index.ts @@ -1,5 +1,5 @@ -import { AuthorisedRepo } from '../config/types'; -import { PushQuery, Repo, 
Sink, User } from './types'; +import { AuthorisedRepo } from '../config/generated/config'; +import { PushQuery, Repo, RepoQuery, Sink, User, UserQuery } from './types'; import * as bcrypt from 'bcryptjs'; import * as config from '../config'; import * as mongo from './mongo'; @@ -140,9 +140,9 @@ export const canUserCancelPush = async (id: string, user: string) => { } }; -export const getSessionStore = (): MongoDBStore | null => - sink.getSessionStore ? sink.getSessionStore() : null; -export const getPushes = (query: PushQuery): Promise => sink.getPushes(query); +export const getSessionStore = (): MongoDBStore | undefined => + sink.getSessionStore ? sink.getSessionStore() : undefined; +export const getPushes = (query: Partial): Promise => sink.getPushes(query); export const writeAudit = (action: Action): Promise => sink.writeAudit(action); export const getPush = (id: string): Promise => sink.getPush(id); export const deletePush = (id: string): Promise => sink.deletePush(id); @@ -151,7 +151,7 @@ export const authorise = (id: string, attestation: any): Promise<{ message: stri export const cancel = (id: string): Promise<{ message: string }> => sink.cancel(id); export const reject = (id: string, attestation: any): Promise<{ message: string }> => sink.reject(id, attestation); -export const getRepos = (query?: object): Promise => sink.getRepos(query); +export const getRepos = (query?: Partial): Promise => sink.getRepos(query); export const getRepo = (name: string): Promise => sink.getRepo(name); export const getRepoByUrl = (url: string): Promise => sink.getRepoByUrl(url); export const getRepoById = (_id: string): Promise => sink.getRepoById(_id); @@ -167,6 +167,6 @@ export const deleteRepo = (_id: string): Promise => sink.deleteRepo(_id); export const findUser = (username: string): Promise => sink.findUser(username); export const findUserByEmail = (email: string): Promise => sink.findUserByEmail(email); export const findUserByOIDC = (oidcId: string): Promise => 
sink.findUserByOIDC(oidcId); -export const getUsers = (query?: object): Promise => sink.getUsers(query); +export const getUsers = (query?: Partial): Promise => sink.getUsers(query); export const deleteUser = (username: string): Promise => sink.deleteUser(username); -export const updateUser = (user: User): Promise => sink.updateUser(user); +export const updateUser = (user: Partial): Promise => sink.updateUser(user); diff --git a/src/db/mongo/pushes.ts b/src/db/mongo/pushes.ts index e1b3a4bbe..968b2858a 100644 --- a/src/db/mongo/pushes.ts +++ b/src/db/mongo/pushes.ts @@ -5,14 +5,17 @@ import { PushQuery } from '../types'; const collectionName = 'pushes'; -const defaultPushQuery: PushQuery = { +const defaultPushQuery: Partial = { error: false, blocked: true, allowPush: false, authorised: false, + type: 'push', }; -export const getPushes = async (query: PushQuery = defaultPushQuery): Promise => { +export const getPushes = async ( + query: Partial = defaultPushQuery, +): Promise => { return findDocuments(collectionName, query, { projection: { _id: 0, diff --git a/src/db/mongo/users.ts b/src/db/mongo/users.ts index 1b6a846f6..f4300c39e 100644 --- a/src/db/mongo/users.ts +++ b/src/db/mongo/users.ts @@ -50,8 +50,10 @@ export const createUser = async function (user: User): Promise { await collection.insertOne(user as OptionalId); }; -export const updateUser = async (user: User): Promise => { - user.username = user.username.toLowerCase(); +export const updateUser = async (user: Partial): Promise => { + if (user.username) { + user.username = user.username.toLowerCase(); + } if (user.email) { user.email = user.email.toLowerCase(); } diff --git a/src/db/types.ts b/src/db/types.ts index d95c352e0..0a179b233 100644 --- a/src/db/types.ts +++ b/src/db/types.ts @@ -6,8 +6,27 @@ export type PushQuery = { blocked: boolean; allowPush: boolean; authorised: boolean; + type: string; + [key: string]: QueryValue; + canceled: boolean; + rejected: boolean; }; +export type RepoQuery = { + 
name: string; + url: string; + project: string; + [key: string]: QueryValue; +}; + +export type UserQuery = { + username: string; + email: string; + [key: string]: QueryValue; +}; + +export type QueryValue = string | boolean | number | undefined; + export type UserRole = 'canPush' | 'canAuthorise'; export class Repo { @@ -39,6 +58,8 @@ export class User { email: string; admin: boolean; oidcId?: string | null; + displayName?: string | null; + title?: string | null; _id?: string; constructor( @@ -61,15 +82,15 @@ export class User { } export interface Sink { - getSessionStore?: () => MongoDBStore; - getPushes: (query: PushQuery) => Promise; + getSessionStore: () => MongoDBStore | undefined; + getPushes: (query: Partial) => Promise; writeAudit: (action: Action) => Promise; getPush: (id: string) => Promise; deletePush: (id: string) => Promise; authorise: (id: string, attestation: any) => Promise<{ message: string }>; cancel: (id: string) => Promise<{ message: string }>; reject: (id: string, attestation: any) => Promise<{ message: string }>; - getRepos: (query?: object) => Promise; + getRepos: (query?: Partial) => Promise; getRepo: (name: string) => Promise; getRepoByUrl: (url: string) => Promise; getRepoById: (_id: string) => Promise; @@ -82,8 +103,8 @@ export interface Sink { findUser: (username: string) => Promise; findUserByEmail: (email: string) => Promise; findUserByOIDC: (oidcId: string) => Promise; - getUsers: (query?: object) => Promise; + getUsers: (query?: Partial) => Promise; createUser: (user: User) => Promise; deleteUser: (username: string) => Promise; - updateUser: (user: User) => Promise; + updateUser: (user: Partial) => Promise; } diff --git a/src/proxy/actions/Step.ts b/src/proxy/actions/Step.ts index 504b5390c..e0db4d5b3 100644 --- a/src/proxy/actions/Step.ts +++ b/src/proxy/actions/Step.ts @@ -1,7 +1,18 @@ import { v4 as uuidv4 } from 'uuid'; +export interface StepData { + id: string; + stepName: string; + content: any; + error: boolean; + 
errorMessage: string | null; + blocked: boolean; + blockedMessage: string | null; + logs: string[]; +} + /** Class representing a Push Step. */ -class Step { +class Step implements StepData { id: string; stepName: string; content: any; diff --git a/src/proxy/processors/push-action/checkAuthorEmails.ts b/src/proxy/processors/push-action/checkAuthorEmails.ts index 00774cbe7..3c7cbb89c 100644 --- a/src/proxy/processors/push-action/checkAuthorEmails.ts +++ b/src/proxy/processors/push-action/checkAuthorEmails.ts @@ -13,14 +13,14 @@ const isEmailAllowed = (email: string): boolean => { const [emailLocal, emailDomain] = email.split('@'); if ( - commitConfig.author.email.domain.allow && + commitConfig?.author?.email?.domain?.allow && !new RegExp(commitConfig.author.email.domain.allow, 'g').test(emailDomain) ) { return false; } if ( - commitConfig.author.email.local.block && + commitConfig?.author?.email?.local?.block && new RegExp(commitConfig.author.email.local.block, 'g').test(emailLocal) ) { return false; diff --git a/src/proxy/processors/push-action/checkCommitMessages.ts b/src/proxy/processors/push-action/checkCommitMessages.ts index a85b2fa9c..79a259d8b 100644 --- a/src/proxy/processors/push-action/checkCommitMessages.ts +++ b/src/proxy/processors/push-action/checkCommitMessages.ts @@ -19,10 +19,10 @@ const isMessageAllowed = (commitMessage: string): boolean => { } // Configured blocked literals - const blockedLiterals: string[] = commitConfig.message.block.literals; + const blockedLiterals: string[] = commitConfig?.message?.block?.literals ?? []; // Configured blocked patterns - const blockedPatterns: string[] = commitConfig.message.block.patterns; + const blockedPatterns: string[] = commitConfig?.message?.block?.patterns ?? []; // Find all instances of blocked literals in commit message... 
const positiveLiterals = blockedLiterals.map((literal: string) => diff --git a/src/proxy/processors/push-action/gitleaks.ts b/src/proxy/processors/push-action/gitleaks.ts index af28a499b..1cf5b2236 100644 --- a/src/proxy/processors/push-action/gitleaks.ts +++ b/src/proxy/processors/push-action/gitleaks.ts @@ -171,7 +171,7 @@ const exec = async (req: any, action: Action): Promise => { step.setError('\n' + gitleaks.stdout + gitleaks.stderr); } } else { - console.log('succeded'); + console.log('succeeded'); console.log(gitleaks.stderr); } } catch (e) { diff --git a/src/proxy/processors/push-action/scanDiff.ts b/src/proxy/processors/push-action/scanDiff.ts index df797ec02..56f3ddc11 100644 --- a/src/proxy/processors/push-action/scanDiff.ts +++ b/src/proxy/processors/push-action/scanDiff.ts @@ -1,6 +1,7 @@ import { Action, Step } from '../../actions'; import { getCommitConfig, getPrivateOrganizations } from '../../../config'; import parseDiff, { File } from 'parse-diff'; +import escapeStringRegexp from 'escape-string-regexp'; const commitConfig = getCommitConfig(); const privateOrganizations = getPrivateOrganizations(); @@ -63,22 +64,23 @@ const getDiffViolations = (diff: string, organization: string): Match[] | string const combineMatches = (organization: string) => { // Configured blocked literals - const blockedLiterals: string[] = commitConfig.diff.block.literals; + const blockedLiterals: string[] = commitConfig?.diff?.block?.literals ?? []; // Configured blocked patterns - const blockedPatterns: string[] = commitConfig.diff.block.patterns; + const blockedPatterns: string[] = commitConfig?.diff?.block?.patterns ?? []; // Configured blocked providers const blockedProviders: [string, string][] = organization && privateOrganizations.includes(organization) ? [] - : Object.entries(commitConfig.diff.block.providers); + : Object.entries(commitConfig?.diff?.block?.providers ?? 
[]); // Combine all matches (literals, patterns) + const combinedMatches = [ ...blockedLiterals.map((literal) => ({ type: BLOCK_TYPE.LITERAL, - match: new RegExp(literal, 'gi'), + match: new RegExp(escapeStringRegexp(literal), 'gi'), //TODO: swap out escapeStringRegexp() for RegExp.escape() when we require node 24 })), ...blockedPatterns.map((pattern) => ({ type: BLOCK_TYPE.PATTERN, diff --git a/src/service/emailSender.js b/src/service/emailSender.js deleted file mode 100644 index aa1ddeee1..000000000 --- a/src/service/emailSender.js +++ /dev/null @@ -1,20 +0,0 @@ -const nodemailer = require('nodemailer'); -const config = require('../config'); - -exports.sendEmail = async (from, to, subject, body) => { - const smtpHost = config.getSmtpHost(); - const smtpPort = config.getSmtpPort(); - const transporter = nodemailer.createTransport({ - host: smtpHost, - port: smtpPort, - }); - - const email = `${body}`; - const info = await transporter.sendMail({ - from, - to, - subject, - html: email, - }); - console.log('Message sent %s', info.messageId); -}; diff --git a/src/service/index.js b/src/service/index.js deleted file mode 100644 index f03d75b68..000000000 --- a/src/service/index.js +++ /dev/null @@ -1,142 +0,0 @@ -const express = require('express'); -const session = require('express-session'); -const http = require('http'); -const cors = require('cors'); -const app = express(); -const path = require('path'); -const config = require('../config'); -const db = require('../db'); -const rateLimit = require('express-rate-limit'); -const lusca = require('lusca'); -const configLoader = require('../config/ConfigLoader'); - -const limiter = rateLimit(config.getRateLimit()); - -const { GIT_PROXY_UI_PORT: uiPort } = require('../config/env').serverConfig; - -const _httpServer = http.createServer(app); - -const corsOptions = { - credentials: true, - origin: true, -}; - -/** - * Internal function used to bootstrap the Git Proxy API's express application. 
- * @param {proxy} proxy A reference to the proxy express application, used to restart it when necessary. - * @return {Promise} - */ -async function createApp(proxy) { - // configuration of passport is async - // Before we can bind the routes - we need the passport strategy - const passport = await require('./passport').configure(); - const routes = require('./routes'); - const absBuildPath = path.join(__dirname, '../../build'); - app.use(cors(corsOptions)); - app.set('trust proxy', 1); - app.use(limiter); - - // Add new admin-only endpoint to reload config - app.post('/api/v1/admin/reload-config', async (req, res) => { - if (!req.isAuthenticated() || !req.user.admin) { - return res.status(403).json({ error: 'Unauthorized' }); - } - - try { - // 1. Reload configuration - await configLoader.loadConfiguration(); - - // 2. Stop existing services - await proxy.stop(); - - // 3. Apply new configuration - config.validate(); - - // 4. Restart services with new config - await proxy.start(); - - console.log('Configuration reloaded and services restarted successfully'); - res.json({ status: 'success', message: 'Configuration reloaded and services restarted' }); - } catch (error) { - console.error('Failed to reload configuration and restart services:', error); - - // Attempt to restart with existing config if reload fails - try { - await proxy.start(); - } catch (startError) { - console.error('Failed to restart services:', startError); - } - - res.status(500).json({ error: 'Failed to reload configuration' }); - } - }); - - app.use( - session({ - store: config.getDatabase().type === 'mongo' ? 
db.getSessionStore(session) : null, - secret: config.getCookieSecret(), - resave: false, - saveUninitialized: false, - cookie: { - secure: 'auto', - httpOnly: true, - maxAge: config.getSessionMaxAgeHours() * 60 * 60 * 1000, - }, - }), - ); - if (config.getCSRFProtection() && process.env.NODE_ENV !== 'test') { - app.use( - lusca({ - csrf: { - cookie: { name: 'csrf' }, - }, - hsts: { maxAge: 31536000, includeSubDomains: true, preload: true }, - nosniff: true, - referrerPolicy: 'same-origin', - xframe: 'SAMEORIGIN', - xssProtection: true, - }), - ); - } - app.use(passport.initialize()); - app.use(passport.session()); - app.use(express.json()); - app.use(express.urlencoded({ extended: true })); - app.use('/', routes(proxy)); - app.use('/', express.static(absBuildPath)); - app.get('/*', (req, res) => { - res.sendFile(path.join(`${absBuildPath}/index.html`)); - }); - - return app; -} - -/** - * Starts the proxy service. - * @param {proxy?} proxy A reference to the proxy express application, used to restart it when necessary. - * @return {Promise} the express application (used for testing). - */ -async function start(proxy) { - if (!proxy) { - console.warn("WARNING: proxy is null and can't be controlled by the API service"); - } - - const app = await createApp(proxy); - - _httpServer.listen(uiPort); - - console.log(`Service Listening on ${uiPort}`); - app.emit('ready'); - - return app; -} - -/** - * Stops the proxy service. 
- */ -async function stop() { - console.log(`Stopping Service Listening on ${uiPort}`); - _httpServer.close(); -} - -module.exports = { start, stop, httpServer: _httpServer }; diff --git a/src/service/index.ts b/src/service/index.ts new file mode 100644 index 000000000..15c86307a --- /dev/null +++ b/src/service/index.ts @@ -0,0 +1,116 @@ +import express, { Express } from 'express'; +import session from 'express-session'; +import http from 'http'; +import cors from 'cors'; +import path from 'path'; +import rateLimit from 'express-rate-limit'; +import lusca from 'lusca'; + +import * as config from '../config'; +import * as db from '../db'; +import { serverConfig } from '../config/env'; +import Proxy from '../proxy'; +import routes from './routes'; +import { configure } from './passport'; + +const limiter = rateLimit(config.getRateLimit()); + +const { GIT_PROXY_UI_PORT: uiPort } = serverConfig; + +const DEFAULT_SESSION_MAX_AGE_HOURS = 12; + +const app: Express = express(); +const _httpServer = http.createServer(app); + +const corsOptions = { + credentials: true, + origin: true, +}; + +/** + * Internal function used to bootstrap the Git Proxy API's express application. + * @param {Proxy} proxy A reference to the proxy, used to restart it when necessary. 
+ * @return {Promise} the express application + */ +async function createApp(proxy: Proxy): Promise { + // configuration of passport is async + // Before we can bind the routes - we need the passport strategy + const passport = await configure(); + const absBuildPath = path.join(__dirname, '../../build'); + app.use(cors(corsOptions)); + app.set('trust proxy', 1); + app.use(limiter); + + app.use( + session({ + store: db.getSessionStore(), + secret: config.getCookieSecret() as string, + resave: false, + saveUninitialized: false, + cookie: { + secure: 'auto', + httpOnly: true, + maxAge: (config.getSessionMaxAgeHours() || DEFAULT_SESSION_MAX_AGE_HOURS) * 60 * 60 * 1000, + }, + }), + ); + if (config.getCSRFProtection() && process.env.NODE_ENV !== 'test') { + app.use( + lusca({ + csrf: { + cookie: { name: 'csrf' }, + }, + hsts: { maxAge: 31536000, includeSubDomains: true, preload: true }, + nosniff: true, + referrerPolicy: 'same-origin', + xframe: 'SAMEORIGIN', + xssProtection: true, + }), + ); + } + app.use(passport.initialize()); + app.use(passport.session()); + app.use(express.json()); + app.use(express.urlencoded({ extended: true })); + app.use('/', routes(proxy)); + app.use('/', express.static(absBuildPath)); + app.get('/*', (req, res) => { + res.sendFile(path.join(`${absBuildPath}/index.html`)); + }); + + return app; +} + +/** + * Starts the proxy service. + * @param {Proxy} proxy A reference to the proxy, used to restart it when necessary. + * @return {Promise} the express application (used for testing). + */ +async function start(proxy: Proxy) { + if (!proxy) { + console.warn("WARNING: proxy is null and can't be controlled by the API service"); + } + + const app = await createApp(proxy); + + _httpServer.listen(uiPort); + + console.log(`Service Listening on ${uiPort}`); + app.emit('ready'); + + return app; +} + +/** + * Stops the proxy service. 
+ */ +async function stop() { + console.log(`Stopping Service Listening on ${uiPort}`); + _httpServer.close(); +} + +export default { + start, + stop, + httpServer: _httpServer, +}; diff --git a/src/service/passport/activeDirectory.js b/src/service/passport/activeDirectory.ts similarity index 63% rename from src/service/passport/activeDirectory.js rename to src/service/passport/activeDirectory.ts index 28b8f0e54..6814bcacc 100644 --- a/src/service/passport/activeDirectory.js +++ b/src/service/passport/activeDirectory.ts @@ -1,18 +1,33 @@ -const ActiveDirectoryStrategy = require('passport-activedirectory'); -const ldaphelper = require('./ldaphelper'); +import ActiveDirectoryStrategy from 'passport-activedirectory'; +import { PassportStatic } from 'passport'; +import ActiveDirectory from 'activedirectory2'; +import { Request } from 'express'; -const type = 'activedirectory'; +import * as ldaphelper from './ldaphelper'; +import * as db from '../../db'; +import { getAuthMethods } from '../../config'; +import { ADProfile } from './types'; -const configure = (passport) => { - const db = require('../../db'); +export const type = 'activedirectory'; - // We can refactor this by normalizing auth strategy config and pass it directly into the configure() function, - // ideally when we convert this to TS. 
- const authMethods = require('../../config').getAuthMethods(); +export const configure = async (passport: PassportStatic): Promise => { + const authMethods = getAuthMethods(); const config = authMethods.find((method) => method.type.toLowerCase() === type); + + if (!config || !config.adConfig) { + throw new Error('AD authentication method not enabled'); + } + const adConfig = config.adConfig; - const { userGroup, adminGroup, domain } = config; + // Handle legacy config + const userGroup = adConfig.userGroup || config.userGroup; + const adminGroup = adConfig.adminGroup || config.adminGroup; + const domain = adConfig.domain || config.domain; + + if (!userGroup || !adminGroup || !domain) { + throw new Error('Invalid Active Directory configuration'); + } console.log(`AD User Group: ${userGroup}, AD Admin Group: ${adminGroup}`); @@ -24,7 +39,12 @@ const configure = (passport) => { integrated: false, ldap: adConfig, }, - async function (req, profile, ad, done) { + async function ( + req: Request & { user?: ADProfile }, + profile: ADProfile, + ad: ActiveDirectory, + done: (err: any, user: any) => void, + ) { try { profile.username = profile._json.sAMAccountName?.toLowerCase(); profile.email = profile._json.mail; @@ -43,8 +63,7 @@ const configure = (passport) => { const message = `User it not a member of ${userGroup}`; return done(message, null); } - } catch (err) { - console.log('ad test (isUser): e', err); + } catch (err: any) { const message = `An error occurred while checking if the user is a member of the user group: ${err.message}`; return done(message, null); } @@ -53,7 +72,7 @@ const configure = (passport) => { let isAdmin = false; try { isAdmin = await ldaphelper.isUserInAdGroup(req, profile, ad, domain, adminGroup); - } catch (err) { + } catch (err: any) { const message = `An error occurred while checking if the user is a member of the admin group: ${err.message}`; console.error(message, err); // don't return an error for this case as you may still be a user } @@ 
-72,7 +91,7 @@ const configure = (passport) => { await db.updateUser(user); return done(null, user); - } catch (err) { + } catch (err: any) { console.log(`Error authenticating AD user: ${err.message}`); return done(err, null); } @@ -80,16 +99,13 @@ const configure = (passport) => { ), ); - passport.serializeUser(function (user, done) { + passport.serializeUser(function (user: any, done: (err: any, user: any) => void) { done(null, user); }); - passport.deserializeUser(function (user, done) { + passport.deserializeUser(function (user: any, done: (err: any, user: any) => void) { done(null, user); }); - passport.type = 'ActiveDirectory'; return passport; }; - -module.exports = { configure, type }; diff --git a/src/service/passport/index.js b/src/service/passport/index.js deleted file mode 100644 index e1cc9e0b5..000000000 --- a/src/service/passport/index.js +++ /dev/null @@ -1,36 +0,0 @@ -const passport = require('passport'); -const local = require('./local'); -const activeDirectory = require('./activeDirectory'); -const oidc = require('./oidc'); -const config = require('../../config'); - -// Allows obtaining strategy config function and type -// Keep in mind to add AuthStrategy enum when refactoring this to TS -const authStrategies = { - local: local, - activedirectory: activeDirectory, - openidconnect: oidc, -}; - -const configure = async () => { - passport.initialize(); - - const authMethods = config.getAuthMethods(); - - for (const auth of authMethods) { - const strategy = authStrategies[auth.type.toLowerCase()]; - if (strategy && typeof strategy.configure === 'function') { - await strategy.configure(passport); - } - } - - if (authMethods.some((auth) => auth.type.toLowerCase() === 'local')) { - await local.createDefaultAdmin(); - } - - return passport; -}; - -const getPassport = () => passport; - -module.exports = { authStrategies, configure, getPassport }; diff --git a/src/service/passport/index.ts b/src/service/passport/index.ts new file mode 100644 index 
000000000..f5caeda8c --- /dev/null +++ b/src/service/passport/index.ts @@ -0,0 +1,39 @@ +import passport, { type PassportStatic } from 'passport'; +import * as local from './local'; +import * as activeDirectory from './activeDirectory'; +import * as oidc from './oidc'; +import * as config from '../../config'; +import { AuthenticationElement } from '../../config/generated/config'; + +type StrategyModule = { + configure: (passport: PassportStatic) => Promise; + createDefaultAdmin?: () => Promise; + type: string; +}; + +export const authStrategies: Record = { + local, + activedirectory: activeDirectory, + openidconnect: oidc, +}; + +export const configure = async (): Promise => { + passport.initialize(); + + const authMethods: AuthenticationElement[] = config.getAuthMethods(); + + for (const auth of authMethods) { + const strategy = authStrategies[auth.type.toLowerCase()]; + if (strategy && typeof strategy.configure === 'function') { + await strategy.configure(passport); + } + } + + if (authMethods.some((auth) => auth.type.toLowerCase() === 'local')) { + await local.createDefaultAdmin?.(); + } + + return passport; +}; + +export const getPassport = (): PassportStatic => passport; diff --git a/src/service/passport/jwtAuthHandler.js b/src/service/passport/jwtAuthHandler.js deleted file mode 100644 index 6ecc9250f..000000000 --- a/src/service/passport/jwtAuthHandler.js +++ /dev/null @@ -1,57 +0,0 @@ -const { assignRoles, validateJwt } = require('./jwtUtils'); - -/** - * Middleware function to handle JWT authentication. - * @param {*} overrideConfig optional configuration to override the default JWT configuration (e.g. for testing) - * @return {Function} the middleware function - */ -const jwtAuthHandler = (overrideConfig = null) => { - return async (req, res, next) => { - const apiAuthMethods = overrideConfig - ? 
[{ type: 'jwt', jwtConfig: overrideConfig }] - : require('../../config').getAPIAuthMethods(); - - const jwtAuthMethod = apiAuthMethods.find((method) => method.type.toLowerCase() === 'jwt'); - if (!overrideConfig && (!jwtAuthMethod || !jwtAuthMethod.enabled)) { - return next(); - } - - const token = req.header('Authorization'); - if (!token) { - return res.status(401).send('No token provided\n'); - } - - const { clientID, authorityURL, expectedAudience, roleMapping } = jwtAuthMethod.jwtConfig; - const audience = expectedAudience || clientID; - - if (!authorityURL) { - return res.status(500).send({ - message: 'JWT handler: authority URL is not configured\n', - }); - } - - if (!clientID) { - return res.status(500).send({ - message: 'JWT handler: client ID is not configured\n', - }); - } - - const tokenParts = token.split(' '); - const { verifiedPayload, error } = await validateJwt( - tokenParts[1], - authorityURL, - audience, - clientID, - ); - if (error) { - return res.status(401).send(error); - } - - req.user = verifiedPayload; - assignRoles(roleMapping, verifiedPayload, req.user); - - return next(); - }; -}; - -module.exports = jwtAuthHandler; diff --git a/src/service/passport/jwtAuthHandler.ts b/src/service/passport/jwtAuthHandler.ts new file mode 100644 index 000000000..bb312e40f --- /dev/null +++ b/src/service/passport/jwtAuthHandler.ts @@ -0,0 +1,81 @@ +import { assignRoles, validateJwt } from './jwtUtils'; +import type { Request, Response, NextFunction } from 'express'; +import { getAPIAuthMethods } from '../../config'; +import { JwtConfig, AuthenticationElement, Type } from '../../config/generated/config'; +import { RoleMapping } from './types'; + +export const type = 'jwt'; + +export const jwtAuthHandler = (overrideConfig: JwtConfig | null = null) => { + return async (req: Request, res: Response, next: NextFunction): Promise => { + const apiAuthMethods: AuthenticationElement[] = overrideConfig + ? 
[{ type: 'jwt' as Type, enabled: true, jwtConfig: overrideConfig }] + : getAPIAuthMethods(); + + const jwtAuthMethod = apiAuthMethods.find((method) => method.type.toLowerCase() === type); + + if (!jwtAuthMethod || !jwtAuthMethod.enabled) { + return next(); + } + + if (req.isAuthenticated && req.isAuthenticated()) { + return next(); + } + + const token = req.header('Authorization'); + if (!token) { + res.status(401).send('No token provided\n'); + return; + } + + if (!jwtAuthMethod.jwtConfig) { + res.status(500).send({ + message: 'JWT configuration is missing\n', + }); + console.log('JWT configuration is missing\n'); + return; + } + + const config = jwtAuthMethod.jwtConfig!; + const { clientID, authorityURL, expectedAudience, roleMapping } = config; + const audience = expectedAudience || clientID; + + if (!authorityURL) { + res.status(500).send({ + message: 'OIDC authority URL is not configured\n', + }); + console.log('OIDC authority URL is not configured\n'); + return; + } + + if (!clientID) { + res.status(500).send({ + message: 'OIDC client ID is not configured\n', + }); + console.log('OIDC client ID is not configured\n'); + return; + } + + const tokenParts = token.split(' '); + const accessToken = tokenParts.length === 2 ? 
tokenParts[1] : tokenParts[0]; + + const { verifiedPayload, error } = await validateJwt( + accessToken, + authorityURL, + audience, + clientID, + ); + + if (error || !verifiedPayload) { + res.status(401).send(error || 'JWT validation failed\n'); + console.log('JWT validation failed\n'); + return; + } + + req.user = verifiedPayload; + assignRoles(roleMapping as RoleMapping, verifiedPayload, req.user); + + console.log('JWT validation successful\n'); + next(); + }; +}; diff --git a/src/service/passport/jwtUtils.js b/src/service/passport/jwtUtils.js deleted file mode 100644 index a1f8576fc..000000000 --- a/src/service/passport/jwtUtils.js +++ /dev/null @@ -1,99 +0,0 @@ -const axios = require('axios'); -const jwt = require('jsonwebtoken'); -const jwkToPem = require('jwk-to-pem'); - -/** - * Obtain the JSON Web Key Set (JWKS) from the OIDC authority. - * @param {string} authorityUrl the OIDC authority URL. e.g. https://login.microsoftonline.com/{tenantId} - * @return {Promise} the JWKS keys - */ -async function getJwks(authorityUrl) { - try { - const { data } = await axios.get(`${authorityUrl}/.well-known/openid-configuration`); - const jwksUri = data.jwks_uri; - - const { data: jwks } = await axios.get(jwksUri); - return jwks.keys; - } catch (error) { - console.error('Error fetching JWKS:', error); - throw new Error('Failed to fetch JWKS'); - } -} - -/** - * Validate a JWT token using the OIDC configuration. - * @param {*} token the JWT token - * @param {*} authorityUrl the OIDC authority URL - * @param {*} clientID the OIDC client ID - * @param {*} expectedAudience the expected audience for the token - * @param {*} getJwksInject the getJwks function to use (for dependency injection). Defaults to the built-in getJwks function. 
- * @return {Promise} the verified payload or an error - */ -async function validateJwt( - token, - authorityUrl, - clientID, - expectedAudience, - getJwksInject = getJwks, -) { - try { - const jwks = await getJwksInject(authorityUrl); - - const decodedHeader = await jwt.decode(token, { complete: true }); - if (!decodedHeader || !decodedHeader.header || !decodedHeader.header.kid) { - throw new Error('Invalid JWT: Missing key ID (kid)'); - } - - const { kid } = decodedHeader.header; - const jwk = jwks.find((key) => key.kid === kid); - if (!jwk) { - throw new Error('No matching key found in JWKS'); - } - - const pubKey = jwkToPem(jwk); - - const verifiedPayload = jwt.verify(token, pubKey, { - algorithms: ['RS256'], - issuer: authorityUrl, - audience: expectedAudience, - }); - - if (verifiedPayload.azp !== clientID) { - throw new Error('JWT client ID does not match'); - } - - return { verifiedPayload }; - } catch (error) { - const errorMessage = `JWT validation failed: ${error.message}\n`; - console.error(errorMessage); - return { error: errorMessage }; - } -} - -/** - * Assign roles to the user based on the role mappings provided in the jwtConfig. - * - * If no role mapping is provided, the user will not have any roles assigned (i.e. user.admin = false). 
- * @param {*} roleMapping the role mapping configuration - * @param {*} payload the JWT payload - * @param {*} user the req.user object to assign roles to - */ -function assignRoles(roleMapping, payload, user) { - if (roleMapping) { - for (const role of Object.keys(roleMapping)) { - const claimValuePair = roleMapping[role]; - const claim = Object.keys(claimValuePair)[0]; - const value = claimValuePair[claim]; - - if (payload[claim] && payload[claim] === value) { - user[role] = true; - } - } - } -} - -module.exports = { - getJwks, - validateJwt, - assignRoles, -}; diff --git a/src/service/passport/jwtUtils.ts b/src/service/passport/jwtUtils.ts new file mode 100644 index 000000000..8fcf214e4 --- /dev/null +++ b/src/service/passport/jwtUtils.ts @@ -0,0 +1,103 @@ +import axios from 'axios'; +import jwt, { type JwtPayload } from 'jsonwebtoken'; +import jwkToPem from 'jwk-to-pem'; + +import { JwkKey, JwksResponse, JwtValidationResult, RoleMapping } from './types'; + +/** + * Obtain the JSON Web Key Set (JWKS) from the OIDC authority. + * @param {string} authorityUrl the OIDC authority URL. e.g. https://login.microsoftonline.com/{tenantId} + * @return {Promise} the JWKS keys + */ +export async function getJwks(authorityUrl: string): Promise { + try { + const { data } = await axios.get(`${authorityUrl}/.well-known/openid-configuration`); + const jwksUri: string = data.jwks_uri; + + const { data: jwks }: { data: JwksResponse } = await axios.get(jwksUri); + return jwks.keys; + } catch (error) { + console.error('Error fetching JWKS:', error); + throw new Error('Failed to fetch JWKS'); + } +} + +/** + * Validate a JWT token using the OIDC configuration. + * @param {string} token the JWT token + * @param {string} authorityUrl the OIDC authority URL + * @param {string} expectedAudience the expected audience for the token + * @param {string} clientID the OIDC client ID + * @param {Function} getJwksInject the getJwks function to use (for dependency injection). 
Defaults to the built-in getJwks function. + * @return {Promise} the verified payload or an error + */ +export async function validateJwt( + token: string, + authorityUrl: string, + expectedAudience: string, + clientID: string, + getJwksInject: (authorityUrl: string) => Promise = getJwks, +): Promise { + try { + const jwks = await getJwksInject(authorityUrl); + + const decoded = jwt.decode(token, { complete: true }); + if (!decoded || typeof decoded !== 'object' || !decoded.header?.kid) { + throw new Error('Invalid JWT: Missing key ID (kid)'); + } + + const { kid } = decoded.header; + const jwk = jwks.find((key) => key.kid === kid); + if (!jwk) { + throw new Error('No matching key found in JWKS'); + } + + const pubKey = jwkToPem(jwk as any); + + const verifiedPayload = jwt.verify(token, pubKey, { + algorithms: ['RS256'], + issuer: authorityUrl, + audience: expectedAudience, + }); + + if (typeof verifiedPayload === 'string') { + throw new Error('Unexpected string payload in JWT'); + } + + if (verifiedPayload.azp && verifiedPayload.azp !== clientID) { + throw new Error('JWT client ID does not match'); + } + + return { verifiedPayload, error: null }; + } catch (error: any) { + const errorMessage = `JWT validation failed: ${error.message}\n`; + console.error(errorMessage); + return { error: errorMessage, verifiedPayload: null }; + } +} + +/** + * Assign roles to the user based on the role mappings provided in the jwtConfig. + * + * If no role mapping is provided, the user will not have any roles assigned (i.e. user.admin = false). 
+ * @param {RoleMapping} roleMapping the role mapping configuration + * @param {JwtPayload} payload the JWT payload + * @param {Record} user the req.user object to assign roles to + */ +export function assignRoles( + roleMapping: RoleMapping | undefined, + payload: JwtPayload, + user: Record, +): void { + if (!roleMapping) return; + + for (const role of Object.keys(roleMapping)) { + const claimMap = roleMapping[role]; + const claim = Object.keys(claimMap)[0]; + const value = claimMap[claim]; + + if (payload[claim] && payload[claim] === value) { + user[role] = true; + } + } +} diff --git a/src/service/passport/ldaphelper.js b/src/service/passport/ldaphelper.js deleted file mode 100644 index 00ba01f00..000000000 --- a/src/service/passport/ldaphelper.js +++ /dev/null @@ -1,51 +0,0 @@ -const thirdpartyApiConfig = require('../../config').getAPIs(); -const axios = require('axios'); - -const isUserInAdGroup = (req, profile, ad, domain, name) => { - // determine, via config, if we're using HTTP or AD directly - if (thirdpartyApiConfig?.ls?.userInADGroup) { - return isUserInAdGroupViaHttp(profile.username, domain, name); - } else { - return isUserInAdGroupViaAD(req, profile, ad, domain, name); - } -}; - -const isUserInAdGroupViaAD = (req, profile, ad, domain, name) => { - return new Promise((resolve, reject) => { - ad.isUserMemberOf(profile.username, name, function (err, isMember) { - if (err) { - const msg = 'ERROR isUserMemberOf: ' + JSON.stringify(err); - reject(msg); - } else { - console.log(profile.username + ' isMemberOf ' + name + ': ' + isMember); - resolve(isMember); - } - }); - }); -}; - -const isUserInAdGroupViaHttp = (id, domain, name) => { - const url = String(thirdpartyApiConfig.ls.userInADGroup) - .replace('', domain) - .replace('', name) - .replace('', id); - - const client = axios.create({ - responseType: 'json', - headers: { - 'content-type': 'application/json', - }, - }); - - console.log(`checking if user is in group ${url}`); - return client - .get(url) 
- .then((res) => res.data) - .catch(() => { - return false; - }); -}; - -module.exports = { - isUserInAdGroup, -}; diff --git a/src/service/passport/ldaphelper.ts b/src/service/passport/ldaphelper.ts new file mode 100644 index 000000000..f75de2ba2 --- /dev/null +++ b/src/service/passport/ldaphelper.ts @@ -0,0 +1,64 @@ +import axios from 'axios'; +import type { Request } from 'express'; +import ActiveDirectory from 'activedirectory2'; +import { getAPIs } from '../../config'; +import { ADProfile } from './types'; + +const thirdpartyApiConfig = getAPIs(); + +export const isUserInAdGroup = ( + req: Request & { user?: ADProfile }, + profile: ADProfile, + ad: ActiveDirectory, + domain: string, + name: string, +): Promise => { + // determine, via config, if we're using HTTP or AD directly + if (thirdpartyApiConfig.ls?.userInADGroup) { + return isUserInAdGroupViaHttp(profile.username || '', domain, name); + } else { + return isUserInAdGroupViaAD(req, profile, ad, domain, name); + } +}; + +const isUserInAdGroupViaAD = ( + req: Request & { user?: ADProfile }, + profile: ADProfile, + ad: ActiveDirectory, + domain: string, + name: string, +): Promise => { + return new Promise((resolve, reject) => { + ad.isUserMemberOf(profile.username || '', name, function (err, isMember) { + if (err) { + const msg = 'ERROR isUserMemberOf: ' + JSON.stringify(err); + reject(msg); + } else { + console.log(profile.username + ' isMemberOf ' + name + ': ' + isMember); + resolve(isMember); + } + }); + }); +}; + +const isUserInAdGroupViaHttp = (id: string, domain: string, name: string): Promise => { + const url = String(thirdpartyApiConfig.ls?.userInADGroup) + .replace('', domain) + .replace('', name) + .replace('', id); + + const client = axios.create({ + responseType: 'json', + headers: { + 'content-type': 'application/json', + }, + }); + + console.log(`checking if user is in group ${url}`); + return client + .get(url) + .then((res) => Boolean(res.data)) + .catch(() => { + return false; + }); +}; 
diff --git a/src/service/passport/local.js b/src/service/passport/local.js deleted file mode 100644 index 588278bca..000000000 --- a/src/service/passport/local.js +++ /dev/null @@ -1,59 +0,0 @@ -const bcrypt = require('bcryptjs'); -const LocalStrategy = require('passport-local').Strategy; -const db = require('../../db'); - -const type = 'local'; - -const configure = async (passport) => { - passport.use( - new LocalStrategy(async (username, password, done) => { - try { - const user = await db.findUser(username); - if (!user) { - return done(null, false, { message: 'Incorrect username.' }); - } - - const passwordCorrect = await bcrypt.compare(password, user.password); - if (!passwordCorrect) { - return done(null, false, { message: 'Incorrect password.' }); - } - - return done(null, user); - } catch (err) { - return done(err); - } - }), - ); - - passport.serializeUser((user, done) => { - done(null, user.username); - }); - - passport.deserializeUser(async (username, done) => { - try { - const user = await db.findUser(username); - done(null, user); - } catch (err) { - done(err, null); - } - }); - - return passport; -}; - -/** - * Create the default admin and regular test users. 
- */ -const createDefaultAdmin = async () => { - const createIfNotExists = async (username, password, email, type, isAdmin) => { - const user = await db.findUser(username); - if (!user) { - await db.createUser(username, password, email, type, isAdmin); - } - }; - - await createIfNotExists('admin', 'admin', 'admin@place.com', 'none', true); - await createIfNotExists('user', 'user', 'user@place.com', 'none', false); -}; - -module.exports = { configure, createDefaultAdmin, type }; diff --git a/src/service/passport/local.ts b/src/service/passport/local.ts new file mode 100644 index 000000000..10324f772 --- /dev/null +++ b/src/service/passport/local.ts @@ -0,0 +1,70 @@ +import bcrypt from 'bcryptjs'; +import { Strategy as LocalStrategy } from 'passport-local'; +import type { PassportStatic } from 'passport'; +import * as db from '../../db'; + +export const type = 'local'; + +export const configure = async (passport: PassportStatic): Promise => { + passport.use( + new LocalStrategy( + async ( + username: string, + password: string, + done: (err: any, user?: any, info?: any) => void, + ) => { + try { + const user = await db.findUser(username); + if (!user) { + return done(null, false, { message: 'Incorrect username.' }); + } + + const passwordCorrect = await bcrypt.compare(password, user.password ?? ''); + if (!passwordCorrect) { + return done(null, false, { message: 'Incorrect password.' }); + } + + return done(null, user); + } catch (err) { + return done(err); + } + }, + ), + ); + + passport.serializeUser((user: any, done) => { + done(null, user.username); + }); + + passport.deserializeUser(async (username: string, done) => { + try { + const user = await db.findUser(username); + done(null, user); + } catch (err) { + done(err, null); + } + }); + + return passport; +}; + +/** + * Create the default admin and regular test users. 
+ */ +export const createDefaultAdmin = async () => { + const createIfNotExists = async ( + username: string, + password: string, + email: string, + type: string, + isAdmin: boolean, + ) => { + const user = await db.findUser(username); + if (!user) { + await db.createUser(username, password, email, type, isAdmin); + } + }; + + await createIfNotExists('admin', 'admin', 'admin@place.com', 'none', true); + await createIfNotExists('user', 'user', 'user@place.com', 'none', false); +}; diff --git a/src/service/passport/oidc.js b/src/service/passport/oidc.js deleted file mode 100644 index 7e2aa5ee0..000000000 --- a/src/service/passport/oidc.js +++ /dev/null @@ -1,125 +0,0 @@ -const db = require('../../db'); - -const type = 'openidconnect'; - -const configure = async (passport) => { - // Temp fix for ERR_REQUIRE_ESM, will be changed when we refactor to ESM - const { discovery, fetchUserInfo } = await import('openid-client'); - const { Strategy } = await import('openid-client/passport'); - const authMethods = require('../../config').getAuthMethods(); - const oidcConfig = authMethods.find( - (method) => method.type.toLowerCase() === 'openidconnect', - )?.oidcConfig; - const { issuer, clientID, clientSecret, callbackURL, scope } = oidcConfig; - - if (!oidcConfig || !oidcConfig.issuer) { - throw new Error('Missing OIDC issuer in configuration'); - } - - const server = new URL(issuer); - let config; - - try { - config = await discovery(server, clientID, clientSecret); - } catch (error) { - console.error('Error during OIDC discovery:', error); - throw new Error('OIDC setup error (discovery): ' + error.message); - } - - try { - const strategy = new Strategy({ callbackURL, config, scope }, async (tokenSet, done) => { - // Validate token sub for added security - const idTokenClaims = tokenSet.claims(); - const expectedSub = idTokenClaims.sub; - const userInfo = await fetchUserInfo(config, tokenSet.access_token, expectedSub); - handleUserAuthentication(userInfo, done); - }); - - // 
currentUrl must be overridden to match the callback URL - strategy.currentUrl = function (request) { - const callbackUrl = new URL(callbackURL); - const currentUrl = Strategy.prototype.currentUrl.call(this, request); - currentUrl.host = callbackUrl.host; - currentUrl.protocol = callbackUrl.protocol; - return currentUrl; - }; - - // Prevent default strategy name from being overridden with the server host - passport.use(type, strategy); - - passport.serializeUser((user, done) => { - done(null, user.oidcId || user.username); - }); - - passport.deserializeUser(async (id, done) => { - try { - const user = await db.findUserByOIDC(id); - done(null, user); - } catch (err) { - done(err); - } - }); - - return passport; - } catch (error) { - console.error('Error during OIDC passport setup:', error); - throw new Error('OIDC setup error (strategy): ' + error.message); - } -}; - -/** - * Handles user authentication with OIDC. - * @param {Object} userInfo the OIDC user info object - * @param {Function} done the callback function - * @return {Promise} a promise with the authenticated user or an error - */ -const handleUserAuthentication = async (userInfo, done) => { - console.log('handleUserAuthentication called'); - try { - const user = await db.findUserByOIDC(userInfo.sub); - - if (!user) { - const email = safelyExtractEmail(userInfo); - if (!email) return done(new Error('No email found in OIDC profile')); - - const newUser = { - username: getUsername(email), - email, - oidcId: userInfo.sub, - }; - - await db.createUser(newUser.username, null, newUser.email, 'Edit me', false, newUser.oidcId); - return done(null, newUser); - } - - return done(null, user); - } catch (err) { - return done(err); - } -}; - -/** - * Extracts email from OIDC profile. - * This function is necessary because OIDC providers have different ways of storing emails. 
- * @param {object} profile the profile object from OIDC provider - * @return {string | null} the email address - */ -const safelyExtractEmail = (profile) => { - return ( - profile.email || (profile.emails && profile.emails.length > 0 ? profile.emails[0].value : null) - ); -}; - -/** - * Generates a username from email address. - * This helps differentiate users within the specific OIDC provider. - * Note: This is incompatible with multiple providers. Ideally, users are identified by - * OIDC ID (requires refactoring the database). - * @param {string} email the email address - * @return {string} the username - */ -const getUsername = (email) => { - return email ? email.split('@')[0] : ''; -}; - -module.exports = { configure, type }; diff --git a/src/service/passport/oidc.ts b/src/service/passport/oidc.ts new file mode 100644 index 000000000..9afe379b8 --- /dev/null +++ b/src/service/passport/oidc.ts @@ -0,0 +1,130 @@ +import * as db from '../../db'; +import { PassportStatic } from 'passport'; +import { getAuthMethods } from '../../config'; +import { type UserInfoResponse } from 'openid-client'; + +export const type = 'openidconnect'; + +export const configure = async (passport: PassportStatic): Promise => { + // Use dynamic imports to avoid ESM/CommonJS issues + const { discovery, fetchUserInfo } = await import('openid-client'); + // @ts-expect-error - throws error due to missing type definitions + const { Strategy } = await import('openid-client/passport'); + + const authMethods = getAuthMethods(); + const oidcConfig = authMethods.find((method) => method.type.toLowerCase() === type)?.oidcConfig; + + if (!oidcConfig || !oidcConfig.issuer) { + throw new Error('Missing OIDC issuer in configuration'); + } + + const { issuer, clientID, clientSecret, callbackURL, scope } = oidcConfig; + + const server = new URL(issuer); + let config; + + try { + config = await discovery(server, clientID, clientSecret); + } catch (error: any) { + console.error('Error during OIDC 
discovery:', error); + throw new Error('OIDC setup error (discovery): ' + error.message); + } + + try { + const strategy = new Strategy( + { callbackURL, config, scope }, + async (tokenSet: any, done: (err: any, user?: any) => void) => { + const idTokenClaims = tokenSet.claims(); + const expectedSub = idTokenClaims.sub; + const userInfo = await fetchUserInfo(config, tokenSet.access_token, expectedSub); + handleUserAuthentication(userInfo, done); + }, + ); + + strategy.currentUrl = function (request: any) { + const callbackUrl = new URL(callbackURL); + const currentUrl = Strategy.prototype.currentUrl.call(this, request); + currentUrl.host = callbackUrl.host; + currentUrl.protocol = callbackUrl.protocol; + return currentUrl; + }; + + passport.use(type, strategy); + + passport.serializeUser((user: any, done) => { + done(null, user.oidcId || user.username); + }); + + passport.deserializeUser(async (id: string, done) => { + try { + const user = await db.findUserByOIDC(id); + done(null, user); + } catch (err) { + done(err as Error); + } + }); + + return passport; + } catch (error: any) { + console.error('Error during OIDC passport setup:', error); + throw new Error('OIDC setup error (strategy): ' + error.message); + } +}; + +/** + * Handles user authentication with OIDC. 
+ * @param {UserInfoResponse} userInfo - The user info response from the OIDC provider + * @param {Function} done - The callback function to handle the user authentication + * @return {Promise} - A promise that resolves when the user authentication is complete + */ +const handleUserAuthentication = async ( + userInfo: UserInfoResponse, + done: (err: any, user?: any) => void, +): Promise => { + console.log('handleUserAuthentication called'); + try { + const user = await db.findUserByOIDC(userInfo.sub); + + if (!user) { + const email = safelyExtractEmail(userInfo); + if (!email) return done(new Error('No email found in OIDC profile')); + + const newUser = { + username: getUsername(email), + email, + oidcId: userInfo.sub, + }; + + await db.createUser(newUser.username, '', newUser.email, 'Edit me', false, newUser.oidcId); + return done(null, newUser); + } + + return done(null, user); + } catch (err) { + return done(err); + } +}; + +/** + * Extracts email from OIDC profile. + * Different providers use different fields to store the email. + * @param {any} profile - The user profile from the OIDC provider + * @return {string | null} - The email address from the profile + */ +export const safelyExtractEmail = (profile: any): string | null => { + return ( + profile.email || (profile.emails && profile.emails.length > 0 ? profile.emails[0].value : null) + ); +}; + +/** + * Generates a username from an email address. + * This helps differentiate users within the specific OIDC provider. + * Note: This is incompatible with multiple providers. Ideally, users are identified by + * OIDC ID (requires refactoring the database). + * @param {string} email - The email address to generate a username from + * @return {string} - The username generated from the email address + */ +export const getUsername = (email: string): string => { + return email ? 
email.split('@')[0] : ''; +}; diff --git a/src/service/passport/types.ts b/src/service/passport/types.ts new file mode 100644 index 000000000..d433c782f --- /dev/null +++ b/src/service/passport/types.ts @@ -0,0 +1,55 @@ +import { JwtPayload } from 'jsonwebtoken'; + +export type JwkKey = { + kty: string; + kid: string; + use: string; + n?: string; + e?: string; + x5c?: string[]; + [key: string]: any; +}; + +export type JwksResponse = { + keys: JwkKey[]; +}; + +export type JwtValidationResult = { + verifiedPayload: JwtPayload | null; + error: string | null; +}; + +/** + * The JWT role mapping configuration. + * + * The key is the in-app role name (e.g. "admin"). + * The value is a pair of claim name and expected value. + * + * For example, the following role mapping will assign the "admin" role to users whose "name" claim is "John Doe": + * + * { + * "admin": { + * "name": "John Doe" + * } + * } + */ +export type RoleMapping = Record>; + +export type ADProfile = { + id?: string; + username?: string; + email?: string; + displayName?: string; + admin?: boolean; + _json: ADProfileJson; +}; + +export type ADProfileJson = { + sAMAccountName?: string; + mail?: string; + title?: string; + userPrincipalName?: string; + [key: string]: any; +}; + +export type ADVerifyCallback = (err: Error | null, user: ADProfile | null) => void; diff --git a/src/service/routes/auth.js b/src/service/routes/auth.ts similarity index 69% rename from src/service/routes/auth.js rename to src/service/routes/auth.ts index cba2fca69..f6347eb4f 100644 --- a/src/service/routes/auth.js +++ b/src/service/routes/auth.ts @@ -1,16 +1,24 @@ -const express = require('express'); -const router = new express.Router(); -const passport = require('../passport').getPassport(); -const { getAuthMethods } = require('../../config'); -const passportLocal = require('../passport/local'); -const passportAD = require('../passport/activeDirectory'); -const authStrategies = require('../passport').authStrategies; -const db = 
require('../../db'); -const { toPublicUser } = require('./publicApi'); +import express, { Request, Response, NextFunction } from 'express'; +import { getPassport, authStrategies } from '../passport'; +import { getAuthMethods } from '../../config'; + +import * as db from '../../db'; +import * as passportLocal from '../passport/local'; +import * as passportAD from '../passport/activeDirectory'; + +import { User } from '../../db/types'; +import { AuthenticationElement } from '../../config/generated/config'; + +import { toPublicUser } from './publicApi'; +import { isAdminUser } from './utils'; + +const router = express.Router(); +const passport = getPassport(); + const { GIT_PROXY_UI_HOST: uiHost = 'http://localhost', GIT_PROXY_UI_PORT: uiPort = 3000 } = process.env; -router.get('/', (req, res) => { +router.get('/', (_req: Request, res: Response) => { res.status(200).json({ login: { action: 'post', @@ -35,7 +43,7 @@ const appropriateLoginStrategies = [passportLocal.type, passportAD.type]; const getLoginStrategy = () => { // returns only enabled auth methods // returns at least one enabled auth method - const enabledAppropriateLoginStrategies = getAuthMethods().filter((am) => + const enabledAppropriateLoginStrategies = getAuthMethods().filter((am: AuthenticationElement) => appropriateLoginStrategies.includes(am.type.toLowerCase()), ); // for where no login strategies which work for /login are enabled @@ -47,10 +55,9 @@ const getLoginStrategy = () => { return enabledAppropriateLoginStrategies[0].type.toLowerCase(); }; -const loginSuccessHandler = () => async (req, res) => { +const loginSuccessHandler = () => async (req: Request, res: Response) => { try { - const currentUser = { ...req.user }; - delete currentUser.password; + const currentUser = toPublicUser({ ...req.user } as User); console.log( `serivce.routes.auth.login: user logged in, username=${ currentUser.username @@ -58,7 +65,7 @@ const loginSuccessHandler = () => async (req, res) => { ); res.send({ message: 
'success', - user: toPublicUser(currentUser), + user: currentUser, }); } catch (e) { console.log(`service.routes.auth.login: Error logging user in ${JSON.stringify(e)}`); @@ -82,7 +89,7 @@ router.get('/config', (req, res) => { // TODO: if providing separate auth methods, inform the frontend so it has relevant UI elements and appropriate client-side behavior router.post( '/login', - (req, res, next) => { + (req: Request, res: Response, next: NextFunction) => { const authType = getLoginStrategy(); if (authType === null) { res.status(403).send('Username and Password based Login is not enabled at this time').end(); @@ -96,8 +103,8 @@ router.post( router.get('/openidconnect', passport.authenticate(authStrategies['openidconnect'].type)); -router.get('/openidconnect/callback', (req, res, next) => { - passport.authenticate(authStrategies['openidconnect'].type, (err, user, info) => { +router.get('/openidconnect/callback', (req: Request, res: Response, next: NextFunction) => { + passport.authenticate(authStrategies['openidconnect'].type, (err: any, user: any, info: any) => { if (err) { console.error('Authentication error:', err); return res.status(401).end(); @@ -117,28 +124,32 @@ router.get('/openidconnect/callback', (req, res, next) => { })(req, res, next); }); -router.post('/logout', (req, res, next) => { - req.logout(req.user, (err) => { +router.post('/logout', (req: Request, res: Response, next: NextFunction) => { + req.logout((err: any) => { if (err) return next(err); }); res.clearCookie('connect.sid'); res.send({ isAuth: req.isAuthenticated(), user: req.user }); }); -router.get('/profile', async (req, res) => { +router.get('/profile', async (req: Request, res: Response) => { if (req.user) { - const userVal = await db.findUser(req.user.username); + const userVal = await db.findUser((req.user as User).username); + if (!userVal) { + res.status(400).send('Error: Logged in user not found').end(); + return; + } res.send(toPublicUser(userVal)); } else { 
res.status(401).end(); } }); -router.post('/gitAccount', async (req, res) => { +router.post('/gitAccount', async (req: Request, res: Response) => { if (req.user) { try { let username = - req.body.username == null || req.body.username == 'undefined' + req.body.username == null || req.body.username === 'undefined' ? req.body.id : req.body.username; username = username?.split('@')[0]; @@ -148,17 +159,23 @@ router.post('/gitAccount', async (req, res) => { return; } - const reqUser = await db.findUser(req.user.username); - if (username !== reqUser.username && !reqUser.admin) { + const reqUser = await db.findUser((req.user as User).username); + if (username !== reqUser?.username && !reqUser?.admin) { res.status(403).send('Error: You must be an admin to update a different account').end(); return; } + const user = await db.findUser(username); + if (!user) { + res.status(400).send('Error: User not found').end(); + return; + } + console.log('Adding gitAccount' + req.body.gitAccount); user.gitAccount = req.body.gitAccount; db.updateUser(user); res.status(200).end(); - } catch (e) { + } catch (e: any) { res .status(500) .send({ @@ -171,29 +188,35 @@ router.post('/gitAccount', async (req, res) => { } }); -router.get('/me', async (req, res) => { +router.get('/me', async (req: Request, res: Response) => { if (req.user) { - const userVal = await db.findUser(req.user.username); + const userVal = await db.findUser((req.user as User).username); + if (!userVal) { + res.status(400).send('Error: Logged in user not found').end(); + return; + } res.send(toPublicUser(userVal)); } else { res.status(401).end(); } }); -router.post('/create-user', async (req, res) => { - if (!req.user || !req.user.admin) { - return res.status(401).send({ +router.post('/create-user', async (req: Request, res: Response) => { + if (!isAdminUser(req.user)) { + res.status(401).send({ message: 'You are not authorized to perform this action...', }); + return; } try { const { username, password, email, gitAccount, 
admin: isAdmin = false } = req.body; if (!username || !password || !email || !gitAccount) { - return res.status(400).send({ + res.status(400).send({ message: 'Missing required fields: username, password, email, and gitAccount are required', }); + return; } await db.createUser(username, password, email, gitAccount, isAdmin); @@ -201,7 +224,7 @@ router.post('/create-user', async (req, res) => { message: 'User created successfully', username, }); - } catch (error) { + } catch (error: any) { console.error('Error creating user:', error); res.status(400).send({ message: error.message || 'Failed to create user', @@ -209,9 +232,4 @@ router.post('/create-user', async (req, res) => { } }); -module.exports = router; - -module.exports = { - router, - loginSuccessHandler, -}; +export default { router, loginSuccessHandler }; diff --git a/src/service/routes/config.js b/src/service/routes/config.js deleted file mode 100644 index e80d70b5b..000000000 --- a/src/service/routes/config.js +++ /dev/null @@ -1,22 +0,0 @@ -const express = require('express'); -const router = new express.Router(); - -const config = require('../../config'); - -router.get('/attestation', function ({ res }) { - res.send(config.getAttestationConfig()); -}); - -router.get('/urlShortener', function ({ res }) { - res.send(config.getURLShortener()); -}); - -router.get('/contactEmail', function ({ res }) { - res.send(config.getContactEmail()); -}); - -router.get('/uiRouteAuth', function ({ res }) { - res.send(config.getUIRouteAuth()); -}); - -module.exports = router; diff --git a/src/service/routes/config.ts b/src/service/routes/config.ts new file mode 100644 index 000000000..0d8796fde --- /dev/null +++ b/src/service/routes/config.ts @@ -0,0 +1,22 @@ +import express, { Request, Response } from 'express'; +import * as config from '../../config'; + +const router = express.Router(); + +router.get('/attestation', (_req: Request, res: Response) => { + res.send(config.getAttestationConfig()); +}); + 
+router.get('/urlShortener', (_req: Request, res: Response) => { + res.send(config.getURLShortener()); +}); + +router.get('/contactEmail', (_req: Request, res: Response) => { + res.send(config.getContactEmail()); +}); + +router.get('/uiRouteAuth', (_req: Request, res: Response) => { + res.send(config.getUIRouteAuth()); +}); + +export default router; diff --git a/src/service/routes/healthcheck.js b/src/service/routes/healthcheck.js deleted file mode 100644 index 4745a8275..000000000 --- a/src/service/routes/healthcheck.js +++ /dev/null @@ -1,10 +0,0 @@ -const express = require('express'); -const router = new express.Router(); - -router.get('/', function (req, res) { - res.send({ - message: 'ok', - }); -}); - -module.exports = router; diff --git a/src/service/routes/healthcheck.ts b/src/service/routes/healthcheck.ts new file mode 100644 index 000000000..5a93bf0c9 --- /dev/null +++ b/src/service/routes/healthcheck.ts @@ -0,0 +1,11 @@ +import express, { Request, Response } from 'express'; + +const router = express.Router(); + +router.get('/', (_req: Request, res: Response) => { + res.send({ + message: 'ok', + }); +}); + +export default router; diff --git a/src/service/routes/home.js b/src/service/routes/home.js deleted file mode 100644 index ce11503f6..000000000 --- a/src/service/routes/home.js +++ /dev/null @@ -1,14 +0,0 @@ -const express = require('express'); -const router = new express.Router(); - -const resource = { - healthcheck: '/api/v1/healthcheck', - push: '/api/v1/push', - auth: '/api/auth', -}; - -router.get('/', function (req, res) { - res.send(resource); -}); - -module.exports = router; diff --git a/src/service/routes/home.ts b/src/service/routes/home.ts new file mode 100644 index 000000000..d0504bd7e --- /dev/null +++ b/src/service/routes/home.ts @@ -0,0 +1,15 @@ +import express, { Request, Response } from 'express'; + +const router = express.Router(); + +const resource = { + healthcheck: '/api/v1/healthcheck', + push: '/api/v1/push', + auth: '/api/auth', 
+}; + +router.get('/', (_req: Request, res: Response) => { + res.send(resource); +}); + +export default router; diff --git a/src/service/routes/index.js b/src/service/routes/index.js deleted file mode 100644 index e2e0cf1a8..000000000 --- a/src/service/routes/index.js +++ /dev/null @@ -1,23 +0,0 @@ -const express = require('express'); -const auth = require('./auth'); -const push = require('./push'); -const home = require('./home'); -const repo = require('./repo'); -const users = require('./users'); -const healthcheck = require('./healthcheck'); -const config = require('./config'); -const jwtAuthHandler = require('../passport/jwtAuthHandler'); - -const routes = (proxy) => { - const router = new express.Router(); - router.use('/api', home); - router.use('/api/auth', auth.router); - router.use('/api/v1/healthcheck', healthcheck); - router.use('/api/v1/push', jwtAuthHandler(), push); - router.use('/api/v1/repo', jwtAuthHandler(), repo(proxy)); - router.use('/api/v1/user', jwtAuthHandler(), users); - router.use('/api/v1/config', config); - return router; -}; - -module.exports = routes; diff --git a/src/service/routes/index.ts b/src/service/routes/index.ts new file mode 100644 index 000000000..23b63b02a --- /dev/null +++ b/src/service/routes/index.ts @@ -0,0 +1,23 @@ +import express from 'express'; +import auth from './auth'; +import push from './push'; +import home from './home'; +import repo from './repo'; +import users from './users'; +import healthcheck from './healthcheck'; +import config from './config'; +import { jwtAuthHandler } from '../passport/jwtAuthHandler'; + +const routes = (proxy: any) => { + const router = express.Router(); + router.use('/api', home); + router.use('/api/auth', auth.router); + router.use('/api/v1/healthcheck', healthcheck); + router.use('/api/v1/push', jwtAuthHandler(), push); + router.use('/api/v1/repo', jwtAuthHandler(), repo(proxy)); + router.use('/api/v1/user', jwtAuthHandler(), users); + router.use('/api/v1/config', config); + return 
router; +}; + +export default routes; diff --git a/src/service/routes/publicApi.js b/src/service/routes/publicApi.ts similarity index 72% rename from src/service/routes/publicApi.js rename to src/service/routes/publicApi.ts index c9b1b0566..d70b5aa08 100644 --- a/src/service/routes/publicApi.js +++ b/src/service/routes/publicApi.ts @@ -1,4 +1,6 @@ -export const toPublicUser = (user) => { +import { User } from '../../db/types'; + +export const toPublicUser = (user: User) => { return { username: user.username || '', displayName: user.displayName || '', diff --git a/src/service/routes/push.js b/src/service/routes/push.js deleted file mode 100644 index 808216001..000000000 --- a/src/service/routes/push.js +++ /dev/null @@ -1,181 +0,0 @@ -const express = require('express'); -const router = new express.Router(); -const db = require('../../db'); - -router.get('/', async (req, res) => { - const query = { - type: 'push', - }; - - for (const k in req.query) { - if (!k) continue; - - if (k === 'limit') continue; - if (k === 'skip') continue; - let v = req.query[k]; - if (v === 'false') v = false; - if (v === 'true') v = true; - query[k] = v; - } - - res.send(await db.getPushes(query)); -}); - -router.get('/:id', async (req, res) => { - const id = req.params.id; - const push = await db.getPush(id); - if (push) { - res.send(push); - } else { - res.status(404).send({ - message: 'not found', - }); - } -}); - -router.post('/:id/reject', async (req, res) => { - if (req.user) { - const id = req.params.id; - - // Get the push request - const push = await db.getPush(id); - - // Get the committer of the push via their email - const committerEmail = push.userEmail; - const list = await db.getUsers({ email: committerEmail }); - - if (list.length === 0) { - res.status(401).send({ - message: `There was no registered user with the committer's email address: ${committerEmail}`, - }); - return; - } - - if (list[0].username.toLowerCase() === req.user.username.toLowerCase() && !list[0].admin) { 
- res.status(401).send({ - message: `Cannot reject your own changes`, - }); - return; - } - - const isAllowed = await db.canUserApproveRejectPush(id, req.user.username); - console.log({ isAllowed }); - - if (isAllowed) { - const result = await db.reject(id); - console.log(`user ${req.user.username} rejected push request for ${id}`); - res.send(result); - } else { - res.status(401).send({ - message: 'User is not authorised to reject changes', - }); - } - } else { - res.status(401).send({ - message: 'not logged in', - }); - } -}); - -router.post('/:id/authorise', async (req, res) => { - const questions = req.body.params?.attestation; - console.log({ questions }); - - // TODO: compare attestation to configuration and ensure all questions are answered - // - we shouldn't go on the definition in the request! - const attestationComplete = questions?.every((question) => !!question.checked); - console.log({ attestationComplete }); - - if (req.user && attestationComplete) { - const id = req.params.id; - console.log({ id }); - - // Get the push request - const push = await db.getPush(id); - console.log({ push }); - - // Get the committer of the push via their email address - const committerEmail = push.userEmail; - const list = await db.getUsers({ email: committerEmail }); - console.log({ list }); - - if (list.length === 0) { - res.status(401).send({ - message: `There was no registered user with the committer's email address: ${committerEmail}`, - }); - return; - } - - if (list[0].username.toLowerCase() === req.user.username.toLowerCase() && !list[0].admin) { - res.status(401).send({ - message: `Cannot approve your own changes`, - }); - return; - } - - // If we are not the author, now check that we are allowed to authorise on this - // repo - const isAllowed = await db.canUserApproveRejectPush(id, req.user.username); - if (isAllowed) { - console.log(`user ${req.user.username} approved push request for ${id}`); - - const reviewerList = await db.getUsers({ username: 
req.user.username }); - console.log({ reviewerList }); - - const reviewerGitAccount = reviewerList[0].gitAccount; - console.log({ reviewerGitAccount }); - - if (!reviewerGitAccount) { - res.status(401).send({ - message: 'You must associate a GitHub account with your user before approving...', - }); - return; - } - - const attestation = { - questions, - timestamp: new Date(), - reviewer: { - username: req.user.username, - gitAccount: reviewerGitAccount, - }, - }; - const result = await db.authorise(id, attestation); - res.send(result); - } else { - res.status(401).send({ - message: `user ${req.user.username} not authorised to approve push's on this project`, - }); - } - } else { - res.status(401).send({ - message: 'You are unauthorized to perform this action...', - }); - } -}); - -router.post('/:id/cancel', async (req, res) => { - if (req.user) { - const id = req.params.id; - - const isAllowed = await db.canUserCancelPush(id, req.user.username); - - if (isAllowed) { - const result = await db.cancel(id); - console.log(`user ${req.user.username} canceled push request for ${id}`); - res.send(result); - } else { - console.log(`user ${req.user.username} not authorised to cancel push request for ${id}`); - res.status(401).send({ - message: - 'User ${req.user.username)} not authorised to cancel push requests on this project.', - }); - } - } else { - res.status(401).send({ - message: 'not logged in', - }); - } -}); - -module.exports = router; diff --git a/src/service/routes/push.ts b/src/service/routes/push.ts new file mode 100644 index 000000000..4a69fd355 --- /dev/null +++ b/src/service/routes/push.ts @@ -0,0 +1,206 @@ +import express, { Request, Response } from 'express'; +import * as db from '../../db'; +import { PushQuery } from '../../db/types'; + +const router = express.Router(); + +router.get('/', async (req: Request, res: Response) => { + const query: Partial = { + type: 'push', + }; + + for (const key in req.query) { + if (!key) continue; + if (key === 'limit' || 
key === 'skip') continue; + + const rawValue = req.query[key]; + let parsedValue: boolean | undefined; + if (rawValue === 'false') parsedValue = false; + if (rawValue === 'true') parsedValue = true; + query[key] = parsedValue ?? rawValue?.toString(); + } + + res.send(await db.getPushes(query)); +}); + +router.get('/:id', async (req: Request, res: Response) => { + const id = req.params.id; + const push = await db.getPush(id); + if (push) { + res.send(push); + } else { + res.status(404).send({ + message: 'not found', + }); + } +}); + +router.post('/:id/reject', async (req: Request, res: Response) => { + if (!req.user) { + res.status(401).send({ + message: 'not logged in', + }); + return; + } + + const id = req.params.id; + const { username } = req.user as { username: string }; + + // Get the push request + const push = await getValidPushOrRespond(id, res); + if (!push) return; + + // Get the committer of the push via their email + const committerEmail = push.userEmail; + const list = await db.getUsers({ email: committerEmail }); + + if (list.length === 0) { + res.status(401).send({ + message: `There was no registered user with the committer's email address: ${committerEmail}`, + }); + return; + } + + if (list[0].username.toLowerCase() === username.toLowerCase() && !list[0].admin) { + res.status(401).send({ + message: `Cannot reject your own changes`, + }); + return; + } + + const isAllowed = await db.canUserApproveRejectPush(id, username); + console.log({ isAllowed }); + + if (isAllowed) { + const result = await db.reject(id, null); + console.log(`user ${username} rejected push request for ${id}`); + res.send(result); + } else { + res.status(401).send({ + message: 'User is not authorised to reject changes', + }); + } +}); + +router.post('/:id/authorise', async (req: Request, res: Response) => { + const questions = req.body.params?.attestation; + console.log({ questions }); + + // TODO: compare attestation to configuration and ensure all questions are answered + // - 
we shouldn't go on the definition in the request! + const attestationComplete = questions?.every( + (question: { checked: boolean }) => !!question.checked, + ); + console.log({ attestationComplete }); + + if (req.user && attestationComplete) { + const id = req.params.id; + console.log({ id }); + + const { username } = req.user as { username: string }; + + // Get the push request + const push = await db.getPush(id); + console.log({ push }); + + // Get the committer of the push via their email address + const committerEmail = push?.userEmail; + const list = await db.getUsers({ email: committerEmail }); + console.log({ list }); + + if (list.length === 0) { + res.status(401).send({ + message: `There was no registered user with the committer's email address: ${committerEmail}`, + }); + return; + } + + if (list[0].username.toLowerCase() === username.toLowerCase() && !list[0].admin) { + res.status(401).send({ + message: `Cannot approve your own changes`, + }); + return; + } + + // If we are not the author, now check that we are allowed to authorise on this + // repo + const isAllowed = await db.canUserApproveRejectPush(id, username); + if (isAllowed) { + console.log(`user ${username} approved push request for ${id}`); + + const reviewerList = await db.getUsers({ username }); + const reviewerEmail = reviewerList[0].email; + + if (!reviewerEmail) { + res.status(401).send({ + message: `There was no registered email address for the reviewer: ${username}`, + }); + return; + } + + const attestation = { + questions, + timestamp: new Date(), + reviewer: { + username, + reviewerEmail, + }, + }; + const result = await db.authorise(id, attestation); + res.send(result); + } else { + res.status(401).send({ + message: `user ${username} not authorised to approve push's on this project`, + }); + } + } else { + res.status(401).send({ + message: 'You are unauthorized to perform this action...', + }); + } +}); + +router.post('/:id/cancel', async (req: Request, res: Response) => { + if 
(!req.user) {
+    res.status(401).send({
+      message: 'not logged in',
+    });
+    return;
+  }
+
+  const id = req.params.id;
+  const { username } = req.user as { username: string };
+
+  const isAllowed = await db.canUserCancelPush(id, username);
+
+  if (isAllowed) {
+    const result = await db.cancel(id);
+    console.log(`user ${username} canceled push request for ${id}`);
+    res.send(result);
+  } else {
+    console.log(`user ${username} not authorised to cancel push request for ${id}`);
+    res.status(401).send({
+      message: `User ${username} not authorised to cancel push requests on this project.`,
+    });
+  }
+});
+
+async function getValidPushOrRespond(id: string, res: Response) {
+  console.log('getValidPushOrRespond', { id });
+  const push = await db.getPush(id);
+  console.log({ push });
+
+  if (!push) {
+    res.status(404).send({ message: `Push request not found` });
+    return null;
+  }
+
+  if (!push.userEmail) {
+    res.status(400).send({ message: `Push request has no user email` });
+    return null;
+  }
+
+  return push;
+}
+
+export default router;
diff --git a/src/service/routes/repo.js b/src/service/routes/repo.ts
similarity index 75%
rename from src/service/routes/repo.js
rename to src/service/routes/repo.ts
index 7ebbb62e3..659767b23 100644
--- a/src/service/routes/repo.js
+++ b/src/service/routes/repo.ts
@@ -1,43 +1,46 @@
-const express = require('express');
-const db = require('../../db');
-const { getProxyURL } = require('../urls');
-const { getAllProxiedHosts } = require('../../proxy/routes/helper');
+import express, { Request, Response } from 'express';
+
+import * as db from '../../db';
+import { getProxyURL } from '../urls';
+import { getAllProxiedHosts } from '../../proxy/routes/helper';
+import { RepoQuery } from '../../db/types';
+import { isAdminUser } from './utils';
 
 // create a reference to the proxy service as arrow functions will lose track of the `proxy` parameter
 // used to restart the proxy when a new host is added
-let theProxy = null;
-const repo = 
(proxy) => { +let theProxy: any = null; +const repo = (proxy: any) => { theProxy = proxy; - const router = new express.Router(); + const router = express.Router(); - router.get('/', async (req, res) => { + router.get('/', async (req: Request, res: Response) => { const proxyURL = getProxyURL(req); - const query = {}; + const query: Partial = {}; - for (const k in req.query) { - if (!k) continue; + for (const key in req.query) { + if (!key) continue; + if (key === 'limit' || key === 'skip') continue; - if (k === 'limit') continue; - if (k === 'skip') continue; - let v = req.query[k]; - if (v === 'false') v = false; - if (v === 'true') v = true; - query[k] = v; + const rawValue = req.query[key]; + let parsedValue: boolean | undefined; + if (rawValue === 'false') parsedValue = false; + if (rawValue === 'true') parsedValue = true; + query[key] = parsedValue ?? rawValue?.toString(); } const qd = await db.getRepos(query); res.send(qd.map((d) => ({ ...d, proxyURL }))); }); - router.get('/:id', async (req, res) => { + router.get('/:id', async (req: Request, res: Response) => { const proxyURL = getProxyURL(req); const _id = req.params.id; const qd = await db.getRepoById(_id); res.send({ ...qd, proxyURL }); }); - router.patch('/:id/user/push', async (req, res) => { - if (req.user && req.user.admin) { + router.patch('/:id/user/push', async (req: Request, res: Response) => { + if (isAdminUser(req.user)) { const _id = req.params.id; const username = req.body.username.toLowerCase(); const user = await db.findUser(username); @@ -56,8 +59,8 @@ const repo = (proxy) => { } }); - router.patch('/:id/user/authorise', async (req, res) => { - if (req.user && req.user.admin) { + router.patch('/:id/user/authorise', async (req: Request, res: Response) => { + if (isAdminUser(req.user)) { const _id = req.params.id; const username = req.body.username; const user = await db.findUser(username); @@ -76,8 +79,8 @@ const repo = (proxy) => { } }); - router.delete('/:id/user/authorise/:username', 
async (req, res) => { - if (req.user && req.user.admin) { + router.delete('/:id/user/authorise/:username', async (req: Request, res: Response) => { + if (isAdminUser(req.user)) { const _id = req.params.id; const username = req.params.username; const user = await db.findUser(username); @@ -96,8 +99,8 @@ const repo = (proxy) => { } }); - router.delete('/:id/user/push/:username', async (req, res) => { - if (req.user && req.user.admin) { + router.delete('/:id/user/push/:username', async (req: Request, res: Response) => { + if (isAdminUser(req.user)) { const _id = req.params.id; const username = req.params.username; const user = await db.findUser(username); @@ -116,8 +119,8 @@ const repo = (proxy) => { } }); - router.delete('/:id/delete', async (req, res) => { - if (req.user && req.user.admin) { + router.delete('/:id/delete', async (req: Request, res: Response) => { + if (isAdminUser(req.user)) { const _id = req.params.id; // determine if we need to restart the proxy @@ -140,8 +143,8 @@ const repo = (proxy) => { } }); - router.post('/', async (req, res) => { - if (req.user && req.user.admin) { + router.post('/', async (req: Request, res: Response) => { + if (isAdminUser(req.user)) { if (!req.body.url) { res.status(400).send({ message: 'Repository url is required', @@ -184,7 +187,7 @@ const repo = (proxy) => { await theProxy.stop(); await theProxy.start(); } - } catch (e) { + } catch (e: any) { console.error('Repository creation failed due to error: ', e.message ? 
e.message : e); console.error(e.stack); res.status(500).send({ message: 'Failed to create repository due to error' }); @@ -200,4 +203,4 @@ const repo = (proxy) => { return router; }; -module.exports = repo; +export default repo; diff --git a/src/service/routes/users.js b/src/service/routes/users.js deleted file mode 100644 index 733314f04..000000000 --- a/src/service/routes/users.js +++ /dev/null @@ -1,19 +0,0 @@ -const express = require('express'); -const router = new express.Router(); -const db = require('../../db'); -const { toPublicUser } = require('./publicApi'); - -router.get('/', async (req, res) => { - console.log(`fetching users`); - const users = await db.getUsers({}); - res.send(users.map(toPublicUser)); -}); - -router.get('/:id', async (req, res) => { - const username = req.params.id.toLowerCase(); - console.log(`Retrieving details for user: ${username}`); - const user = await db.findUser(username); - res.send(toPublicUser(user)); -}); - -module.exports = router; diff --git a/src/service/routes/users.ts b/src/service/routes/users.ts new file mode 100644 index 000000000..40b2ead5d --- /dev/null +++ b/src/service/routes/users.ts @@ -0,0 +1,25 @@ +import express, { Request, Response } from 'express'; +const router = express.Router(); + +import * as db from '../../db'; +import { toPublicUser } from './publicApi'; +import { UserQuery } from '../../db/types'; + +router.get('/', async (req: Request, res: Response) => { + console.log('fetching users'); + const users = await db.getUsers(); + res.send(users.map(toPublicUser)); +}); + +router.get('/:id', async (req: Request, res: Response) => { + const username = req.params.id.toLowerCase(); + console.log(`Retrieving details for user: ${username}`); + const user = await db.findUser(username); + if (!user) { + res.status(404).send('Error: User not found').end(); + return; + } + res.send(toPublicUser(user)); +}); + +export default router; diff --git a/src/service/routes/utils.ts b/src/service/routes/utils.ts new 
file mode 100644 index 000000000..3c72064ce --- /dev/null +++ b/src/service/routes/utils.ts @@ -0,0 +1,10 @@ +interface User { + username: string; + admin?: boolean; +} + +export function isAdminUser(user: any): user is User & { admin: true } { + return ( + typeof user === 'object' && user !== null && user !== undefined && (user as User).admin === true + ); +} diff --git a/src/service/urls.js b/src/service/urls.js deleted file mode 100644 index 2d1a60de9..000000000 --- a/src/service/urls.js +++ /dev/null @@ -1,20 +0,0 @@ -const { GIT_PROXY_SERVER_PORT: PROXY_HTTP_PORT, GIT_PROXY_UI_PORT: UI_PORT } = - require('../config/env').serverConfig; -const config = require('../config'); - -module.exports = { - getProxyURL: (req) => { - const defaultURL = `${req.protocol}://${req.headers.host}`.replace( - `:${UI_PORT}`, - `:${PROXY_HTTP_PORT}`, - ); - return config.getDomains().proxy ?? defaultURL; - }, - getServiceUIURL: (req) => { - const defaultURL = `${req.protocol}://${req.headers.host}`.replace( - `:${PROXY_HTTP_PORT}`, - `:${UI_PORT}`, - ); - return config.getDomains().service ?? defaultURL; - }, -}; diff --git a/src/service/urls.ts b/src/service/urls.ts new file mode 100644 index 000000000..ca92953c7 --- /dev/null +++ b/src/service/urls.ts @@ -0,0 +1,20 @@ +import { Request } from 'express'; + +import { serverConfig } from '../config/env'; +import * as config from '../config'; + +const { GIT_PROXY_SERVER_PORT: PROXY_HTTP_PORT, GIT_PROXY_UI_PORT: UI_PORT } = serverConfig; + +export const getProxyURL = (req: Request): string => { + return ( + config.getDomains().proxy ?? + `${req.protocol}://${req.headers.host}`.replace(`:${UI_PORT}`, `:${PROXY_HTTP_PORT}`) + ); +}; + +export const getServiceUIURL = (req: Request): string => { + return ( + config.getDomains().service ?? 
+ `${req.protocol}://${req.headers.host}`.replace(`:${PROXY_HTTP_PORT}`, `:${UI_PORT}`) + ); +}; diff --git a/src/types/models.ts b/src/types/models.ts index 0ecbce141..d583ebd76 100644 --- a/src/types/models.ts +++ b/src/types/models.ts @@ -1,3 +1,4 @@ +import { StepData } from '../proxy/actions/Step'; import { AttestationData } from '../ui/views/PushDetails/attestation.types'; export interface UserData { @@ -34,12 +35,16 @@ export interface PushData { diff: { content: string; }; + error: boolean; canceled?: boolean; rejected?: boolean; + blocked?: boolean; authorised?: boolean; attestation?: AttestationData; autoApproved?: boolean; timestamp: string | Date; + allowPush?: boolean; + lastStep?: StepData; } export interface Route { diff --git a/src/types/passport-activedirectory.d.ts b/src/types/passport-activedirectory.d.ts new file mode 100644 index 000000000..1578409ae --- /dev/null +++ b/src/types/passport-activedirectory.d.ts @@ -0,0 +1,7 @@ +declare module 'passport-activedirectory' { + import { Strategy as PassportStrategy } from 'passport'; + class Strategy extends PassportStrategy { + constructor(options: any, verify: (...args: any[]) => void); + } + export = Strategy; +} diff --git a/test/1.test.js b/test/1.test.js index edb6a01fb..46eab9b9b 100644 --- a/test/1.test.js +++ b/test/1.test.js @@ -13,7 +13,7 @@ const chaiHttp = require('chai-http'); const sinon = require('sinon'); const proxyquire = require('proxyquire'); -const service = require('../src/service'); +const service = require('../src/service').default; const db = require('../src/db'); const expect = chai.expect; diff --git a/test/generated-config.test.js b/test/generated-config.test.js index cecee7038..cdeed2349 100644 --- a/test/generated-config.test.js +++ b/test/generated-config.test.js @@ -1,7 +1,6 @@ const chai = require('chai'); const { Convert } = require('../src/config/generated/config'); const defaultSettings = require('../proxy.config.json'); -const { isUserInAdGroup } = 
require('../src/service/passport/ldaphelper'); const { expect } = chai; @@ -212,9 +211,6 @@ describe('Generated Config (QuickType)', () => { userInADGroup: 'https://somedomain.com/some/path/checkUserGroups?domain=&name=&id=', }, - github: { - baseUrl: 'https://api.github.com', - }, }, domains: { @@ -224,13 +220,10 @@ describe('Generated Config (QuickType)', () => { // Complex nested structures attestationConfig: { - enabled: true, questions: [ { - id: 'q1', - type: 'boolean', - required: true, label: 'Test Question', + tooltip: { text: 'Test tooltip content', links: ['https://git-proxy.finos.org./'] }, }, ], }, diff --git a/test/processors/gitLeaks.test.js b/test/processors/gitLeaks.test.js index eca181c61..1461611f8 100644 --- a/test/processors/gitLeaks.test.js +++ b/test/processors/gitLeaks.test.js @@ -117,7 +117,7 @@ describe('gitleaks', () => { expect(result.error).to.be.false; expect(result.steps).to.have.lengthOf(1); expect(result.steps[0].error).to.be.false; - expect(logStub.calledWith('succeded')).to.be.true; + expect(logStub.calledWith('succeeded')).to.be.true; expect(logStub.calledWith('No leaks found')).to.be.true; }); diff --git a/test/processors/scanDiff.test.js b/test/processors/scanDiff.test.js index bd8afd99d..28d949385 100644 --- a/test/processors/scanDiff.test.js +++ b/test/processors/scanDiff.test.js @@ -53,14 +53,15 @@ index 8b97e49..de18d43 100644 Project to test gitproxy +AKIAIOSFODNN7EXAMPLE +AKIAIOSFODNN8EXAMPLE -+blockedTestLiteral ++emdedded_blocked.Te$t.Literal? `; }; describe('Scan commit diff...', async () => { privateOrganizations[0] = 'private-org-test'; commitConfig.diff = { block: { - literals: ['blockedTestLiteral'], + //n.b. 
the example literal includes special chars that would be interpreted as RegEx if not escaped properly + literals: ['blocked.Te$t.Literal?'], patterns: [], providers: { 'AWS (Amazon Web Services) Access Key ID': diff --git a/test/services/routes/auth.test.js b/test/services/routes/auth.test.js index 52106184b..171f70009 100644 --- a/test/services/routes/auth.test.js +++ b/test/services/routes/auth.test.js @@ -2,7 +2,7 @@ const chai = require('chai'); const chaiHttp = require('chai-http'); const sinon = require('sinon'); const express = require('express'); -const { router, loginSuccessHandler } = require('../../../src/service/routes/auth'); +const authRoutes = require('../../../src/service/routes/auth').default; const db = require('../../../src/db'); const { expect } = chai; @@ -19,7 +19,7 @@ const newApp = (username) => { }); } - app.use('/auth', router); + app.use('/auth', authRoutes.router); return app; }; @@ -151,7 +151,7 @@ describe('Auth API', function () { send: sinon.spy(), }; - await loginSuccessHandler()({ user }, res); + await authRoutes.loginSuccessHandler()({ user }, res); expect(res.send.calledOnce).to.be.true; expect(res.send.firstCall.args[0]).to.deep.equal({ diff --git a/test/services/routes/users.test.js b/test/services/routes/users.test.js index d97afeee3..ae4fe9cce 100644 --- a/test/services/routes/users.test.js +++ b/test/services/routes/users.test.js @@ -2,7 +2,7 @@ const chai = require('chai'); const chaiHttp = require('chai-http'); const sinon = require('sinon'); const express = require('express'); -const usersRouter = require('../../../src/service/routes/users'); +const usersRouter = require('../../../src/service/routes/users').default; const db = require('../../../src/db'); const { expect } = chai; diff --git a/test/testJwtAuthHandler.test.js b/test/testJwtAuthHandler.test.js index 9c7ada52e..cf0ee8f09 100644 --- a/test/testJwtAuthHandler.test.js +++ b/test/testJwtAuthHandler.test.js @@ -5,7 +5,7 @@ const jwt = require('jsonwebtoken'); const 
{ jwkToBuffer } = require('jwk-to-pem'); const { assignRoles, getJwks, validateJwt } = require('../src/service/passport/jwtUtils'); -const jwtAuthHandler = require('../src/service/passport/jwtAuthHandler'); +const { jwtAuthHandler } = require('../src/service/passport/jwtAuthHandler'); describe('getJwks', () => { it('should fetch JWKS keys from authority', async () => { @@ -182,8 +182,7 @@ describe('jwtAuthHandler', () => { await jwtAuthHandler(jwtConfig)(req, res, next); expect(res.status.calledWith(500)).to.be.true; - expect(res.send.calledWith({ message: 'JWT handler: authority URL is not configured\n' })).to.be - .true; + expect(res.send.calledWith({ message: 'OIDC authority URL is not configured\n' })).to.be.true; }); it('should return 500 if clientID not configured', async () => { @@ -194,8 +193,7 @@ describe('jwtAuthHandler', () => { await jwtAuthHandler(jwtConfig)(req, res, next); expect(res.status.calledWith(500)).to.be.true; - expect(res.send.calledWith({ message: 'JWT handler: client ID is not configured\n' })).to.be - .true; + expect(res.send.calledWith({ message: 'OIDC client ID is not configured\n' })).to.be.true; }); it('should return 401 if JWT validation fails', async () => { diff --git a/test/testLogin.test.js b/test/testLogin.test.js index 3000f8b74..cb6a0e922 100644 --- a/test/testLogin.test.js +++ b/test/testLogin.test.js @@ -2,7 +2,7 @@ const chai = require('chai'); const chaiHttp = require('chai-http'); const db = require('../src/db'); -const service = require('../src/service'); +const service = require('../src/service').default; chai.use(chaiHttp); chai.should(); diff --git a/test/testOidc.test.js b/test/testOidc.test.js new file mode 100644 index 000000000..46eb74550 --- /dev/null +++ b/test/testOidc.test.js @@ -0,0 +1,176 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); +const expect = chai.expect; +const { safelyExtractEmail, getUsername } = 
require('../src/service/passport/oidc'); + +describe('OIDC auth method', () => { + let dbStub; + let passportStub; + let configure; + let discoveryStub; + let fetchUserInfoStub; + let strategyCtorStub; + let strategyCallback; + + const newConfig = JSON.stringify({ + authentication: [ + { + type: 'openidconnect', + enabled: true, + oidcConfig: { + issuer: 'https://fake-issuer.com', + clientID: 'test-client-id', + clientSecret: 'test-client-secret', + callbackURL: 'https://example.com/callback', + scope: 'openid profile email', + }, + }, + ], + }); + + beforeEach(() => { + dbStub = { + findUserByOIDC: sinon.stub(), + createUser: sinon.stub(), + }; + + passportStub = { + use: sinon.stub(), + serializeUser: sinon.stub(), + deserializeUser: sinon.stub(), + }; + + discoveryStub = sinon.stub().resolves({ some: 'config' }); + fetchUserInfoStub = sinon.stub(); + + // Fake Strategy constructor + strategyCtorStub = function (options, verifyFn) { + strategyCallback = verifyFn; + return { + name: 'openidconnect', + currentUrl: sinon.stub().returns({}), + }; + }; + + const fsStub = { + existsSync: sinon.stub().returns(true), + readFileSync: sinon.stub().returns(newConfig), + }; + + const config = proxyquire('../src/config', { + fs: fsStub, + }); + config.initUserConfig(); + + ({ configure } = proxyquire('../src/service/passport/oidc', { + '../../db': dbStub, + '../../config': config, + 'openid-client': { + discovery: discoveryStub, + fetchUserInfo: fetchUserInfoStub, + }, + 'openid-client/passport': { + Strategy: strategyCtorStub, + }, + })); + }); + + afterEach(() => { + sinon.restore(); + }); + + it('should configure passport with OIDC strategy', async () => { + await configure(passportStub); + + expect(discoveryStub.calledOnce).to.be.true; + expect(passportStub.use.calledOnce).to.be.true; + expect(passportStub.serializeUser.calledOnce).to.be.true; + expect(passportStub.deserializeUser.calledOnce).to.be.true; + }); + + it('should authenticate an existing user', async () => { + 
await configure(passportStub); + + const mockTokenSet = { + claims: () => ({ sub: 'user123' }), + access_token: 'access-token', + }; + dbStub.findUserByOIDC.resolves({ id: 'user123', username: 'test-user' }); + fetchUserInfoStub.resolves({ sub: 'user123', email: 'user@test.com' }); + + const done = sinon.spy(); + + await strategyCallback(mockTokenSet, done); + + expect(done.calledOnce).to.be.true; + const [err, user] = done.firstCall.args; + expect(err).to.be.null; + expect(user).to.have.property('username', 'test-user'); + }); + + it('should handle discovery errors', async () => { + discoveryStub.rejects(new Error('discovery failed')); + + try { + await configure(passportStub); + throw new Error('Expected configure to throw'); + } catch (err) { + expect(err.message).to.include('discovery failed'); + } + }); + + it('should fail if no email in new user profile', async () => { + await configure(passportStub); + + const mockTokenSet = { + claims: () => ({ sub: 'sub-no-email' }), + access_token: 'access-token', + }; + dbStub.findUserByOIDC.resolves(null); + fetchUserInfoStub.resolves({ sub: 'sub-no-email' }); + + const done = sinon.spy(); + + await strategyCallback(mockTokenSet, done); + + const [err, user] = done.firstCall.args; + expect(err).to.be.instanceOf(Error); + expect(err.message).to.include('No email found'); + expect(user).to.be.undefined; + }); + + describe('safelyExtractEmail', () => { + it('should extract email from profile', () => { + const profile = { email: 'test@test.com' }; + const email = safelyExtractEmail(profile); + expect(email).to.equal('test@test.com'); + }); + + it('should extract email from profile with emails array', () => { + const profile = { emails: [{ value: 'test@test.com' }] }; + const email = safelyExtractEmail(profile); + expect(email).to.equal('test@test.com'); + }); + + it('should return null if no email in profile', () => { + const profile = { name: 'test' }; + const email = safelyExtractEmail(profile); + 
expect(email).to.be.null; + }); + }); + + describe('getUsername', () => { + it('should generate username from email', () => { + const email = 'test@test.com'; + const username = getUsername(email); + expect(username).to.equal('test'); + }); + + it('should return empty string if no email', () => { + const email = ''; + const username = getUsername(email); + expect(username).to.equal(''); + }); + }); +}); diff --git a/test/testProxyRoute.test.js b/test/testProxyRoute.test.js index da97f37b3..47fd3b775 100644 --- a/test/testProxyRoute.test.js +++ b/test/testProxyRoute.test.js @@ -10,7 +10,7 @@ const getRouter = require('../src/proxy/routes').getRouter; const chain = require('../src/proxy/chain'); const proxyquire = require('proxyquire'); const { Action, Step } = require('../src/proxy/actions'); -const service = require('../src/service'); +const service = require('../src/service').default; const db = require('../src/db'); import Proxy from '../src/proxy'; diff --git a/test/testPush.test.js b/test/testPush.test.js index 62836b3a5..696acafb0 100644 --- a/test/testPush.test.js +++ b/test/testPush.test.js @@ -2,7 +2,7 @@ const chai = require('chai'); const chaiHttp = require('chai-http'); const db = require('../src/db'); -const service = require('../src/service'); +const service = require('../src/service').default; chai.use(chaiHttp); chai.should(); @@ -314,6 +314,51 @@ describe('auth', async () => { .set('Cookie', `${cookie}`); res.should.have.status(401); }); + + it('should fetch all pushes', async function () { + await db.writeAudit(TEST_PUSH); + await loginAsApprover(); + const res = await chai.request(app).get('/api/v1/push').set('Cookie', `${cookie}`); + res.should.have.status(200); + res.body.should.be.an('array'); + + const push = res.body.find((push) => push.id === TEST_PUSH.id); + expect(push).to.exist; + expect(push).to.deep.equal(TEST_PUSH); + expect(push.canceled).to.be.false; + }); + + it('should allow a committer to cancel a push', async function () { + 
await db.writeAudit(TEST_PUSH); + await loginAsCommitter(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/cancel`) + .set('Cookie', `${cookie}`); + res.should.have.status(200); + + const pushes = await chai.request(app).get('/api/v1/push').set('Cookie', `${cookie}`); + const push = pushes.body.find((push) => push.id === TEST_PUSH.id); + + expect(push).to.exist; + expect(push.canceled).to.be.true; + }); + + it('should not allow a non-committer to cancel a push (even if admin)', async function () { + await db.writeAudit(TEST_PUSH); + await loginAsAdmin(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/cancel`) + .set('Cookie', `${cookie}`); + res.should.have.status(401); + + const pushes = await chai.request(app).get('/api/v1/push').set('Cookie', `${cookie}`); + const push = pushes.body.find((push) => push.id === TEST_PUSH.id); + + expect(push).to.exist; + expect(push.canceled).to.be.false; + }); }); after(async function () { diff --git a/test/testRepoApi.test.js b/test/testRepoApi.test.js index 23dc40bac..8c06cf79b 100644 --- a/test/testRepoApi.test.js +++ b/test/testRepoApi.test.js @@ -2,7 +2,7 @@ const chai = require('chai'); const chaiHttp = require('chai-http'); const db = require('../src/db'); -const service = require('../src/service'); +const service = require('../src/service').default; const { getAllProxiedHosts } = require('../src/proxy/routes/helper'); import Proxy from '../src/proxy'; diff --git a/tsconfig.publish.json b/tsconfig.publish.json index d1ff7049c..b55358d42 100644 --- a/tsconfig.publish.json +++ b/tsconfig.publish.json @@ -11,6 +11,7 @@ "src/ui/**", "**/*.tsx", "**/*.jsx", - "./src/context.js" + "./src/context.js", + "eslint.config.mjs" ] } diff --git a/website/docs/configuration/reference.mdx b/website/docs/configuration/reference.mdx index 4063aa0c5..2bb5d4c8c 100644 --- a/website/docs/configuration/reference.mdx +++ b/website/docs/configuration/reference.mdx @@ -114,35 +114,158 
@@ description: JSON schema reference documentation for GitProxy
- 4.2. [Optional] Property GitProxy configuration file > api > github + 4.2. [Optional] Property GitProxy configuration file > api > gitleaks
+| | | +| ------------------------- | ---------------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Any type allowed | + +**Description:** Configuration for the gitleaks (https://github.com/gitleaks/gitleaks) plugin + +
+ + 4.2.1. [Optional] Property GitProxy configuration file > api > gitleaks > enabled + +
+ +| | | +| ------------ | --------- | +| **Type** | `boolean` | +| **Required** | No | + +
+
+ +
+ + 4.2.2. [Optional] Property GitProxy configuration file > api > gitleaks > ignoreGitleaksAllow + +
+ +| | | +| ------------ | --------- | +| **Type** | `boolean` | +| **Required** | No | + +
+
+ +
+ + 4.2.3. [Optional] Property GitProxy configuration file > api > gitleaks > noColor + +
+ +| | | +| ------------ | --------- | +| **Type** | `boolean` | +| **Required** | No | + +
+
+ +
+ + 4.2.4. [Optional] Property GitProxy configuration file > api > gitleaks > configPath + +
+ +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | No | + +
+
+ +
+
+ + + + +
+ + 5. [Optional] Property GitProxy configuration file > commitConfig + +
+ +**Title:** CommitConfig + +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +**Description:** Block commits based on rules defined over author/committer e-mail addresses, commit message content and diff content + +
+ + 5.1. [Optional] Property GitProxy configuration file > commitConfig > author + +
+ +**Title:** Author + +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +**Description:** Rules applied to commit authors + +
+ + 5.1.1. [Optional] Property GitProxy configuration file > commitConfig > author > email + +
+ +**Title:** Email + +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +**Description:** Rules applied to author email addresses + +
+ + 5.1.1.1. [Optional] Property GitProxy configuration file > commitConfig > author > email > local + +
+ +**Title:** Local + | | | | ------------------------- | ----------- | | **Type** | `object` | | **Required** | No | | **Additional properties** | Not allowed | -**Description:** Deprecated: Defunct property that was used to provide the API URL for GitHub. No longer referenced in the codebase. +**Description:** Rules applied to the local portion of the email address (i.e. section before the @ symbol)
- 4.2.1. [Optional] Property GitProxy configuration file > api > github > baseUrl + 5.1.1.1.1. [Optional] Property GitProxy configuration file > commitConfig > author > email > local > block
+**Title:** Block + | | | | ------------ | -------- | | **Type** | `string` | | **Required** | No | -| **Format** | `uri` | - -**Example:** -```json -"https://api.github.com" -``` +**Description:** Block commits with author email addresses where the first part matches this regular expression
@@ -152,66 +275,129 @@ description: JSON schema reference documentation for GitProxy
- 4.3. [Optional] Property GitProxy configuration file > api > gitleaks + 5.1.1.2. [Optional] Property GitProxy configuration file > commitConfig > author > email > domain
-| | | -| ------------------------- | ---------------- | -| **Type** | `object` | -| **Required** | No | -| **Additional properties** | Any type allowed | +**Title:** Domain -**Description:** Configuration for the gitleaks (https://github.com/gitleaks/gitleaks) plugin +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +**Description:** Rules applied to the domain portion of the email address (i.e. section after the @ symbol)
- 4.3.1. [Optional] Property GitProxy configuration file > api > gitleaks > enabled + 5.1.1.2.1. [Optional] Property GitProxy configuration file > commitConfig > author > email > domain > allow
-| | | -| ------------ | --------- | -| **Type** | `boolean` | -| **Required** | No | +**Title:** Allow + +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | No | + +**Description:** Allow only commits where the domain part of the email address matches this regular expression + +
+
+ +
+
+ +
+
- 4.3.2. [Optional] Property GitProxy configuration file > api > gitleaks > ignoreGitleaksAllow + 5.2. [Optional] Property GitProxy configuration file > commitConfig > message
-| | | -| ------------ | --------- | -| **Type** | `boolean` | -| **Required** | No | +**Title:** Message -
-
+| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +**Description:** Rules applied to commit messages
- 4.3.3. [Optional] Property GitProxy configuration file > api > gitleaks > noColor + 5.2.1. [Optional] Property GitProxy configuration file > commitConfig > message > block
-| | | -| ------------ | --------- | -| **Type** | `boolean` | -| **Required** | No | +**Title:** MessageBlock + +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +**Description:** Block commits where the commit message matches any of the given patterns + +
+ + 5.2.1.1. [Optional] Property GitProxy configuration file > commitConfig > message > block > literals + +
+ +**Title:** MessageBlockLiteral + +| | | +| ------------ | ----------------- | +| **Type** | `array of string` | +| **Required** | No | + +**Description:** Block commits where the commit message contains any of the given string literals + +| Each item of this array must be | Description | +| ------------------------------------------------------------ | ----------- | +| [literals items](#commitConfig_message_block_literals_items) | - | + +###### 5.2.1.1.1. GitProxy configuration file > commitConfig > message > block > literals > literals items + +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | No |
- 4.3.4. [Optional] Property GitProxy configuration file > api > gitleaks > configPath + 5.2.1.2. [Optional] Property GitProxy configuration file > commitConfig > message > block > patterns
+**Title:** MessageBlockPatterns + +| | | +| ------------ | ----------------- | +| **Type** | `array of string` | +| **Required** | No | + +**Description:** Block commits where the commit message matches any of the given regular expressions + +| Each item of this array must be | Description | +| ------------------------------------------------------------ | ----------- | +| [patterns items](#commitConfig_message_block_patterns_items) | - | + +###### 5.2.1.2.1. GitProxy configuration file > commitConfig > message > block > patterns > patterns items + | | | | ------------ | -------- | | **Type** | `string` | | **Required** | No | @@ -228,17 +414,133 @@ description: JSON schema reference documentation for GitProxy
- 5. [Optional] Property GitProxy configuration file > commitConfig + 5.3. [Optional] Property GitProxy configuration file > commitConfig > diff + +
+ +**Title:** Diff + +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +**Description:** Rules applied to commit diff content + +
+ + 5.3.1. [Optional] Property GitProxy configuration file > commitConfig > diff > block + +
+ +**Title:** DiffBlock + +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +**Description:** Block commits where the commit diff matches any of the given patterns + +
+ + 5.3.1.1. [Optional] Property GitProxy configuration file > commitConfig > diff > block > literals + +
+ +**Title:** DiffBlockLiteral + +| | | +| ------------ | ----------------- | +| **Type** | `array of string` | +| **Required** | No | + +**Description:** Block commits where the commit diff content contains any of the given string literals + +| Each item of this array must be | Description | +| --------------------------------------------------------- | ----------- | +| [literals items](#commitConfig_diff_block_literals_items) | - | + +###### 5.3.1.1.1. GitProxy configuration file > commitConfig > diff > block > literals > literals items + +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | No | + +
+
+ +
+ + 5.3.1.2. [Optional] Property GitProxy configuration file > commitConfig > diff > block > patterns
+**Title:** DiffBlockPatterns + +| | | +| ------------ | ------- | +| **Type** | `array` | +| **Required** | No | + +**Description:** Block commits where the commit diff content matches any of the given regular expressions + +| Each item of this array must be | Description | +| --------------------------------------------------------- | ----------- | +| [patterns items](#commitConfig_diff_block_patterns_items) | - | +

###### 5.3.1.2.1. GitProxy configuration file > commitConfig > diff > block > patterns > patterns items + | | | | ------------------------- | ---------------- | | **Type** | `object` | | **Required** | No | | **Additional properties** | Any type allowed | -**Description:** Enforce rules and patterns on commits including e-mail and message +
+
+ +
+ + 5.3.1.3. [Optional] Property GitProxy configuration file > commitConfig > diff > block > providers + +
+**Title:** DiffBlockProviders + +| | | +| ------------------------- | -------------------------------------------------------------------------------------------------------------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | [Each additional property must conform to the schema](#commitConfig_diff_block_providers_additionalProperties) | + +**Description:** Block commits where the commit diff content matches any of the given regular expressions, except where the repository path (project/organisation) matches one of the listed privateOrganisations. The keys in this array are listed as the block type in logs. + +
+ + 5.3.1.3.1. Property GitProxy configuration file > commitConfig > diff > block > providers > additionalProperties + +
+ +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | No | + +
+
+ +
+
+ +
+
+ +
+
@@ -249,13 +551,124 @@ description: JSON schema reference documentation for GitProxy
-| | | -| ------------------------- | ---------------- | -| **Type** | `object` | -| **Required** | No | -| **Additional properties** | Any type allowed | +**Title:** AttestationConfig + +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +**Description:** Configuration for the attestation form displayed to reviewers. Reviewers will need to check the box next to each question in order to complete the review attestation. + +
+ + 6.1. [Optional] Property GitProxy configuration file > attestationConfig > questions + +
+ +**Title:** AttestationQuestions + +| | | +| ------------ | ----------------- | +| **Type** | `array of object` | +| **Required** | No | + +**Description:** Customisable attestation questions to add to attestation form. -**Description:** Customisable questions to add to attestation form +| Each item of this array must be | Description | +| ---------------------------------------------- | ----------- | +| [Question](#attestationConfig_questions_items) | - | + +#### 6.1.1. GitProxy configuration file > attestationConfig > questions > Question + +**Title:** Question + +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | No | +| **Additional properties** | Not allowed | + +
+ + 6.1.1.1. [Required] Property GitProxy configuration file > attestationConfig > questions > Question > label + +
+ +**Title:** QuestionLabel + +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | Yes | + +**Description:** The text of the question that will be displayed to the reviewer + +
+
+ +
+ + 6.1.1.2. [Required] Property GitProxy configuration file > attestationConfig > questions > Question > tooltip + +
+ +**Title:** QuestionTooltip + +| | | +| ------------------------- | ----------- | +| **Type** | `object` | +| **Required** | Yes | +| **Additional properties** | Not allowed | + +**Description:** A tooltip and optional set of links that will be displayed on mouseover of the question and used to provide additional guidance to the reviewer. + +
+ + 6.1.1.2.1. [Required] Property GitProxy configuration file > attestationConfig > questions > Question > tooltip > text + +
+ +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | Yes | + +
+
+ +
+ + 6.1.1.2.2. [Optional] Property GitProxy configuration file > attestationConfig > questions > Question > tooltip > links + +
+ +| | | +| ------------ | ----------------- | +| **Type** | `array of string` | +| **Required** | No | + +| Each item of this array must be | Description | +| --------------------------------------------------------------------- | ----------- | +| [links items](#attestationConfig_questions_items_tooltip_links_items) | - | + +###### 6.1.1.2.2.1. GitProxy configuration file > attestationConfig > questions > Question > tooltip > links > links items + +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | No | +| **Format** | `url` | + +
+
+ +
+
+ +
+
@@ -272,7 +685,45 @@ description: JSON schema reference documentation for GitProxy | **Required** | No | | **Additional properties** | Any type allowed | -**Description:** Provide domains to use alternative to the defaults +**Description:** Provide custom URLs for the git proxy interfaces in case it cannot determine its own URL + +
+ + 7.1. [Optional] Property GitProxy configuration file > domains > proxy + +
+ +**Title:** ProxyUrl + +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | No | +| **Format** | `url` | + +**Description:** Override for the default proxy URL, should include the protocol + +
+
+ +
+ + 7.2. [Optional] Property GitProxy configuration file > domains > service + +
+ +**Title:** Service UI URL + +| | | +| ------------ | -------- | +| **Type** | `string` | +| **Required** | No | +| **Format** | `url` | + +**Description:** Override for the service UI URL, should include the protocol + +
+
@@ -369,7 +820,7 @@ description: JSON schema reference documentation for GitProxy | **Type** | `array` | | **Required** | No | -**Description:** Pattern searches for listed private organizations are disabled +**Description:** Provider searches for listed private organizations are disabled, see commitConfig.diff.block.providers
@@ -439,7 +890,7 @@ description: JSON schema reference documentation for GitProxy | ------------------------------- | ----------- | | [plugins items](#plugins_items) | - | -### 13.1. GitProxy configuration file > plugins > plugins items +### 13.1. GitProxy configuration file > plugins > plugins items | | | | ------------ | -------- | @@ -466,7 +917,7 @@ description: JSON schema reference documentation for GitProxy | --------------------------------------- | ----------- | | [authorisedRepo](#authorisedList_items) | - | -### 14.1. GitProxy configuration file > authorisedList > authorisedRepo +### 14.1. GitProxy configuration file > authorisedList > authorisedRepo | | | | ------------------------- | ---------------------------- | @@ -537,7 +988,7 @@ description: JSON schema reference documentation for GitProxy | ------------------------------- | ----------- | | [database](#sink_items) | - | -### 15.1. GitProxy configuration file > sink > database +### 15.1. GitProxy configuration file > sink > database | | | | ------------------------- | ---------------------- | @@ -638,7 +1089,7 @@ description: JSON schema reference documentation for GitProxy | ---------------------------------------------- | ------------------------------------------ | | [authenticationElement](#authentication_items) | Configuration for an authentication source | -### 16.1. GitProxy configuration file > authentication > authenticationElement +### 16.1. GitProxy configuration file > authentication > authenticationElement | | | | ------------------------- | ----------------------------------- | @@ -1170,7 +1621,7 @@ Specific value: `"jwt"` | ------------------------------------------------- | ------------------------------------------ | | [authenticationElement](#apiAuthentication_items) | Configuration for an authentication source | -### 18.1. GitProxy configuration file > apiAuthentication > authenticationElement +### 18.1. 
GitProxy configuration file > apiAuthentication > authenticationElement | | | | ------------------------- | --------------------------------------------- | @@ -1333,7 +1784,7 @@ Specific value: `"jwt"` | ----------------------------------------- | ----------- | | [routeAuthRule](#uiRouteAuth_rules_items) | - | -#### 23.2.1. GitProxy configuration file > uiRouteAuth > rules > routeAuthRule +#### 23.2.1. GitProxy configuration file > uiRouteAuth > rules > routeAuthRule | | | | ------------------------- | --------------------------- | @@ -1391,4 +1842,4 @@ Specific value: `"jwt"` ---------------------------------------------------------------------------------------------------------------------------- -Generated using [json-schema-for-humans](https://github.com/coveooss/json-schema-for-humans) on 2025-09-30 at 14:41:38 +0100 +Generated using [json-schema-for-humans](https://github.com/coveooss/json-schema-for-humans) on 2025-10-08 at 17:43:40 +0100 From 168d9b09d48d2af629e4db2ce280e1eb0bf06d3e Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 22 Oct 2025 16:05:43 +0200 Subject: [PATCH 06/26] feat: implement git-operations module with native git commands --- .../processors/push-action/git-operations.ts | 216 ++++++++++-------- 1 file changed, 125 insertions(+), 91 deletions(-) diff --git a/src/proxy/processors/push-action/git-operations.ts b/src/proxy/processors/push-action/git-operations.ts index 262609c09..92a7a5b3b 100644 --- a/src/proxy/processors/push-action/git-operations.ts +++ b/src/proxy/processors/push-action/git-operations.ts @@ -1,122 +1,156 @@ -import { Step } from '../../actions'; -import { cacheManager } from './cache-manager'; +import { execSync } from 'child_process'; +import fs from 'fs'; /** - * Git Operations for Hybrid Cache + * Git operations using native git commands */ +interface CloneOptions { + dir: string; + url: string; + username?: string; + password?: string; + bare?: boolean; + depth?: number; + singleBranch?: boolean; +} + 
+interface FetchOptions { + dir: string; + url: string; + username?: string; + password?: string; + depth?: number; + prune?: boolean; + bare?: boolean; +} + /** - * Execute a git command with credentials sanitization + * Clone a repository using native git */ -async function execGitCommand( - command: string, - step: Step, - maxBuffer: number = 50 * 1024 * 1024, -): Promise<{ stdout: string; stderr: string }> { - const { exec } = await import('child_process'); - const { promisify } = await import('util'); - const execAsync = promisify(exec); +export async function clone(options: CloneOptions): Promise { + const { dir, url, username, password, bare = false, depth, singleBranch = false } = options; + + // Build URL with credentials if provided + let authUrl = url; + if (username && password) { + authUrl = url.replace( + /^(https?:\/\/)/, + `$1${encodeURIComponent(username)}:${encodeURIComponent(password)}@`, + ); + } - const { stdout, stderr } = await execAsync(command, { maxBuffer }); + const args: string[] = ['git', 'clone']; - if (stdout) step.log(stdout.trim()); - if (stderr) step.log(stderr.trim()); + if (bare) { + args.push('--bare'); + } - return { stdout, stderr }; -} + if (depth) { + args.push('--depth', depth.toString()); + } -/** - * Build URL with embedded credentials - */ -function buildUrlWithCredentials(url: string, username: string, password: string): string { - return url.replace('://', `://${encodeURIComponent(username)}:${encodeURIComponent(password)}@`); -} + if (singleBranch) { + args.push('--single-branch'); + } else { + // Explicitly clone all branches (needed when using --depth) + args.push('--no-single-branch'); + } -/** - * Remove credentials from bare repository config - */ -async function sanitizeRepositoryConfig(bareRepo: string, cleanUrl: string): Promise { - const { exec } = await import('child_process'); - const { promisify } = await import('util'); - const execAsync = promisify(exec); - - // Remove any URL with credentials - await 
execAsync(`cd "${bareRepo}" && git config --unset remote.origin.url 2>/dev/null || true`); - // Set clean URL without credentials - await execAsync(`cd "${bareRepo}" && git config remote.origin.url "${cleanUrl}"`); -} + args.push(`"${authUrl}"`, `"${dir}"`); -/** - * Clone working copy from bare repository using native git - */ -export async function cloneWorkingCopy( - bareRepo: string, - workCopyPath: string, - step: Step, -): Promise { - try { - await execGitCommand(`git clone "${bareRepo}" "${workCopyPath}"`, step); - step.log(`Working copy created at ${workCopyPath}`); - } catch (error: any) { - step.log(`Failed to create working copy: ${error.message}`); - throw error; + execSync(args.join(' '), { stdio: 'pipe' }); + + // Sanitize credentials from git config + if (username && password) { + sanitizeCredentials(dir, url, bare); } } /** - * Fetch updates in bare repository using native git command + * Fetch updates in a repository using native git */ -export async function fetchBareRepository( - bareRepo: string, - url: string, - username: string, - password: string, - step: Step, -): Promise { - const urlWithCreds = buildUrlWithCredentials(url, username, password); - - try { - // Fetch all branches with depth=1 - await execGitCommand( - `cd "${bareRepo}" && git fetch --depth=1 "${urlWithCreds}" "+refs/heads/*:refs/heads/*"`, - step, +export async function fetch(options: FetchOptions): Promise { + const { dir, url, username, password, depth, prune = false, bare = false } = options; + + // Build URL with credentials if provided + let authUrl = url; + if (username && password) { + authUrl = url.replace( + /^(https?:\/\/)/, + `$1${encodeURIComponent(username)}:${encodeURIComponent(password)}@`, ); + } + + const args: string[] = ['git', '-C', `"${dir}"`, 'fetch']; + + if (depth) { + args.push('--depth', depth.toString()); + } + + if (prune) { + args.push('--prune'); + } + + args.push(`"${authUrl}"`); + args.push('"+refs/heads/*:refs/heads/*"'); // Fetch all branches 
- // SECURITY: Remove credentials from config - await sanitizeRepositoryConfig(bareRepo, url); + execSync(args.join(' '), { stdio: 'pipe' }); - step.log(`Bare repository updated (credentials removed)`); - } catch (error: any) { - step.log(`Failed to fetch bare repository: ${error.message}`); - throw error; + // Sanitize credentials from git config + if (username && password) { + sanitizeCredentials(dir, url, bare); } } /** - * Clone bare repository using native git command + * Remove credentials from git config and set clean URL */ -export async function cloneBareRepository( - bareRepo: string, - url: string, - username: string, - password: string, - step: Step, -): Promise { - const urlWithCreds = buildUrlWithCredentials(url, username, password); - +function sanitizeCredentials(dir: string, cleanUrl: string, isBare: boolean): void { try { - await execGitCommand(`git clone --bare --depth=1 "${urlWithCreds}" "${bareRepo}"`, step); + // For bare repositories, git clone --bare doesn't set up a remote by default + // We need to add it first if it doesn't exist + if (isBare) { + try { + execSync(`git -C "${dir}" remote add origin "${cleanUrl}"`, { stdio: 'pipe' }); + } catch (e) { + // If remote already exists, update it + execSync(`git -C "${dir}" remote set-url origin "${cleanUrl}"`, { stdio: 'pipe' }); + } + } else { + // For non-bare repositories, remote origin should exist + try { + // Unset the URL with credentials + execSync(`git -C "${dir}" config --unset remote.origin.url`, { stdio: 'pipe' }); + } catch (e) { + // Ignore error if already unset + } + + // Set clean URL without credentials + execSync(`git -C "${dir}" remote set-url origin "${cleanUrl}"`, { stdio: 'pipe' }); + } + } catch (e) { + console.warn(`Warning: Failed to sanitize credentials for ${dir}:`, e); + } +} - // SECURITY: Remove credentials from config immediately after clone - await sanitizeRepositoryConfig(bareRepo, url); +/** + * Clone from local repository (for working copy from bare cache) + 
*/ +export async function cloneLocal(options: { + sourceDir: string; + targetDir: string; + depth?: number; +}): Promise { + const { sourceDir, targetDir, depth } = options; - step.log(`Bare repository created at ${bareRepo} (credentials sanitized)`); + const args: string[] = ['git', 'clone']; - // Update access time for LRU after successful clone - const repoName = bareRepo.split('/').pop() || ''; - cacheManager.touchRepository(repoName); - } catch (error: any) { - step.log(`Failed to clone bare repository: ${error.message}`); - throw error; + if (depth) { + args.push('--depth', depth.toString()); } + + args.push(`"${sourceDir}"`, `"${targetDir}"`); + + execSync(args.join(' '), { stdio: 'pipe' }); } From 08116ba6d2ab7db5d4fd98509733adf3602c5c27 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 22 Oct 2025 16:05:57 +0200 Subject: [PATCH 07/26] refactor: update pullRemote to use git-operations module --- .../processors/push-action/pullRemote.ts | 63 ++++++++++++++----- 1 file changed, 49 insertions(+), 14 deletions(-) diff --git a/src/proxy/processors/push-action/pullRemote.ts b/src/proxy/processors/push-action/pullRemote.ts index e21b10208..ed9fa42e3 100644 --- a/src/proxy/processors/push-action/pullRemote.ts +++ b/src/proxy/processors/push-action/pullRemote.ts @@ -2,7 +2,7 @@ import { Action, Step } from '../../actions'; import fs from 'fs'; import { PerformanceTimer } from './metrics'; import { cacheManager } from './cache-manager'; -import { cloneWorkingCopy, fetchBareRepository, cloneBareRepository } from './git-operations'; +import * as gitOps from './git-operations'; const BARE_CACHE = './.remote/cache'; const WORK_DIR = './.remote/work'; @@ -13,11 +13,7 @@ const exec = async (req: any, action: Action): Promise => { try { // Paths for hybrid architecture - // Ensure repoName ends with .git for bare repository convention - const repoNameWithGit = action.repoName.endsWith('.git') - ? 
action.repoName - : `${action.repoName}.git`; - const bareRepo = `${BARE_CACHE}/${repoNameWithGit}`; + const bareRepo = `${BARE_CACHE}/${action.repoName}`; const workCopy = `${WORK_DIR}/${action.id}`; // Check if bare cache exists @@ -43,39 +39,79 @@ const exec = async (req: any, action: Action): Promise => { .toString() .split(':'); - // PHASE 1: Bare Cache (persistent, shared) === + // PHASE 1: Bare Cache (persistent, shared) if (bareExists) { // CACHE HIT: Fetch updates in bare repository step.log(`Fetching updates in bare cache...`); try { - await fetchBareRepository(bareRepo, action.url, username, password, step); + await gitOps.fetch({ + dir: bareRepo, + url: action.url, + username, + password, + depth: 1, + prune: true, + bare: true, + }); // Update access time for LRU - cacheManager.touchRepository(`${action.repoName}.git`); + cacheManager.touchRepository(action.repoName); timer.mark('Fetch complete'); + step.log(`Bare repository updated`); } catch (fetchError) { step.log(`Fetch failed, rebuilding bare cache: ${fetchError}`); // Remove broken cache and re-clone if (fs.existsSync(bareRepo)) { fs.rmSync(bareRepo, { recursive: true, force: true }); } - await cloneBareRepository(bareRepo, action.url, username, password, step); + + // Re-clone as fallback + await gitOps.clone({ + dir: bareRepo, + url: action.url, + username, + password, + bare: true, + depth: 1, + }); + timer.mark('Bare clone complete (fallback)'); } } else { // CACHE MISS: Clone bare repository step.log(`Cloning bare repository to cache...`); - await cloneBareRepository(bareRepo, action.url, username, password, step); + + await gitOps.clone({ + dir: bareRepo, + url: action.url, + username, + password, + bare: true, + depth: 1, + }); + timer.mark('Bare clone complete'); + step.log(`Bare repository created at ${bareRepo}`); + + // Update access time for LRU after successful clone + cacheManager.touchRepository(action.repoName); } - // PHASE 2: Working Copy (temporary, isolated) === + // PHASE 2: 
Working Copy (temporary, isolated) step.log(`Creating isolated working copy for push ${action.id}...`); - await cloneWorkingCopy(bareRepo, `${workCopy}/${action.repoName}`, step); + const workCopyPath = `${workCopy}/${action.repoName}`; + + // Clone from local bare cache (fast local operation) + await gitOps.cloneLocal({ + sourceDir: bareRepo, + targetDir: workCopyPath, + depth: 1, + }); timer.mark('Working copy ready'); + step.log(`Working copy created at ${workCopyPath}`); // Set action path to working copy action.proxyGitPath = workCopy; @@ -87,7 +123,6 @@ const exec = async (req: any, action: Action): Promise => { step.log(completedMsg); step.setContent(completedMsg); - // End timing timer.end(); // Enforce cache limits (LRU eviction on bare cache) From 992c862a6bff2a64d47e88f108bba97090daef5e Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 22 Oct 2025 16:06:12 +0200 Subject: [PATCH 08/26] test: add comprehensive hybrid cache integration tests --- test/processors/cacheManager.test.js | 164 +++++++++++++++ .../hybridCache.integration.test.js | 199 ++++++++++++++++++ 2 files changed, 363 insertions(+) create mode 100644 test/processors/cacheManager.test.js create mode 100644 test/processors/hybridCache.integration.test.js diff --git a/test/processors/cacheManager.test.js b/test/processors/cacheManager.test.js new file mode 100644 index 000000000..e19695163 --- /dev/null +++ b/test/processors/cacheManager.test.js @@ -0,0 +1,164 @@ +const { expect } = require('chai'); +const fs = require('fs'); +const path = require('path'); +const { CacheManager } = require('../../src/proxy/processors/push-action/cache-manager'); + +describe('CacheManager', () => { + let testCacheDir; + let cacheManager; + + beforeEach(() => { + // Create temporary test cache directory + testCacheDir = path.join('./.remote', 'test-cache-' + Date.now()); + if (!fs.existsSync(testCacheDir)) { + fs.mkdirSync(testCacheDir, { recursive: true }); + } + cacheManager = new 
CacheManager(testCacheDir, 0.001, 3); // 1MB, 3 repos max + }); + + afterEach(() => { + // Clean up test cache directory + if (fs.existsSync(testCacheDir)) { + fs.rmSync(testCacheDir, { recursive: true, force: true }); + } + }); + + describe('getCacheStats', () => { + it('should return empty stats for empty cache', () => { + const stats = cacheManager.getCacheStats(); + expect(stats.totalRepositories).to.equal(0); + expect(stats.totalSizeMB).to.equal(0); + expect(stats.repositories).to.be.an('array').that.is.empty; + }); + + it('should calculate stats for repositories in cache', () => { + const repo1 = path.join(testCacheDir, 'repo1.git'); + const repo2 = path.join(testCacheDir, 'repo2.git'); + + fs.mkdirSync(repo1); + fs.mkdirSync(repo2); + + fs.writeFileSync(path.join(repo1, 'file1.txt'), 'a'.repeat(1024 * 1024)); // 1MB + fs.writeFileSync(path.join(repo2, 'file2.txt'), 'b'.repeat(1024 * 1024)); // 1MB + + const stats = cacheManager.getCacheStats(); + expect(stats.totalRepositories).to.equal(2); + expect(stats.totalSizeMB).to.be.at.least(2); // At least 2MB total + expect(stats.repositories).to.have.lengthOf(2); + expect(stats.repositories[0]).to.have.property('name'); + expect(stats.repositories[0]).to.have.property('sizeMB'); + expect(stats.repositories[0]).to.have.property('lastAccessed'); + }); + + it('should have timestamps for repositories', () => { + const repo1 = path.join(testCacheDir, 'repo1.git'); + const repo2 = path.join(testCacheDir, 'repo2.git'); + + fs.mkdirSync(repo1); + fs.writeFileSync(path.join(repo1, 'file1.txt'), 'test'); + + fs.mkdirSync(repo2); + fs.writeFileSync(path.join(repo2, 'file2.txt'), 'test'); + + const stats = cacheManager.getCacheStats(); + expect(stats.repositories).to.have.lengthOf(2); + // Each should have a valid timestamp + stats.repositories.forEach((repo) => { + expect(repo.lastAccessed).to.be.instanceOf(Date); + expect(repo.lastAccessed.getTime()).to.be.greaterThan(0); + }); + }); + }); + + describe('touchRepository', () 
=> { + it('should update repository access time', async () => { + const repoName = 'test-repo.git'; + const repoPath = path.join(testCacheDir, repoName); + + fs.mkdirSync(repoPath); + fs.writeFileSync(path.join(repoPath, 'file.txt'), 'test'); + + const statsBefore = cacheManager.getCacheStats(); + const timeBefore = statsBefore.repositories[0].lastAccessed.getTime(); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + cacheManager.touchRepository(repoName); + + const statsAfter = cacheManager.getCacheStats(); + const timeAfter = statsAfter.repositories[0].lastAccessed.getTime(); + + expect(timeAfter).to.be.greaterThan(timeBefore); + }); + + it('should not throw error for non-existent repository', () => { + expect(() => cacheManager.touchRepository('non-existent.git')).to.not.throw(); + }); + }); + + describe('enforceLimits', () => { + it('should remove oldest repositories when exceeding count limit', () => { + // Create 4 repos (exceeds limit of 3) + for (let i = 1; i <= 4; i++) { + const repoPath = path.join(testCacheDir, `repo${i}.git`); + fs.mkdirSync(repoPath); + fs.writeFileSync(path.join(repoPath, 'file.txt'), 'a'.repeat(100 * 1024)); // 100KB + } + + const statsBefore = cacheManager.getCacheStats(); + expect(statsBefore.totalRepositories).to.equal(4); + + const result = cacheManager.enforceLimits(); + + expect(result.removedRepos).to.have.lengthOf.at.least(1); + expect(result.freedMB).to.be.at.least(0); + + const statsAfter = cacheManager.getCacheStats(); + expect(statsAfter.totalRepositories).to.be.at.most(3); + }); + + it('should remove repositories when exceeding size limit', () => { + // Create repo that exceeds size limit (1MB) + const repo1 = path.join(testCacheDir, 'repo1.git'); + fs.mkdirSync(repo1); + fs.writeFileSync(path.join(repo1, 'largefile.txt'), 'a'.repeat(2 * 1024 * 1024)); // 2MB + + const statsBefore = cacheManager.getCacheStats(); + expect(statsBefore.totalSizeMB).to.be.greaterThan(1); + + const result = 
cacheManager.enforceLimits(); + + expect(result.removedRepos).to.have.lengthOf(1); + expect(result.freedMB).to.be.greaterThan(1); + + const statsAfter = cacheManager.getCacheStats(); + expect(statsAfter.totalRepositories).to.equal(0); + }); + + it('should not remove anything if limits not exceeded', () => { + // Create 2 repos (under limit of 3) + for (let i = 1; i <= 2; i++) { + const repoPath = path.join(testCacheDir, `repo${i}.git`); + fs.mkdirSync(repoPath); + fs.writeFileSync(path.join(repoPath, 'file.txt'), 'test'); + } + + const result = cacheManager.enforceLimits(); + + expect(result.removedRepos).to.be.empty; + expect(result.freedMB).to.equal(0); + }); + }); + + describe('getConfig', () => { + it('should return cache configuration', () => { + const config = cacheManager.getConfig(); + + expect(config).to.deep.equal({ + maxSizeGB: 0.001, + maxRepositories: 3, + cacheDir: testCacheDir, + }); + }); + }); +}); diff --git a/test/processors/hybridCache.integration.test.js b/test/processors/hybridCache.integration.test.js new file mode 100644 index 000000000..81f78f08a --- /dev/null +++ b/test/processors/hybridCache.integration.test.js @@ -0,0 +1,199 @@ +const fs = require('fs'); +const chai = require('chai'); +const pullRemote = require('../../src/proxy/processors/push-action/pullRemote').exec; +const clearBareClone = require('../../src/proxy/processors/push-action/clearBareClone').exec; +const { Action } = require('../../src/proxy/actions/Action'); +const { cacheManager } = require('../../src/proxy/processors/push-action/cache-manager'); + +chai.should(); +const expect = chai.expect; + +describe('Hybrid Cache Integration Tests', () => { + const testRepoUrl = 'https://github.com/finos/git-proxy.git'; + const testRepoName = 'finos/git-proxy.git'; + const authorization = `Basic ${Buffer.from('test:test').toString('base64')}`; + + // Shared test data populated by before() hook + let testData = { + cacheMissAction: null, + cacheHitAction: null, + cacheMissDuration: 
0, + cacheHitDuration: 0, + bareRepoPath: './.remote/cache/git-proxy.git', + inodeBefore: null, + inodeAfter: null, + }; + + before(async function () { + this.timeout(30000); + + console.log('\n === Setting up test data (one-time setup) ==='); + + // Clean up before starting + if (fs.existsSync('./.remote')) { + fs.rmSync('./.remote', { recursive: true, force: true }); + } + + const cacheMissActionId = 'cache-miss-' + Date.now(); + const cacheHitActionId = 'cache-hit-' + Date.now(); + + // First clone - cache MISS + console.log('Executing cache MISS...'); + const cacheMissAction = new Action(cacheMissActionId, 'push', 'POST', Date.now(), testRepoName); + cacheMissAction.url = testRepoUrl; + + const cacheMissStart = Date.now(); + await pullRemote({ headers: { authorization } }, cacheMissAction); + testData.cacheMissDuration = Date.now() - cacheMissStart; + testData.cacheMissAction = cacheMissAction; + + console.log(`Cache MISS completed in ${testData.cacheMissDuration}ms`); + + // Get inode before second clone + const bareRepoStatsBefore = fs.statSync(testData.bareRepoPath); + testData.inodeBefore = bareRepoStatsBefore.ino; + + // Wait a bit to ensure different timestamps + await new Promise((resolve) => setTimeout(resolve, 1000)); + + // Second clone - cache HIT + console.log('Executing cache HIT...'); + const cacheHitAction = new Action(cacheHitActionId, 'push', 'POST', Date.now(), testRepoName); + cacheHitAction.url = testRepoUrl; + + const cacheHitStart = Date.now(); + await pullRemote({ headers: { authorization } }, cacheHitAction); + testData.cacheHitDuration = Date.now() - cacheHitStart; + testData.cacheHitAction = cacheHitAction; + + console.log(`Cache HIT completed in ${testData.cacheHitDuration}ms`); + + // Get inode after second clone + const bareRepoStatsAfter = fs.statSync(testData.bareRepoPath); + testData.inodeAfter = bareRepoStatsAfter.ino; + }); + + after(() => { + // Clean up all .remote directories after all tests + if (fs.existsSync('./.remote')) 
{ + fs.rmSync('./.remote', { recursive: true, force: true }); + } + }); + + describe('Cache MISS (first clone)', () => { + it('should create bare cache repository', () => { + // Verify bare cache was created + expect(fs.existsSync(testData.bareRepoPath)).to.be.true; + + // Verify it's a bare repository (has config, refs, objects) + expect(fs.existsSync(`${testData.bareRepoPath}/config`)).to.be.true; + expect(fs.existsSync(`${testData.bareRepoPath}/refs`)).to.be.true; + expect(fs.existsSync(`${testData.bareRepoPath}/objects`)).to.be.true; + }); + + it('should create working copy with actual files', () => { + const actionId = testData.cacheMissAction.id; + + // Verify working copy was created + expect(fs.existsSync(`./.remote/work/${actionId}`)).to.be.true; + + // Check the content inside working copy directory + const workCopyContents = fs.readdirSync(`./.remote/work/${actionId}`); + expect(workCopyContents.length).to.be.greaterThan(0); + + // Verify we have a git repository directory inside + const repoDir = workCopyContents.find((item) => item.includes('git-proxy')); + expect(repoDir).to.exist; + + // Verify it has .git folder (not bare) + expect(fs.existsSync(`./.remote/work/${actionId}/${repoDir}/.git`)).to.be.true; + + // Verify working copy has actual files + expect(fs.existsSync(`./.remote/work/${actionId}/${repoDir}/package.json`)).to.be.true; + }); + }); + + describe('Cache HIT (second clone)', () => { + it('should reuse existing bare cache (not recreate)', () => { + // Verify bare cache still exists + expect(fs.existsSync(testData.bareRepoPath)).to.be.true; + + // Same inode means same directory (not recreated) + expect(testData.inodeAfter).to.equal(testData.inodeBefore); + }); + + it('should create new isolated working copy', () => { + const cacheMissActionId = testData.cacheMissAction.id; + const cacheHitActionId = testData.cacheHitAction.id; + + // Verify new working copy was created + 
expect(fs.existsSync(`./.remote/work/${cacheHitActionId}`)).to.be.true; + + // Verify both working copies exist (isolated) + expect(fs.existsSync(`./.remote/work/${cacheMissActionId}`)).to.be.true; + expect(fs.existsSync(`./.remote/work/${cacheHitActionId}`)).to.be.true; + + // Verify they are different directories + expect(cacheMissActionId).to.not.equal(cacheHitActionId); + }); + + it('should be faster than cache MISS', () => { + console.log(` Cache MISS: ${testData.cacheMissDuration}ms`); + console.log(` Cache HIT: ${testData.cacheHitDuration}ms`); + console.log( + ` Performance improvement: ${Math.round((1 - testData.cacheHitDuration / testData.cacheMissDuration) * 100)}%`, + ); + + expect(testData.cacheHitDuration).to.be.lessThan(testData.cacheMissDuration); + }); + }); + + describe('Hybrid cache structure', () => { + it('should maintain separate bare cache and working directories', () => { + // Verify directory structure + expect(fs.existsSync('./.remote/cache')).to.be.true; + expect(fs.existsSync('./.remote/work')).to.be.true; + + // Verify bare cache contains .git repositories + const cacheContents = fs.readdirSync('./.remote/cache'); + expect(cacheContents.some((name) => name.endsWith('.git'))).to.be.true; + + // Verify work directory contains action-specific folders + const workContents = fs.readdirSync('./.remote/work'); + expect(workContents.length).to.be.at.least(2); // At least 2 working copies + }); + + it('should share one bare cache for multiple working copies', () => { + const cacheContents = fs.readdirSync('./.remote/cache'); + const gitProxyRepos = cacheContents.filter((name) => name.includes('git-proxy')); + + // Should be only one bare cache for git-proxy + expect(gitProxyRepos.length).to.equal(1); + }); + }); + + describe('Cache manager integration', () => { + it('should track cache statistics', () => { + const stats = cacheManager.getCacheStats(); + + expect(stats.totalRepositories).to.be.at.least(1); + 
expect(stats.repositories).to.be.an('array'); + expect(stats.repositories.length).to.be.at.least(1); + + const gitProxyRepo = stats.repositories.find((r) => r.name === 'git-proxy.git'); + expect(gitProxyRepo).to.exist; + expect(gitProxyRepo.sizeMB).to.be.greaterThan(0); + expect(gitProxyRepo.lastAccessed).to.be.instanceOf(Date); + }); + }); + + describe('Cache cleanup', () => { + it('should clear entire .remote directory in test mode', async () => { + expect(fs.existsSync('./.remote')).to.be.true; + + await clearBareClone(null, testData.cacheMissAction); + + expect(fs.existsSync('./.remote')).to.be.false; + }); + }); +}); From d5e1b5b0afd25a6a6c3b7349c11ea9fce8d0f021 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 22 Oct 2025 16:06:35 +0200 Subject: [PATCH 09/26] chore: remove unused isomorphic-git dependency --- package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/package.json b/package.json index 6b4e9dbf9..9f356ff51 100644 --- a/package.json +++ b/package.json @@ -61,7 +61,6 @@ "express-rate-limit": "^8.1.0", "express-session": "^1.18.2", "history": "5.3.0", - "isomorphic-git": "^1.34.0", "jsonwebtoken": "^9.0.2", "jwk-to-pem": "^2.0.7", "load-plugin": "^6.0.3", From 235e1527323a4c5ecf82b3d93865415dbea0fb75 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Mon, 27 Oct 2025 11:13:20 +0100 Subject: [PATCH 10/26] chore: remove redundant cache config fallback --- src/config/index.ts | 8 +------- test/testConfig.test.js | 13 +++++++++++++ 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/src/config/index.ts b/src/config/index.ts index 84ba8fafd..5534ac8ba 100644 --- a/src/config/index.ts +++ b/src/config/index.ts @@ -291,13 +291,7 @@ export const getRateLimit = () => { export const getCacheConfig = () => { const config = loadFullConfiguration(); - return ( - config.cache || { - maxSizeGB: 2, - maxRepositories: 50, - cacheDir: './.remote/cache', - } - ); + return config.cache; }; // Function to handle configuration updates diff --git 
a/test/testConfig.test.js b/test/testConfig.test.js index c099dffea..76440158f 100644 --- a/test/testConfig.test.js +++ b/test/testConfig.test.js @@ -140,6 +140,19 @@ describe('user configuration', function () { expect(config.getRateLimit().limit).to.be.eql(limitConfig.rateLimit.limit); }); + it('should merge partial cache config with defaults', function () { + const user = { cache: { maxSizeGB: 5 } }; + fs.writeFileSync(tempUserFile, JSON.stringify(user)); + + const config = require('../src/config'); + config.invalidateCache(); + + const cacheConfig = config.getCacheConfig(); + expect(cacheConfig.maxSizeGB).to.be.eql(5); + expect(cacheConfig.maxRepositories).to.be.eql(defaultSettings.cache.maxRepositories); + expect(cacheConfig.cacheDir).to.be.eql(defaultSettings.cache.cacheDir); + }); + it('should override default settings for attestation config', function () { const user = { attestationConfig: { From c762b5e9ca169294428cf99fbdd88b6ba016a43c Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Mon, 27 Oct 2025 11:34:43 +0100 Subject: [PATCH 11/26] refactor: use bytes internally in CacheManager for consistency --- .../processors/push-action/cache-manager.ts | 40 +++++++++---------- test/processors/cacheManager.test.js | 14 +++---- .../hybridCache.integration.test.js | 2 +- 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/src/proxy/processors/push-action/cache-manager.ts b/src/proxy/processors/push-action/cache-manager.ts index 8cab9a4c7..3b9db637c 100644 --- a/src/proxy/processors/push-action/cache-manager.ts +++ b/src/proxy/processors/push-action/cache-manager.ts @@ -4,10 +4,10 @@ import { getCacheConfig } from '../../../config'; export interface CacheStats { totalRepositories: number; - totalSizeMB: number; + totalSizeBytes: number; repositories: Array<{ name: string; - sizeMB: number; + sizeBytes: number; lastAccessed: Date; }>; } @@ -45,29 +45,29 @@ export class CacheManager { if (!fs.existsSync(this.cacheDir)) { return { totalRepositories: 0, - 
totalSizeMB: 0, + totalSizeBytes: 0, repositories: [], }; } - const repositories: Array<{ name: string; sizeMB: number; lastAccessed: Date }> = []; - let totalSizeMB = 0; + const repositories: Array<{ name: string; sizeBytes: number; lastAccessed: Date }> = []; + let totalSizeBytes = 0; const entries = fs.readdirSync(this.cacheDir, { withFileTypes: true }); for (const entry of entries) { if (entry.isDirectory()) { const repoPath = path.join(this.cacheDir, entry.name); - const sizeMB = this.getDirectorySize(repoPath); + const sizeBytes = this.getDirectorySize(repoPath); const stats = fs.statSync(repoPath); repositories.push({ name: entry.name, - sizeMB, + sizeBytes, lastAccessed: stats.atime, }); - totalSizeMB += sizeMB; + totalSizeBytes += sizeBytes; } } @@ -76,7 +76,7 @@ export class CacheManager { return { totalRepositories: repositories.length, - totalSizeMB, + totalSizeBytes, repositories, }; } @@ -84,36 +84,36 @@ export class CacheManager { /** * Enforce cache limits using LRU eviction */ - enforceLimits(): { removedRepos: string[]; freedMB: number } { + enforceLimits(): { removedRepos: string[]; freedBytes: number } { const stats = this.getCacheStats(); const removedRepos: string[] = []; - let freedMB = 0; + let freedBytes = 0; // Sort repositories by last accessed (oldest first for removal) const reposToEvaluate = [...stats.repositories].sort( (a, b) => a.lastAccessed.getTime() - b.lastAccessed.getTime(), ); - // Check size limit - let currentSizeMB = stats.totalSizeMB; - const maxSizeMB = this.maxSizeGB * 1024; + // Check size limit - convert GB to bytes once + let currentSizeBytes = stats.totalSizeBytes; + const maxSizeBytes = this.maxSizeGB * 1024 * 1024 * 1024; for (const repo of reposToEvaluate) { const shouldRemove = - currentSizeMB > maxSizeMB || // Over size limit + currentSizeBytes > maxSizeBytes || // Over size limit stats.totalRepositories - removedRepos.length > this.maxRepositories; // Over count limit if (shouldRemove) { 
this.removeRepository(repo.name); removedRepos.push(repo.name); - freedMB += repo.sizeMB; - currentSizeMB -= repo.sizeMB; + freedBytes += repo.sizeBytes; + currentSizeBytes -= repo.sizeBytes; } else { break; // We've cleaned enough } } - return { removedRepos, freedMB }; + return { removedRepos, freedBytes }; } /** @@ -127,7 +127,7 @@ export class CacheManager { } /** - * Calculate directory size in MB + * Calculate directory size in bytes */ private getDirectorySize(dirPath: string): number { let totalBytes = 0; @@ -157,7 +157,7 @@ export class CacheManager { return 0; } - return Math.round(totalBytes / (1024 * 1024)); // Convert to MB + return totalBytes; } /** diff --git a/test/processors/cacheManager.test.js b/test/processors/cacheManager.test.js index e19695163..f106cda29 100644 --- a/test/processors/cacheManager.test.js +++ b/test/processors/cacheManager.test.js @@ -27,7 +27,7 @@ describe('CacheManager', () => { it('should return empty stats for empty cache', () => { const stats = cacheManager.getCacheStats(); expect(stats.totalRepositories).to.equal(0); - expect(stats.totalSizeMB).to.equal(0); + expect(stats.totalSizeBytes).to.equal(0); expect(stats.repositories).to.be.an('array').that.is.empty; }); @@ -43,10 +43,10 @@ describe('CacheManager', () => { const stats = cacheManager.getCacheStats(); expect(stats.totalRepositories).to.equal(2); - expect(stats.totalSizeMB).to.be.at.least(2); // At least 2MB total + expect(stats.totalSizeBytes).to.be.at.least(2 * 1024 * 1024); // At least 2MB total in bytes expect(stats.repositories).to.have.lengthOf(2); expect(stats.repositories[0]).to.have.property('name'); - expect(stats.repositories[0]).to.have.property('sizeMB'); + expect(stats.repositories[0]).to.have.property('sizeBytes'); expect(stats.repositories[0]).to.have.property('lastAccessed'); }); @@ -111,7 +111,7 @@ describe('CacheManager', () => { const result = cacheManager.enforceLimits(); expect(result.removedRepos).to.have.lengthOf.at.least(1); - 
expect(result.freedMB).to.be.at.least(0); + expect(result.freedBytes).to.be.at.least(0); const statsAfter = cacheManager.getCacheStats(); expect(statsAfter.totalRepositories).to.be.at.most(3); @@ -124,12 +124,12 @@ describe('CacheManager', () => { fs.writeFileSync(path.join(repo1, 'largefile.txt'), 'a'.repeat(2 * 1024 * 1024)); // 2MB const statsBefore = cacheManager.getCacheStats(); - expect(statsBefore.totalSizeMB).to.be.greaterThan(1); + expect(statsBefore.totalSizeBytes).to.be.greaterThan(1024 * 1024); // Greater than 1MB in bytes const result = cacheManager.enforceLimits(); expect(result.removedRepos).to.have.lengthOf(1); - expect(result.freedMB).to.be.greaterThan(1); + expect(result.freedBytes).to.be.greaterThan(1024 * 1024); // Greater than 1MB in bytes const statsAfter = cacheManager.getCacheStats(); expect(statsAfter.totalRepositories).to.equal(0); @@ -146,7 +146,7 @@ describe('CacheManager', () => { const result = cacheManager.enforceLimits(); expect(result.removedRepos).to.be.empty; - expect(result.freedMB).to.equal(0); + expect(result.freedBytes).to.equal(0); }); }); diff --git a/test/processors/hybridCache.integration.test.js b/test/processors/hybridCache.integration.test.js index 81f78f08a..547619b4d 100644 --- a/test/processors/hybridCache.integration.test.js +++ b/test/processors/hybridCache.integration.test.js @@ -182,7 +182,7 @@ describe('Hybrid Cache Integration Tests', () => { const gitProxyRepo = stats.repositories.find((r) => r.name === 'git-proxy.git'); expect(gitProxyRepo).to.exist; - expect(gitProxyRepo.sizeMB).to.be.greaterThan(0); + expect(gitProxyRepo.sizeBytes).to.be.greaterThan(0); expect(gitProxyRepo.lastAccessed).to.be.instanceOf(Date); }); }); From ab28d78d978166752cb619f0b61dd8069e753003 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Mon, 27 Oct 2025 12:02:49 +0100 Subject: [PATCH 12/26] chore: use toSorted() --- src/proxy/processors/push-action/cache-manager.ts | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) 
diff --git a/src/proxy/processors/push-action/cache-manager.ts b/src/proxy/processors/push-action/cache-manager.ts index 3b9db637c..2409f14d5 100644 --- a/src/proxy/processors/push-action/cache-manager.ts +++ b/src/proxy/processors/push-action/cache-manager.ts @@ -72,12 +72,14 @@ export class CacheManager { } // Sort by last accessed (newest first) - repositories.sort((a, b) => b.lastAccessed.getTime() - a.lastAccessed.getTime()); + const sortedRepositories = repositories.toSorted( + (a, b) => b.lastAccessed.getTime() - a.lastAccessed.getTime(), + ); return { - totalRepositories: repositories.length, + totalRepositories: sortedRepositories.length, totalSizeBytes, - repositories, + repositories: sortedRepositories, }; } @@ -90,7 +92,7 @@ export class CacheManager { let freedBytes = 0; // Sort repositories by last accessed (oldest first for removal) - const reposToEvaluate = [...stats.repositories].sort( + const reposToEvaluate = stats.repositories.toSorted( (a, b) => a.lastAccessed.getTime() - b.lastAccessed.getTime(), ); From 88bbce81a5970a8193ced20608eba476250591fc Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Mon, 27 Oct 2025 14:38:17 +0100 Subject: [PATCH 13/26] refactor: remove test-only cleanup, rely on CacheManager limits --- .../processors/push-action/clearBareClone.ts | 38 ++++++------------- .../processors/push-action/pullRemote.ts | 5 +-- test/processors/clearBareClone.test.js | 7 ++-- .../hybridCache.integration.test.js | 6 ++- 4 files changed, 22 insertions(+), 34 deletions(-) diff --git a/src/proxy/processors/push-action/clearBareClone.ts b/src/proxy/processors/push-action/clearBareClone.ts index 143dd3d39..c1a38c0cd 100644 --- a/src/proxy/processors/push-action/clearBareClone.ts +++ b/src/proxy/processors/push-action/clearBareClone.ts @@ -3,40 +3,26 @@ import fs from 'node:fs'; const WORK_DIR = './.remote/work'; -const exec = async (req: any, action: Action): Promise => { +const exec = async (_req: any, action: Action): Promise => { const step = 
new Step('clearBareClone'); - // In test environment, clean up EVERYTHING to prevent memory leaks - if (process.env.NODE_ENV === 'test') { - // TEST: Full cleanup (bare cache + all working copies) + // Delete ONLY this push's working copy + const workCopy = `${WORK_DIR}/${action.id}`; + + if (fs.existsSync(workCopy)) { try { - if (fs.existsSync('./.remote')) { - fs.rmSync('./.remote', { recursive: true, force: true }); - step.log('Test environment: Full .remote directory cleaned'); - } else { - step.log('Test environment: .remote directory already clean'); - } + fs.rmSync(workCopy, { recursive: true, force: true }); + step.log(`Cleaned working copy for push ${action.id}`); } catch (err) { - step.log(`Warning: Could not clean .remote directory: ${err}`); + step.log(`Warning: Could not clean working copy ${workCopy}: ${err}`); } } else { - // PRODUCTION: Delete ONLY this push's working copy - const workCopy = `${WORK_DIR}/${action.id}`; - - if (fs.existsSync(workCopy)) { - try { - fs.rmSync(workCopy, { recursive: true, force: true }); - step.log(`Cleaned working copy for push ${action.id}`); - } catch (err) { - step.log(`Warning: Could not clean working copy ${workCopy}: ${err}`); - } - } else { - step.log(`Working copy ${workCopy} not found (may have been already cleaned)`); - } - - step.log('Bare cache preserved for reuse'); + step.log(`Working copy ${workCopy} not found (may have been already cleaned)`); } + // Note: Cache limit enforcement is handled by pullRemote after cloning + step.log('Working copy cleanup complete'); + action.addStep(step); return action; }; diff --git a/src/proxy/processors/push-action/pullRemote.ts b/src/proxy/processors/push-action/pullRemote.ts index ed9fa42e3..cdea06e4e 100644 --- a/src/proxy/processors/push-action/pullRemote.ts +++ b/src/proxy/processors/push-action/pullRemote.ts @@ -128,9 +128,8 @@ const exec = async (req: any, action: Action): Promise => { // Enforce cache limits (LRU eviction on bare cache) const evictionResult = 
cacheManager.enforceLimits(); if (evictionResult.removedRepos.length > 0) { - step.log( - `LRU evicted ${evictionResult.removedRepos.length} bare repos, freed ${evictionResult.freedMB}MB`, - ); + const freedMB = (evictionResult.freedBytes / (1024 * 1024)).toFixed(2); + step.log(`LRU evicted ${evictionResult.removedRepos.length} bare repos, freed ${freedMB}MB`); } } catch (e: any) { step.setError(e.toString('utf-8')); diff --git a/test/processors/clearBareClone.test.js b/test/processors/clearBareClone.test.js index 611a895f2..4aaa01bc1 100644 --- a/test/processors/clearBareClone.test.js +++ b/test/processors/clearBareClone.test.js @@ -31,11 +31,12 @@ describe('clear bare and local clones', async () => { expect(fs.existsSync(`./.remote/cache/git-proxy.git`)).to.be.true; }).timeout(20000); - it('clear bare clone function purges .remote folder in test environment', async () => { + it('clear bare clone function removes working copy and enforces cache limits', async () => { const action = new Action(actionId, 'type', 'get', timestamp, 'finos/git-proxy.git'); await clearBareClone(null, action); - // In test environment, clearBareClone removes the entire .remote directory - expect(fs.existsSync(`./.remote`)).to.be.false; + // clearBareClone removes only the working copy for this push + expect(fs.existsSync(`./.remote/work/${actionId}`)).to.be.false; + expect(action.steps.some((s) => s.stepName === 'clearBareClone')).to.be.true; }); afterEach(() => { diff --git a/test/processors/hybridCache.integration.test.js b/test/processors/hybridCache.integration.test.js index 547619b4d..cc6bfc412 100644 --- a/test/processors/hybridCache.integration.test.js +++ b/test/processors/hybridCache.integration.test.js @@ -188,12 +188,14 @@ describe('Hybrid Cache Integration Tests', () => { }); describe('Cache cleanup', () => { - it('should clear entire .remote directory in test mode', async () => { + it('should remove working copy and enforce cache limits', async () => { 
expect(fs.existsSync('./.remote')).to.be.true; + const actionId = testData.cacheMissAction.id; await clearBareClone(null, testData.cacheMissAction); - expect(fs.existsSync('./.remote')).to.be.false; + expect(fs.existsSync(`./.remote/work/${actionId}`)).to.be.false; + expect(fs.existsSync('./.remote/cache')).to.be.true; }); }); }); From 1737bfd6f7612b8b99fe74bcd6589fb4a463e6c2 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Mon, 27 Oct 2025 14:55:58 +0100 Subject: [PATCH 14/26] refactor: use spawnSync instead of execSync --- .../processors/push-action/git-operations.ts | 93 +++++++++++-------- 1 file changed, 55 insertions(+), 38 deletions(-) diff --git a/src/proxy/processors/push-action/git-operations.ts b/src/proxy/processors/push-action/git-operations.ts index 92a7a5b3b..59335e7e8 100644 --- a/src/proxy/processors/push-action/git-operations.ts +++ b/src/proxy/processors/push-action/git-operations.ts @@ -1,10 +1,23 @@ -import { execSync } from 'child_process'; +import { spawnSync } from 'child_process'; import fs from 'fs'; /** * Git operations using native git commands */ +/** + * Build URL with credentials if provided + */ +function buildAuthUrl(url: string, username?: string, password?: string): string { + if (username && password) { + return url.replace( + /^(https?:\/\/)/, + `$1${encodeURIComponent(username)}:${encodeURIComponent(password)}@`, + ); + } + return url; +} + interface CloneOptions { dir: string; url: string; @@ -31,16 +44,9 @@ interface FetchOptions { export async function clone(options: CloneOptions): Promise { const { dir, url, username, password, bare = false, depth, singleBranch = false } = options; - // Build URL with credentials if provided - let authUrl = url; - if (username && password) { - authUrl = url.replace( - /^(https?:\/\/)/, - `$1${encodeURIComponent(username)}:${encodeURIComponent(password)}@`, - ); - } + const authUrl = buildAuthUrl(url, username, password); - const args: string[] = ['git', 'clone']; + const args: string[] = 
['clone']; if (bare) { args.push('--bare'); @@ -57,9 +63,12 @@ export async function clone(options: CloneOptions): Promise { args.push('--no-single-branch'); } - args.push(`"${authUrl}"`, `"${dir}"`); + args.push(authUrl, dir); - execSync(args.join(' '), { stdio: 'pipe' }); + const result = spawnSync('git', args, { stdio: 'pipe' }); + if (result.status !== 0) { + throw new Error(`Git clone failed: ${result.stderr?.toString() || 'Unknown error'}`); + } // Sanitize credentials from git config if (username && password) { @@ -73,16 +82,9 @@ export async function clone(options: CloneOptions): Promise { export async function fetch(options: FetchOptions): Promise { const { dir, url, username, password, depth, prune = false, bare = false } = options; - // Build URL with credentials if provided - let authUrl = url; - if (username && password) { - authUrl = url.replace( - /^(https?:\/\/)/, - `$1${encodeURIComponent(username)}:${encodeURIComponent(password)}@`, - ); - } + const authUrl = buildAuthUrl(url, username, password); - const args: string[] = ['git', '-C', `"${dir}"`, 'fetch']; + const args: string[] = ['-C', dir, 'fetch']; if (depth) { args.push('--depth', depth.toString()); @@ -92,10 +94,13 @@ export async function fetch(options: FetchOptions): Promise { args.push('--prune'); } - args.push(`"${authUrl}"`); - args.push('"+refs/heads/*:refs/heads/*"'); // Fetch all branches + args.push(authUrl); + args.push('+refs/heads/*:refs/heads/*'); // Fetch all branches - execSync(args.join(' '), { stdio: 'pipe' }); + const result = spawnSync('git', args, { stdio: 'pipe' }); + if (result.status !== 0) { + throw new Error(`Git fetch failed: ${result.stderr?.toString() || 'Unknown error'}`); + } // Sanitize credentials from git config if (username && password) { @@ -111,23 +116,32 @@ function sanitizeCredentials(dir: string, cleanUrl: string, isBare: boolean): vo // For bare repositories, git clone --bare doesn't set up a remote by default // We need to add it first if it doesn't 
exist if (isBare) { - try { - execSync(`git -C "${dir}" remote add origin "${cleanUrl}"`, { stdio: 'pipe' }); - } catch (e) { + let result = spawnSync('git', ['-C', dir, 'remote', 'add', 'origin', cleanUrl], { + stdio: 'pipe', + }); + if (result.status !== 0) { // If remote already exists, update it - execSync(`git -C "${dir}" remote set-url origin "${cleanUrl}"`, { stdio: 'pipe' }); + result = spawnSync('git', ['-C', dir, 'remote', 'set-url', 'origin', cleanUrl], { + stdio: 'pipe', + }); + if (result.status !== 0) { + throw new Error(`Failed to set remote: ${result.stderr?.toString()}`); + } } } else { // For non-bare repositories, remote origin should exist - try { - // Unset the URL with credentials - execSync(`git -C "${dir}" config --unset remote.origin.url`, { stdio: 'pipe' }); - } catch (e) { - // Ignore error if already unset - } + // Unset the URL with credentials (ignore error if already unset) + spawnSync('git', ['-C', dir, 'config', '--unset', 'remote.origin.url'], { + stdio: 'pipe', + }); // Set clean URL without credentials - execSync(`git -C "${dir}" remote set-url origin "${cleanUrl}"`, { stdio: 'pipe' }); + const result = spawnSync('git', ['-C', dir, 'remote', 'set-url', 'origin', cleanUrl], { + stdio: 'pipe', + }); + if (result.status !== 0) { + throw new Error(`Failed to set remote: ${result.stderr?.toString()}`); + } } } catch (e) { console.warn(`Warning: Failed to sanitize credentials for ${dir}:`, e); @@ -144,13 +158,16 @@ export async function cloneLocal(options: { }): Promise { const { sourceDir, targetDir, depth } = options; - const args: string[] = ['git', 'clone']; + const args: string[] = ['clone']; if (depth) { args.push('--depth', depth.toString()); } - args.push(`"${sourceDir}"`, `"${targetDir}"`); + args.push(sourceDir, targetDir); - execSync(args.join(' '), { stdio: 'pipe' }); + const result = spawnSync('git', args, { stdio: 'pipe' }); + if (result.status !== 0) { + throw new Error(`Git local clone failed: 
${result.stderr?.toString() || 'Unknown error'}`); + } } From d35d10945cef8f613eff32b9a141e5f71f962490 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Mon, 27 Oct 2025 15:00:43 +0100 Subject: [PATCH 15/26] test: increase timeout for git clone tests in ConfigLoader --- test/ConfigLoader.test.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test/ConfigLoader.test.js b/test/ConfigLoader.test.js index 76c659855..7eb06e74d 100644 --- a/test/ConfigLoader.test.js +++ b/test/ConfigLoader.test.js @@ -480,6 +480,8 @@ describe('ConfigLoader', () => { }); it('should throw error if config path was not found', async function () { + this.timeout(10000); + const source = { type: 'git', repository: 'https://github.com/finos/git-proxy.git', @@ -497,6 +499,8 @@ describe('ConfigLoader', () => { }); it('should throw error if config file is not valid JSON', async function () { + this.timeout(10000); + const source = { type: 'git', repository: 'https://github.com/finos/git-proxy.git', From 0a547736258b26a70d7eefa63bc18535d5901acd Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Mon, 27 Oct 2025 15:03:28 +0100 Subject: [PATCH 16/26] perf: use performance.now() instead of Date.now() --- src/proxy/processors/push-action/metrics.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/proxy/processors/push-action/metrics.ts b/src/proxy/processors/push-action/metrics.ts index 081d84ebc..d07c4f853 100644 --- a/src/proxy/processors/push-action/metrics.ts +++ b/src/proxy/processors/push-action/metrics.ts @@ -1,4 +1,5 @@ import { Step } from '../../actions'; +import { performance } from 'perf_hooks'; /** * Performance Timer @@ -16,21 +17,21 @@ export class PerformanceTimer { start(operation: string): void { this.operation = operation; - this.startTime = Date.now(); + this.startTime = performance.now(); this.step.log(`${operation} started`); } mark(message: string): void { if (this.startTime > 0) { - const elapsed = Date.now() - this.startTime; - 
this.step.log(`${message}: ${elapsed}ms`); + const elapsed = performance.now() - this.startTime; + this.step.log(`${message}: ${elapsed.toFixed(2)}ms`); } } end(): void { if (this.startTime > 0) { - const totalTime = Date.now() - this.startTime; - this.step.log(`${this.operation} completed: ${totalTime}ms`); + const totalTime = performance.now() - this.startTime; + this.step.log(`${this.operation} completed: ${totalTime.toFixed(2)}ms`); this.startTime = 0; } } From 133e5e653e926b9b1aec40c00402993222081820 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Mon, 27 Oct 2025 15:07:35 +0100 Subject: [PATCH 17/26] refactor: use cache paths from configuration --- .../processors/push-action/clearBareClone.ts | 10 +++++++--- src/proxy/processors/push-action/pullRemote.ts | 15 +++++++++------ 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/src/proxy/processors/push-action/clearBareClone.ts b/src/proxy/processors/push-action/clearBareClone.ts index c1a38c0cd..973dcb2d8 100644 --- a/src/proxy/processors/push-action/clearBareClone.ts +++ b/src/proxy/processors/push-action/clearBareClone.ts @@ -1,13 +1,17 @@ import { Action, Step } from '../../actions'; import fs from 'node:fs'; - -const WORK_DIR = './.remote/work'; +import path from 'node:path'; +import { cacheManager } from './cache-manager'; const exec = async (_req: any, action: Action): Promise => { const step = new Step('clearBareClone'); + // Get work directory from configuration + const config = cacheManager.getConfig(); + const WORK_DIR = path.join(path.dirname(config.cacheDir), 'work'); + // Delete ONLY this push's working copy - const workCopy = `${WORK_DIR}/${action.id}`; + const workCopy = path.join(WORK_DIR, action.id); if (fs.existsSync(workCopy)) { try { diff --git a/src/proxy/processors/push-action/pullRemote.ts b/src/proxy/processors/push-action/pullRemote.ts index cdea06e4e..65f5ccf04 100644 --- a/src/proxy/processors/push-action/pullRemote.ts +++ 
b/src/proxy/processors/push-action/pullRemote.ts @@ -1,20 +1,23 @@ import { Action, Step } from '../../actions'; import fs from 'fs'; +import path from 'path'; import { PerformanceTimer } from './metrics'; import { cacheManager } from './cache-manager'; import * as gitOps from './git-operations'; -const BARE_CACHE = './.remote/cache'; -const WORK_DIR = './.remote/work'; - const exec = async (req: any, action: Action): Promise => { const step = new Step('pullRemote'); const timer = new PerformanceTimer(step); try { + // Get cache directories from configuration + const config = cacheManager.getConfig(); + const BARE_CACHE = config.cacheDir; + const WORK_DIR = path.join(path.dirname(BARE_CACHE), 'work'); + // Paths for hybrid architecture - const bareRepo = `${BARE_CACHE}/${action.repoName}`; - const workCopy = `${WORK_DIR}/${action.id}`; + const bareRepo = path.join(BARE_CACHE, action.repoName); + const workCopy = path.join(WORK_DIR, action.id); // Check if bare cache exists const bareExists = fs.existsSync(bareRepo); @@ -101,7 +104,7 @@ const exec = async (req: any, action: Action): Promise => { // PHASE 2: Working Copy (temporary, isolated) step.log(`Creating isolated working copy for push ${action.id}...`); - const workCopyPath = `${workCopy}/${action.repoName}`; + const workCopyPath = path.join(workCopy, action.repoName); // Clone from local bare cache (fast local operation) await gitOps.cloneLocal({ From f03d6860e63e2c37fcb67a29efc746424dd8c29e Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 29 Oct 2025 10:11:59 +0100 Subject: [PATCH 18/26] feat: add mutex to prevent race conditions in cache operations --- .../processors/push-action/cache-manager.ts | 98 ++++++++++++------- .../processors/push-action/pullRemote.ts | 6 +- test/processors/cacheManager.test.js | 19 ++-- 3 files changed, 74 insertions(+), 49 deletions(-) diff --git a/src/proxy/processors/push-action/cache-manager.ts b/src/proxy/processors/push-action/cache-manager.ts index 
2409f14d5..4d3dfb027 100644 --- a/src/proxy/processors/push-action/cache-manager.ts +++ b/src/proxy/processors/push-action/cache-manager.ts @@ -16,6 +16,7 @@ export class CacheManager { private cacheDir: string; private maxSizeGB: number; private maxRepositories: number; + private mutex: Promise = Promise.resolve(); constructor( cacheDir: string = './.remote/cache', @@ -28,16 +29,37 @@ export class CacheManager { } /** - * Update access time for repository (for LRU purposes) + * Acquire mutex lock for cache operations */ - touchRepository(repoName: string): void { - const repoPath = path.join(this.cacheDir, repoName); - if (fs.existsSync(repoPath)) { - const now = new Date(); - fs.utimesSync(repoPath, now, now); + private async acquireLock(operation: () => T | Promise): Promise { + const previousLock = this.mutex; + let releaseLock: () => void; + + this.mutex = new Promise((resolve) => { + releaseLock = resolve; + }); + + try { + await previousLock; + return await operation(); + } finally { + releaseLock!(); } } + /** + * Update access time for repository (for LRU purposes) + */ + async touchRepository(repoName: string): Promise { + return this.acquireLock(() => { + const repoPath = path.join(this.cacheDir, repoName); + if (fs.existsSync(repoPath)) { + const now = new Date(); + fs.utimesSync(repoPath, now, now); + } + }); + } + /** * Get cache statistics */ @@ -86,36 +108,38 @@ export class CacheManager { /** * Enforce cache limits using LRU eviction */ - enforceLimits(): { removedRepos: string[]; freedBytes: number } { - const stats = this.getCacheStats(); - const removedRepos: string[] = []; - let freedBytes = 0; - - // Sort repositories by last accessed (oldest first for removal) - const reposToEvaluate = stats.repositories.toSorted( - (a, b) => a.lastAccessed.getTime() - b.lastAccessed.getTime(), - ); - - // Check size limit - convert GB to bytes once - let currentSizeBytes = stats.totalSizeBytes; - const maxSizeBytes = this.maxSizeGB * 1024 * 1024 * 1024; - - 
for (const repo of reposToEvaluate) { - const shouldRemove = - currentSizeBytes > maxSizeBytes || // Over size limit - stats.totalRepositories - removedRepos.length > this.maxRepositories; // Over count limit - - if (shouldRemove) { - this.removeRepository(repo.name); - removedRepos.push(repo.name); - freedBytes += repo.sizeBytes; - currentSizeBytes -= repo.sizeBytes; - } else { - break; // We've cleaned enough + async enforceLimits(): Promise<{ removedRepos: string[]; freedBytes: number }> { + return this.acquireLock(() => { + const stats = this.getCacheStats(); + const removedRepos: string[] = []; + let freedBytes = 0; + + // Sort repositories by last accessed (oldest first for removal) + const reposToEvaluate = stats.repositories.toSorted( + (a, b) => a.lastAccessed.getTime() - b.lastAccessed.getTime(), + ); + + // Check size limit - convert GB to bytes once + let currentSizeBytes = stats.totalSizeBytes; + const maxSizeBytes = this.maxSizeGB * 1024 * 1024 * 1024; + + for (const repo of reposToEvaluate) { + const shouldRemove = + currentSizeBytes > maxSizeBytes || // Over size limit + stats.totalRepositories - removedRepos.length > this.maxRepositories; // Over count limit + + if (shouldRemove) { + this.removeRepository(repo.name); + removedRepos.push(repo.name); + freedBytes += repo.sizeBytes; + currentSizeBytes -= repo.sizeBytes; + } else { + break; // We've cleaned enough + } } - } - return { removedRepos, freedBytes }; + return { removedRepos, freedBytes }; + }); } /** @@ -177,7 +201,7 @@ export class CacheManager { // Global instance initialized with config const config = getCacheConfig(); export const cacheManager = new CacheManager( - config.cacheDir, - config.maxSizeGB, - config.maxRepositories, + config?.cacheDir, + config?.maxSizeGB, + config?.maxRepositories, ); diff --git a/src/proxy/processors/push-action/pullRemote.ts b/src/proxy/processors/push-action/pullRemote.ts index 65f5ccf04..3903a495d 100644 --- 
a/src/proxy/processors/push-action/pullRemote.ts +++ b/src/proxy/processors/push-action/pullRemote.ts @@ -59,7 +59,7 @@ const exec = async (req: any, action: Action): Promise => { }); // Update access time for LRU - cacheManager.touchRepository(action.repoName); + await cacheManager.touchRepository(action.repoName); timer.mark('Fetch complete'); step.log(`Bare repository updated`); } catch (fetchError) { @@ -98,7 +98,7 @@ const exec = async (req: any, action: Action): Promise => { step.log(`Bare repository created at ${bareRepo}`); // Update access time for LRU after successful clone - cacheManager.touchRepository(action.repoName); + await cacheManager.touchRepository(action.repoName); } // PHASE 2: Working Copy (temporary, isolated) @@ -129,7 +129,7 @@ const exec = async (req: any, action: Action): Promise => { timer.end(); // Enforce cache limits (LRU eviction on bare cache) - const evictionResult = cacheManager.enforceLimits(); + const evictionResult = await cacheManager.enforceLimits(); if (evictionResult.removedRepos.length > 0) { const freedMB = (evictionResult.freedBytes / (1024 * 1024)).toFixed(2); step.log(`LRU evicted ${evictionResult.removedRepos.length} bare repos, freed ${freedMB}MB`); diff --git a/test/processors/cacheManager.test.js b/test/processors/cacheManager.test.js index f106cda29..b2be1bfc0 100644 --- a/test/processors/cacheManager.test.js +++ b/test/processors/cacheManager.test.js @@ -83,7 +83,7 @@ describe('CacheManager', () => { await new Promise((resolve) => setTimeout(resolve, 100)); - cacheManager.touchRepository(repoName); + await cacheManager.touchRepository(repoName); const statsAfter = cacheManager.getCacheStats(); const timeAfter = statsAfter.repositories[0].lastAccessed.getTime(); @@ -91,13 +91,14 @@ describe('CacheManager', () => { expect(timeAfter).to.be.greaterThan(timeBefore); }); - it('should not throw error for non-existent repository', () => { - expect(() => cacheManager.touchRepository('non-existent.git')).to.not.throw(); + 
it('should not throw error for non-existent repository', async () => { + // Should not throw + await cacheManager.touchRepository('non-existent.git'); }); }); describe('enforceLimits', () => { - it('should remove oldest repositories when exceeding count limit', () => { + it('should remove oldest repositories when exceeding count limit', async () => { // Create 4 repos (exceeds limit of 3) for (let i = 1; i <= 4; i++) { const repoPath = path.join(testCacheDir, `repo${i}.git`); @@ -108,7 +109,7 @@ describe('CacheManager', () => { const statsBefore = cacheManager.getCacheStats(); expect(statsBefore.totalRepositories).to.equal(4); - const result = cacheManager.enforceLimits(); + const result = await cacheManager.enforceLimits(); expect(result.removedRepos).to.have.lengthOf.at.least(1); expect(result.freedBytes).to.be.at.least(0); @@ -117,7 +118,7 @@ describe('CacheManager', () => { expect(statsAfter.totalRepositories).to.be.at.most(3); }); - it('should remove repositories when exceeding size limit', () => { + it('should remove repositories when exceeding size limit', async () => { // Create repo that exceeds size limit (1MB) const repo1 = path.join(testCacheDir, 'repo1.git'); fs.mkdirSync(repo1); @@ -126,7 +127,7 @@ describe('CacheManager', () => { const statsBefore = cacheManager.getCacheStats(); expect(statsBefore.totalSizeBytes).to.be.greaterThan(1024 * 1024); // Greater than 1MB in bytes - const result = cacheManager.enforceLimits(); + const result = await cacheManager.enforceLimits(); expect(result.removedRepos).to.have.lengthOf(1); expect(result.freedBytes).to.be.greaterThan(1024 * 1024); // Greater than 1MB in bytes @@ -135,7 +136,7 @@ describe('CacheManager', () => { expect(statsAfter.totalRepositories).to.equal(0); }); - it('should not remove anything if limits not exceeded', () => { + it('should not remove anything if limits not exceeded', async () => { // Create 2 repos (under limit of 3) for (let i = 1; i <= 2; i++) { const repoPath = 
path.join(testCacheDir, `repo${i}.git`); @@ -143,7 +144,7 @@ describe('CacheManager', () => { fs.writeFileSync(path.join(repoPath, 'file.txt'), 'test'); } - const result = cacheManager.enforceLimits(); + const result = await cacheManager.enforceLimits(); expect(result.removedRepos).to.be.empty; expect(result.freedBytes).to.equal(0); From bc0be9fdb8a4438df252475cec901dc4f952ee84 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 29 Oct 2025 10:26:10 +0100 Subject: [PATCH 19/26] perf: remove unnecessary sort from getCacheStats --- src/proxy/processors/push-action/cache-manager.ts | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/src/proxy/processors/push-action/cache-manager.ts b/src/proxy/processors/push-action/cache-manager.ts index 4d3dfb027..62325c389 100644 --- a/src/proxy/processors/push-action/cache-manager.ts +++ b/src/proxy/processors/push-action/cache-manager.ts @@ -93,15 +93,10 @@ export class CacheManager { } } - // Sort by last accessed (newest first) - const sortedRepositories = repositories.toSorted( - (a, b) => b.lastAccessed.getTime() - a.lastAccessed.getTime(), - ); - return { - totalRepositories: sortedRepositories.length, + totalRepositories: repositories.length, totalSizeBytes, - repositories: sortedRepositories, + repositories, }; } From d07ed9c90265c999cc3aad4c9b2a40db2e81efde Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 29 Oct 2025 10:28:00 +0100 Subject: [PATCH 20/26] fix: add logging for silent errors in getDirectorySize --- src/proxy/processors/push-action/cache-manager.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/proxy/processors/push-action/cache-manager.ts b/src/proxy/processors/push-action/cache-manager.ts index 62325c389..84d7240a0 100644 --- a/src/proxy/processors/push-action/cache-manager.ts +++ b/src/proxy/processors/push-action/cache-manager.ts @@ -166,7 +166,7 @@ export class CacheManager { const stats = fs.statSync(itemPath); totalBytes += stats.size; } catch 
(error) { - // Skip files that can't be read + console.warn(`[CacheManager] Failed to stat file ${itemPath}:`, error); } } } @@ -175,6 +175,7 @@ export class CacheManager { try { calculateSize(dirPath); } catch (error) { + console.warn(`[CacheManager] Failed to calculate size for ${dirPath}:`, error); return 0; } From 2acaee7a71b01cda731d19e563c49de360c6a81c Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 29 Oct 2025 10:52:24 +0100 Subject: [PATCH 21/26] refactor: rename cacheDir to repoCacheDir to disambiguate from ConfigLoader.cacheDir --- .../processors/push-action/cache-manager.ts | 18 +++++++++--------- src/proxy/processors/push-action/pullRemote.ts | 2 +- test/processors/cacheManager.test.js | 2 +- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/proxy/processors/push-action/cache-manager.ts b/src/proxy/processors/push-action/cache-manager.ts index 84d7240a0..fc711f2ea 100644 --- a/src/proxy/processors/push-action/cache-manager.ts +++ b/src/proxy/processors/push-action/cache-manager.ts @@ -13,17 +13,17 @@ export interface CacheStats { } export class CacheManager { - private cacheDir: string; + private repoCacheDir: string; private maxSizeGB: number; private maxRepositories: number; private mutex: Promise = Promise.resolve(); constructor( - cacheDir: string = './.remote/cache', + repoCacheDir: string = './.remote/cache', maxSizeGB: number = 2, maxRepositories: number = 50, ) { - this.cacheDir = cacheDir; + this.repoCacheDir = repoCacheDir; this.maxSizeGB = maxSizeGB; this.maxRepositories = maxRepositories; } @@ -52,7 +52,7 @@ export class CacheManager { */ async touchRepository(repoName: string): Promise { return this.acquireLock(() => { - const repoPath = path.join(this.cacheDir, repoName); + const repoPath = path.join(this.repoCacheDir, repoName); if (fs.existsSync(repoPath)) { const now = new Date(); fs.utimesSync(repoPath, now, now); @@ -64,7 +64,7 @@ export class CacheManager { * Get cache statistics */ getCacheStats(): 
CacheStats { - if (!fs.existsSync(this.cacheDir)) { + if (!fs.existsSync(this.repoCacheDir)) { return { totalRepositories: 0, totalSizeBytes: 0, @@ -75,11 +75,11 @@ export class CacheManager { const repositories: Array<{ name: string; sizeBytes: number; lastAccessed: Date }> = []; let totalSizeBytes = 0; - const entries = fs.readdirSync(this.cacheDir, { withFileTypes: true }); + const entries = fs.readdirSync(this.repoCacheDir, { withFileTypes: true }); for (const entry of entries) { if (entry.isDirectory()) { - const repoPath = path.join(this.cacheDir, entry.name); + const repoPath = path.join(this.repoCacheDir, entry.name); const sizeBytes = this.getDirectorySize(repoPath); const stats = fs.statSync(repoPath); @@ -141,7 +141,7 @@ export class CacheManager { * Remove specific repository from cache */ private removeRepository(repoName: string): void { - const repoPath = path.join(this.cacheDir, repoName); + const repoPath = path.join(this.repoCacheDir, repoName); if (fs.existsSync(repoPath)) { fs.rmSync(repoPath, { recursive: true, force: true }); } @@ -189,7 +189,7 @@ export class CacheManager { return { maxSizeGB: this.maxSizeGB, maxRepositories: this.maxRepositories, - cacheDir: this.cacheDir, + repoCacheDir: this.repoCacheDir, }; } } diff --git a/src/proxy/processors/push-action/pullRemote.ts b/src/proxy/processors/push-action/pullRemote.ts index 3903a495d..305b6e890 100644 --- a/src/proxy/processors/push-action/pullRemote.ts +++ b/src/proxy/processors/push-action/pullRemote.ts @@ -12,7 +12,7 @@ const exec = async (req: any, action: Action): Promise => { try { // Get cache directories from configuration const config = cacheManager.getConfig(); - const BARE_CACHE = config.cacheDir; + const BARE_CACHE = config.repoCacheDir; const WORK_DIR = path.join(path.dirname(BARE_CACHE), 'work'); // Paths for hybrid architecture diff --git a/test/processors/cacheManager.test.js b/test/processors/cacheManager.test.js index b2be1bfc0..7cb84b9f0 100644 --- 
a/test/processors/cacheManager.test.js +++ b/test/processors/cacheManager.test.js @@ -158,7 +158,7 @@ describe('CacheManager', () => { expect(config).to.deep.equal({ maxSizeGB: 0.001, maxRepositories: 3, - cacheDir: testCacheDir, + repoCacheDir: testCacheDir, }); }); }); From b5bc3d8b0f76e69379c9996d7f49d20958812aa3 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 29 Oct 2025 10:52:24 +0100 Subject: [PATCH 22/26] refactor: rename cacheDir to repoCacheDir to disambiguate from ConfigLoader.cacheDir --- .../processors/push-action/cache-manager.ts | 18 +++++++++--------- .../processors/push-action/clearBareClone.ts | 2 +- src/proxy/processors/push-action/pullRemote.ts | 2 +- test/processors/cacheManager.test.js | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/proxy/processors/push-action/cache-manager.ts b/src/proxy/processors/push-action/cache-manager.ts index 84d7240a0..fc711f2ea 100644 --- a/src/proxy/processors/push-action/cache-manager.ts +++ b/src/proxy/processors/push-action/cache-manager.ts @@ -13,17 +13,17 @@ export interface CacheStats { } export class CacheManager { - private cacheDir: string; + private repoCacheDir: string; private maxSizeGB: number; private maxRepositories: number; private mutex: Promise = Promise.resolve(); constructor( - cacheDir: string = './.remote/cache', + repoCacheDir: string = './.remote/cache', maxSizeGB: number = 2, maxRepositories: number = 50, ) { - this.cacheDir = cacheDir; + this.repoCacheDir = repoCacheDir; this.maxSizeGB = maxSizeGB; this.maxRepositories = maxRepositories; } @@ -52,7 +52,7 @@ export class CacheManager { */ async touchRepository(repoName: string): Promise { return this.acquireLock(() => { - const repoPath = path.join(this.cacheDir, repoName); + const repoPath = path.join(this.repoCacheDir, repoName); if (fs.existsSync(repoPath)) { const now = new Date(); fs.utimesSync(repoPath, now, now); @@ -64,7 +64,7 @@ export class CacheManager { * Get cache statistics */ getCacheStats(): 
CacheStats { - if (!fs.existsSync(this.cacheDir)) { + if (!fs.existsSync(this.repoCacheDir)) { return { totalRepositories: 0, totalSizeBytes: 0, @@ -75,11 +75,11 @@ export class CacheManager { const repositories: Array<{ name: string; sizeBytes: number; lastAccessed: Date }> = []; let totalSizeBytes = 0; - const entries = fs.readdirSync(this.cacheDir, { withFileTypes: true }); + const entries = fs.readdirSync(this.repoCacheDir, { withFileTypes: true }); for (const entry of entries) { if (entry.isDirectory()) { - const repoPath = path.join(this.cacheDir, entry.name); + const repoPath = path.join(this.repoCacheDir, entry.name); const sizeBytes = this.getDirectorySize(repoPath); const stats = fs.statSync(repoPath); @@ -141,7 +141,7 @@ export class CacheManager { * Remove specific repository from cache */ private removeRepository(repoName: string): void { - const repoPath = path.join(this.cacheDir, repoName); + const repoPath = path.join(this.repoCacheDir, repoName); if (fs.existsSync(repoPath)) { fs.rmSync(repoPath, { recursive: true, force: true }); } @@ -189,7 +189,7 @@ export class CacheManager { return { maxSizeGB: this.maxSizeGB, maxRepositories: this.maxRepositories, - cacheDir: this.cacheDir, + repoCacheDir: this.repoCacheDir, }; } } diff --git a/src/proxy/processors/push-action/clearBareClone.ts b/src/proxy/processors/push-action/clearBareClone.ts index 973dcb2d8..6c2b582c9 100644 --- a/src/proxy/processors/push-action/clearBareClone.ts +++ b/src/proxy/processors/push-action/clearBareClone.ts @@ -8,7 +8,7 @@ const exec = async (_req: any, action: Action): Promise => { // Get work directory from configuration const config = cacheManager.getConfig(); - const WORK_DIR = path.join(path.dirname(config.cacheDir), 'work'); + const WORK_DIR = path.join(path.dirname(config.repoCacheDir), 'work'); // Delete ONLY this push's working copy const workCopy = path.join(WORK_DIR, action.id); diff --git a/src/proxy/processors/push-action/pullRemote.ts 
b/src/proxy/processors/push-action/pullRemote.ts index 3903a495d..305b6e890 100644 --- a/src/proxy/processors/push-action/pullRemote.ts +++ b/src/proxy/processors/push-action/pullRemote.ts @@ -12,7 +12,7 @@ const exec = async (req: any, action: Action): Promise => { try { // Get cache directories from configuration const config = cacheManager.getConfig(); - const BARE_CACHE = config.cacheDir; + const BARE_CACHE = config.repoCacheDir; const WORK_DIR = path.join(path.dirname(BARE_CACHE), 'work'); // Paths for hybrid architecture diff --git a/test/processors/cacheManager.test.js b/test/processors/cacheManager.test.js index b2be1bfc0..7cb84b9f0 100644 --- a/test/processors/cacheManager.test.js +++ b/test/processors/cacheManager.test.js @@ -158,7 +158,7 @@ describe('CacheManager', () => { expect(config).to.deep.equal({ maxSizeGB: 0.001, maxRepositories: 3, - cacheDir: testCacheDir, + repoCacheDir: testCacheDir, }); }); }); From b073eb377f9b89f581c14e1bbd6b8f62d3df996d Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Fri, 31 Oct 2025 15:43:01 +0100 Subject: [PATCH 23/26] docs: add readme and cache benchmark script --- benchmark-cache.sh | 205 +++++++++++++++++++++ src/proxy/processors/push-action/README.md | 200 ++++++++++++++++++++ 2 files changed, 405 insertions(+) create mode 100755 benchmark-cache.sh create mode 100644 src/proxy/processors/push-action/README.md diff --git a/benchmark-cache.sh b/benchmark-cache.sh new file mode 100755 index 000000000..27ab9a899 --- /dev/null +++ b/benchmark-cache.sh @@ -0,0 +1,205 @@ +#!/bin/bash + +set -e + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +echo -e "${BLUE}=== Git Proxy Hybrid Cache Benchmark ===${NC}" +echo "" + +# Configuration +PROXY_URL="http://localhost:8000" +GITHUB_REPO="${1:-fabiovincenzi/open-webui}" +TEST_BRANCH="${2:-main}" +NUM_PUSHES="${3:-3}" + +# Construct proxy URL (format: http://localhost:8000/github.com/user/repo.git) 
+PROXY_REPO_URL="$PROXY_URL/github.com/$GITHUB_REPO.git" + +echo "Configuration:" +echo " Proxy URL: $PROXY_URL" +echo " GitHub Repo: $GITHUB_REPO" +echo " Proxy Repo URL: $PROXY_REPO_URL" +echo " Branch: $TEST_BRANCH" +echo " Number of pushes: $NUM_PUSHES" +echo "" + +# Check if git-proxy is running +echo -e "${YELLOW}Checking if git-proxy is running...${NC}" +if ! curl -s "$PROXY_URL" > /dev/null 2>&1; then + echo -e "${RED}ERROR: git-proxy is not running on $PROXY_URL${NC}" + echo "Please start git-proxy with: npm start" + exit 1 +fi +echo -e "${GREEN}✓ git-proxy is running${NC}" +echo "" + +# Get GitHub credentials from git credential helper +echo -e "${YELLOW}Retrieving GitHub credentials...${NC}" +CREDENTIALS=$(echo -e "protocol=https\nhost=github.com\n" | git credential fill 2>/dev/null) +if [ -z "$CREDENTIALS" ]; then + echo -e "${RED}ERROR: No GitHub credentials found${NC}" + echo "Please configure git credentials first:" + echo " git config --global credential.helper store" + echo " git clone https://github.com/your-repo.git" + exit 1 +fi + +GITHUB_USERNAME=$(echo "$CREDENTIALS" | grep "^username=" | cut -d= -f2) +GITHUB_TOKEN=$(echo "$CREDENTIALS" | grep "^password=" | cut -d= -f2) + +if [ -z "$GITHUB_USERNAME" ] || [ -z "$GITHUB_TOKEN" ]; then + echo -e "${RED}ERROR: Could not extract GitHub credentials${NC}" + exit 1 +fi + +echo -e "${GREEN}✓ GitHub credentials retrieved for user: $GITHUB_USERNAME${NC}" +echo "" + +# Setup test directory +TEST_DIR="./benchmark-test-$(date +%s)" +echo -e "${YELLOW}Creating test directory: $TEST_DIR${NC}" +mkdir -p "$TEST_DIR" +cd "$TEST_DIR" + +REPO_NAME=$(basename "$GITHUB_REPO") + +# Clear cache before starting +echo -e "${YELLOW}Clearing cache before benchmark...${NC}" +rm -rf ../.remote/cache/* ../.remote/work/* 2>/dev/null || true +echo -e "${GREEN}✓ Cache cleared${NC}" +echo "" + +measure_push() { + local push_number=$1 + local is_first=$2 + + echo -e "${BLUE}=== Push #$push_number $([ "$is_first" = "true" ] && 
echo "(COLD CACHE)" || echo "(WARM CACHE)") ===${NC}" + + # Clone repo through proxy + echo "Cloning repository..." + START_CLONE=$(date +%s.%N) + + rm -rf "$REPO_NAME" 2>/dev/null || true + git clone "$PROXY_REPO_URL" "$REPO_NAME" > clone.log 2>&1 + + END_CLONE=$(date +%s.%N) + CLONE_TIME=$(echo "$END_CLONE - $START_CLONE" | bc) + + cd "$REPO_NAME" + + # Get email from git config + GITHUB_EMAIL=$(git config --global user.email) + if [ -z "$GITHUB_EMAIL" ]; then + GITHUB_EMAIL="$GITHUB_USERNAME@users.noreply.github.com" + fi + + git config user.email "$GITHUB_EMAIL" + git config user.name "$GITHUB_USERNAME" + + # Create a test commit + echo "benchmark-$push_number-$(date +%s)" > "benchmark-$push_number.txt" + git add "benchmark-$push_number.txt" + git commit -m "Benchmark push #$push_number" > /dev/null 2>&1 + + # Push through proxy with credentials + echo "Pushing commit..." + START_PUSH=$(date +%s.%N) + + # Use credential helper to pass GitHub credentials + git -c credential.helper="!f() { echo username=$GITHUB_USERNAME; echo password=$GITHUB_TOKEN; }; f" \ + push "$PROXY_REPO_URL" "HEAD:refs/heads/benchmark-test-$push_number" > push.log 2>&1 || true + + END_PUSH=$(date +%s.%N) + PUSH_TIME=$(echo "$END_PUSH - $START_PUSH" | bc) + + TOTAL_TIME=$(echo "$CLONE_TIME + $PUSH_TIME" | bc) + + cd .. 
+ + echo -e "${GREEN}Results:${NC}" + echo " Clone time: ${CLONE_TIME}s" + echo " Push time: ${PUSH_TIME}s" + echo " Total time: ${TOTAL_TIME}s" + echo "" + + # Store results + echo "$push_number,$is_first,$CLONE_TIME,$PUSH_TIME,$TOTAL_TIME" >> results.csv +} + +# Initialize results file +echo "push_number,is_cold_cache,clone_time,push_time,total_time" > results.csv + +# Measure first push (cold cache) +measure_push 1 true + +# Get cache stats after first push +echo -e "${BLUE}=== Cache Statistics After First Push ===${NC}" +CACHE_DIR="../.remote/cache" +if [ -d "$CACHE_DIR" ]; then + CACHE_SIZE=$(du -sh "$CACHE_DIR" | cut -f1) + CACHE_REPOS=$(ls -1 "$CACHE_DIR" | wc -l) + echo " Cache size: $CACHE_SIZE" + echo " Cached repos: $CACHE_REPOS" +else + echo " Cache directory not found" +fi +echo "" + +# Measure subsequent pushes (warm cache) +for i in $(seq 2 $NUM_PUSHES); do + measure_push $i false +done + +# Final cache stats +echo -e "${BLUE}=== Final Cache Statistics ===${NC}" +if [ -d "$CACHE_DIR" ]; then + CACHE_SIZE=$(du -sh "$CACHE_DIR" | cut -f1) + CACHE_REPOS=$(ls -1 "$CACHE_DIR" | wc -l) + echo " Cache size: $CACHE_SIZE" + echo " Cached repos: $CACHE_REPOS" + echo "" + echo " Cached repositories:" + ls -lh "$CACHE_DIR" | tail -n +2 | awk '{print " " $9 " (" $5 ")"}' +fi +echo "" + +# Calculate and display summary +echo -e "${BLUE}=== Performance Summary ===${NC}" +echo "" + +# Read results +FIRST_PUSH_TIME=$(awk -F, 'NR==2 {print $5}' results.csv) +AVG_WARM_TIME=$(awk -F, 'NR>2 {sum+=$5; count++} END {if(count>0) print sum/count; else print 0}' results.csv) + +echo "First push (cold cache): ${FIRST_PUSH_TIME}s" +if (( $(echo "$AVG_WARM_TIME > 0" | bc -l) )); then + echo "Average warm push: ${AVG_WARM_TIME}s" + SPEEDUP=$(echo "scale=2; $FIRST_PUSH_TIME / $AVG_WARM_TIME" | bc) + IMPROVEMENT=$(echo "scale=1; (1 - $AVG_WARM_TIME / $FIRST_PUSH_TIME) * 100" | bc) + echo "" + echo -e "${GREEN}Performance improvement: ${IMPROVEMENT}% faster (${SPEEDUP}x 
speedup)${NC}" +fi +echo "" + +# Show detailed results table +echo -e "${BLUE}=== Detailed Results ===${NC}" +echo "" +printf "%-12s %-12s %-12s %-12s %-12s\n" "Push #" "Cache" "Clone (s)" "Push (s)" "Total (s)" +printf "%-12s %-12s %-12s %-12s %-12s\n" "------" "-----" "---------" "--------" "---------" +awk -F, 'NR>1 { + cache = ($2 == "true") ? "COLD" : "WARM" + printf "%-12s %-12s %-12.2f %-12.2f %-12.2f\n", $1, cache, $3, $4, $5 +}' results.csv +echo "" + +# Cleanup prompt +echo -e "${YELLOW}Test directory: $TEST_DIR${NC}" +echo "To clean up: rm -rf $TEST_DIR" +echo "" +echo -e "${GREEN}✓ Benchmark complete!${NC}" diff --git a/src/proxy/processors/push-action/README.md b/src/proxy/processors/push-action/README.md new file mode 100644 index 000000000..07ba12de7 --- /dev/null +++ b/src/proxy/processors/push-action/README.md @@ -0,0 +1,200 @@ +# Hybrid Cache Architecture + +## Overview + +The hybrid cache architecture optimizes Git repository cloning by splitting the cache into two layers: + +1. **Bare Cache** (persistent, shared) - Stores minimal Git data shared across all requests +2. 
**Working Copy** (temporary, isolated) - Per-request workspace for push validation + +## How pullRemote Works + +### Phase 1: Bare Cache (Persistent, Shared) + +```typescript +const bareRepo = path.join(BARE_CACHE, action.repoName); + +if (bareExists) { + // CACHE HIT: Fast fetch to update existing bare repo + await gitOps.fetch({ + dir: bareRepo, + url: action.url, + bare: true, + depth: 1, + }); + cacheManager.touchRepository(action.repoName); // Update LRU timestamp +} else { + // CACHE MISS: Clone new bare repository + await gitOps.clone({ + dir: bareRepo, + url: action.url, + bare: true, + depth: 1, + }); +} +``` + +**Key Points:** + +- Bare repositories contain only `.git` data (no working tree) +- Shared across all push requests for the same repository +- Uses LRU eviction based on `maxSizeGB` and `maxRepositories` limits +- `touchRepository()` updates access time for LRU tracking + +### Phase 2: Working Copy (Temporary, Isolated) + +```typescript +const workCopy = path.join(WORK_DIR, action.id); +const workCopyPath = path.join(workCopy, action.repoName); + +// Fast local clone from bare cache +await gitOps.cloneLocal({ + sourceDir: bareRepo, + targetDir: workCopyPath, + depth: 1, +}); + +action.proxyGitPath = workCopy; // Used by subsequent processors +``` + +**Key Points:** + +- Each push request gets an isolated working copy +- Cloned from local bare cache (fast, no network) +- Cleaned up after push validation completes + +### Phase 3: Cache Management + +```typescript +const evictionResult = await cacheManager.enforceLimits(); +``` + +**CacheManager** uses LRU (Least Recently Used) eviction: + +- Monitors total cache size and repository count +- Removes oldest repositories when limits are exceeded +- Thread-safe via mutex to prevent race conditions + +## Performance Benchmarks + +Real-world performance comparison using the Backstage repository (177MB cached bare repo with `depth: 1`). 
+ +### Benchmark Setup + +- **Test Repository**: Backstage (medium-large repository, 177MB cached) +- **Test Method**: 10 consecutive push operations (1 cold + 9 warm) +- **Cache Configuration**: Bare repositories with `depth: 1` (shallow clone) +- **Benchmark Script**: [`cache-benchmark.sh`](../../../../scripts/cache-benchmark.sh) + +### Results Comparison + +| Metric | Without Cache (main) | With Cache (PR) | Improvement | +| ------------------- | -------------------- | --------------- | -------------------- | +| **Cold Push** | 20.63s | 17.58s | 15% faster | +| **Warm Push (avg)** | 19.88s | **6.68s** | **66% faster** | +| **Warm Push (min)** | 18.37s | 6.34s | 65% faster | +| **Warm Push (max)** | 21.22s | 7.12s | 66% faster | +| **Std Deviation** | 0.99s | 0.19s | 5x more consistent | +| **Speedup Ratio** | 1.03x | **2.63x** | **155% improvement** | + +### Time Saved + +**Without Cache (main branch)**: + +- 9 warm pushes: 178.93s total +- Every push requires full GitHub clone + +**With Cache (this PR)**: + +- 9 warm pushes: 60.16s total +- **Time saved: 98.10s (1.6 minutes)** +- **Efficiency gain: 66%** + +### Running the Benchmark + +To reproduce these results with your own repository fork: + +```bash +# Test with cache (this PR branch) +./cache-benchmark.sh owner/repo +``` + +**Example**: + +```bash +./cache-benchmark.sh yourFork/backstage main 10 +``` + +**Note**: Results may vary based on network conditions, GitHub server load, and repository size. The benchmark uses `depth: 1` for all git operations. You must have push access to the repository you're testing. 
+ +## Cache Configuration + +In `proxy.config.json`: + +```json +{ + "cache": { + "maxSizeGB": 2, // Maximum total cache size + "maxRepositories": 50, // Maximum number of cached repos + "cacheDir": "./.remote/cache" // Bare cache location + } +} +``` + +## Concurrency & Thread Safety + +The `CacheManager` uses a Promise-based mutex to serialize cache operations: + +```typescript +private mutex: Promise = Promise.resolve(); + +async touchRepository(repoName: string): Promise { + return this.acquireLock(() => { + // Atomic operation + }); +} + +async enforceLimits(): Promise<{ removedRepos: string[]; freedBytes: number }> { + return this.acquireLock(() => { + // Atomic operation + }); +} +``` + +**Race Conditions Prevented:** + +- Multiple `enforceLimits()` calls removing the same repository +- `touchRepository()` updating while `enforceLimits()` is removing +- `getCacheStats()` reading while repositories are being deleted + +## Cleanup Strategy + +**Bare Cache:** + +- Cleaned via LRU eviction (oldest repositories removed first) +- Triggered after every push via `enforceLimits()` +- Respects `maxSizeGB` and `maxRepositories` limits + +**Working Copies:** + +- Automatically cleaned by `clearBareClone.ts` after push completes +- Each request's `action.id` directory is deleted +- No manual cleanup needed + +## Monitoring & Debugging + +**Cache Statistics:** + +```typescript +const stats = cacheManager.getCacheStats(); +console.log(`Total repos: ${stats.totalRepositories}`); +console.log(`Total size: ${stats.totalSizeBytes / (1024 * 1024)}MB`); +``` + +**LRU Eviction Logs:** + +```typescript +const result = await cacheManager.enforceLimits(); +console.log(`Evicted ${result.removedRepos.length} repositories`); +console.log(`Freed ${result.freedBytes / (1024 * 1024)}MB`); +``` From 5b94ec9c75bd21d93957dc76bcff436625a377e6 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 5 Nov 2025 11:53:30 +0100 Subject: [PATCH 24/26] chore: add results csv to gitignore --- 
.gitignore | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.gitignore b/.gitignore index c6076f1af..f38377e79 100644 --- a/.gitignore +++ b/.gitignore @@ -275,3 +275,7 @@ website/.docusaurus # Generated from testing /test/fixtures/test-package/package-lock.json + +# Benchmark results +benchmark-detailed-*/ +results*.csv From 7c05bfb1adc4734a6d211e21b31f91b33ca03480 Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 5 Nov 2025 11:54:09 +0100 Subject: [PATCH 25/26] chore: move and edit benchmark script --- benchmark-cache.sh | 205 ------------------------------------- scripts/cache-benchmark.sh | 173 +++++++++++++++++++++++++++++++ 2 files changed, 173 insertions(+), 205 deletions(-) delete mode 100755 benchmark-cache.sh create mode 100755 scripts/cache-benchmark.sh diff --git a/benchmark-cache.sh b/benchmark-cache.sh deleted file mode 100755 index 27ab9a899..000000000 --- a/benchmark-cache.sh +++ /dev/null @@ -1,205 +0,0 @@ -#!/bin/bash - -set -e - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -echo -e "${BLUE}=== Git Proxy Hybrid Cache Benchmark ===${NC}" -echo "" - -# Configuration -PROXY_URL="http://localhost:8000" -GITHUB_REPO="${1:-fabiovincenzi/open-webui}" -TEST_BRANCH="${2:-main}" -NUM_PUSHES="${3:-3}" - -# Construct proxy URL (format: http://localhost:8000/github.com/user/repo.git) -PROXY_REPO_URL="$PROXY_URL/github.com/$GITHUB_REPO.git" - -echo "Configuration:" -echo " Proxy URL: $PROXY_URL" -echo " GitHub Repo: $GITHUB_REPO" -echo " Proxy Repo URL: $PROXY_REPO_URL" -echo " Branch: $TEST_BRANCH" -echo " Number of pushes: $NUM_PUSHES" -echo "" - -# Check if git-proxy is running -echo -e "${YELLOW}Checking if git-proxy is running...${NC}" -if ! 
curl -s "$PROXY_URL" > /dev/null 2>&1; then - echo -e "${RED}ERROR: git-proxy is not running on $PROXY_URL${NC}" - echo "Please start git-proxy with: npm start" - exit 1 -fi -echo -e "${GREEN}✓ git-proxy is running${NC}" -echo "" - -# Get GitHub credentials from git credential helper -echo -e "${YELLOW}Retrieving GitHub credentials...${NC}" -CREDENTIALS=$(echo -e "protocol=https\nhost=github.com\n" | git credential fill 2>/dev/null) -if [ -z "$CREDENTIALS" ]; then - echo -e "${RED}ERROR: No GitHub credentials found${NC}" - echo "Please configure git credentials first:" - echo " git config --global credential.helper store" - echo " git clone https://github.com/your-repo.git" - exit 1 -fi - -GITHUB_USERNAME=$(echo "$CREDENTIALS" | grep "^username=" | cut -d= -f2) -GITHUB_TOKEN=$(echo "$CREDENTIALS" | grep "^password=" | cut -d= -f2) - -if [ -z "$GITHUB_USERNAME" ] || [ -z "$GITHUB_TOKEN" ]; then - echo -e "${RED}ERROR: Could not extract GitHub credentials${NC}" - exit 1 -fi - -echo -e "${GREEN}✓ GitHub credentials retrieved for user: $GITHUB_USERNAME${NC}" -echo "" - -# Setup test directory -TEST_DIR="./benchmark-test-$(date +%s)" -echo -e "${YELLOW}Creating test directory: $TEST_DIR${NC}" -mkdir -p "$TEST_DIR" -cd "$TEST_DIR" - -REPO_NAME=$(basename "$GITHUB_REPO") - -# Clear cache before starting -echo -e "${YELLOW}Clearing cache before benchmark...${NC}" -rm -rf ../.remote/cache/* ../.remote/work/* 2>/dev/null || true -echo -e "${GREEN}✓ Cache cleared${NC}" -echo "" - -measure_push() { - local push_number=$1 - local is_first=$2 - - echo -e "${BLUE}=== Push #$push_number $([ "$is_first" = "true" ] && echo "(COLD CACHE)" || echo "(WARM CACHE)") ===${NC}" - - # Clone repo through proxy - echo "Cloning repository..." 
- START_CLONE=$(date +%s.%N) - - rm -rf "$REPO_NAME" 2>/dev/null || true - git clone "$PROXY_REPO_URL" "$REPO_NAME" > clone.log 2>&1 - - END_CLONE=$(date +%s.%N) - CLONE_TIME=$(echo "$END_CLONE - $START_CLONE" | bc) - - cd "$REPO_NAME" - - # Get email from git config - GITHUB_EMAIL=$(git config --global user.email) - if [ -z "$GITHUB_EMAIL" ]; then - GITHUB_EMAIL="$GITHUB_USERNAME@users.noreply.github.com" - fi - - git config user.email "$GITHUB_EMAIL" - git config user.name "$GITHUB_USERNAME" - - # Create a test commit - echo "benchmark-$push_number-$(date +%s)" > "benchmark-$push_number.txt" - git add "benchmark-$push_number.txt" - git commit -m "Benchmark push #$push_number" > /dev/null 2>&1 - - # Push through proxy with credentials - echo "Pushing commit..." - START_PUSH=$(date +%s.%N) - - # Use credential helper to pass GitHub credentials - git -c credential.helper="!f() { echo username=$GITHUB_USERNAME; echo password=$GITHUB_TOKEN; }; f" \ - push "$PROXY_REPO_URL" "HEAD:refs/heads/benchmark-test-$push_number" > push.log 2>&1 || true - - END_PUSH=$(date +%s.%N) - PUSH_TIME=$(echo "$END_PUSH - $START_PUSH" | bc) - - TOTAL_TIME=$(echo "$CLONE_TIME + $PUSH_TIME" | bc) - - cd .. 
- - echo -e "${GREEN}Results:${NC}" - echo " Clone time: ${CLONE_TIME}s" - echo " Push time: ${PUSH_TIME}s" - echo " Total time: ${TOTAL_TIME}s" - echo "" - - # Store results - echo "$push_number,$is_first,$CLONE_TIME,$PUSH_TIME,$TOTAL_TIME" >> results.csv -} - -# Initialize results file -echo "push_number,is_cold_cache,clone_time,push_time,total_time" > results.csv - -# Measure first push (cold cache) -measure_push 1 true - -# Get cache stats after first push -echo -e "${BLUE}=== Cache Statistics After First Push ===${NC}" -CACHE_DIR="../.remote/cache" -if [ -d "$CACHE_DIR" ]; then - CACHE_SIZE=$(du -sh "$CACHE_DIR" | cut -f1) - CACHE_REPOS=$(ls -1 "$CACHE_DIR" | wc -l) - echo " Cache size: $CACHE_SIZE" - echo " Cached repos: $CACHE_REPOS" -else - echo " Cache directory not found" -fi -echo "" - -# Measure subsequent pushes (warm cache) -for i in $(seq 2 $NUM_PUSHES); do - measure_push $i false -done - -# Final cache stats -echo -e "${BLUE}=== Final Cache Statistics ===${NC}" -if [ -d "$CACHE_DIR" ]; then - CACHE_SIZE=$(du -sh "$CACHE_DIR" | cut -f1) - CACHE_REPOS=$(ls -1 "$CACHE_DIR" | wc -l) - echo " Cache size: $CACHE_SIZE" - echo " Cached repos: $CACHE_REPOS" - echo "" - echo " Cached repositories:" - ls -lh "$CACHE_DIR" | tail -n +2 | awk '{print " " $9 " (" $5 ")"}' -fi -echo "" - -# Calculate and display summary -echo -e "${BLUE}=== Performance Summary ===${NC}" -echo "" - -# Read results -FIRST_PUSH_TIME=$(awk -F, 'NR==2 {print $5}' results.csv) -AVG_WARM_TIME=$(awk -F, 'NR>2 {sum+=$5; count++} END {if(count>0) print sum/count; else print 0}' results.csv) - -echo "First push (cold cache): ${FIRST_PUSH_TIME}s" -if (( $(echo "$AVG_WARM_TIME > 0" | bc -l) )); then - echo "Average warm push: ${AVG_WARM_TIME}s" - SPEEDUP=$(echo "scale=2; $FIRST_PUSH_TIME / $AVG_WARM_TIME" | bc) - IMPROVEMENT=$(echo "scale=1; (1 - $AVG_WARM_TIME / $FIRST_PUSH_TIME) * 100" | bc) - echo "" - echo -e "${GREEN}Performance improvement: ${IMPROVEMENT}% faster (${SPEEDUP}x 
speedup)${NC}" -fi -echo "" - -# Show detailed results table -echo -e "${BLUE}=== Detailed Results ===${NC}" -echo "" -printf "%-12s %-12s %-12s %-12s %-12s\n" "Push #" "Cache" "Clone (s)" "Push (s)" "Total (s)" -printf "%-12s %-12s %-12s %-12s %-12s\n" "------" "-----" "---------" "--------" "---------" -awk -F, 'NR>1 { - cache = ($2 == "true") ? "COLD" : "WARM" - printf "%-12s %-12s %-12.2f %-12.2f %-12.2f\n", $1, cache, $3, $4, $5 -}' results.csv -echo "" - -# Cleanup prompt -echo -e "${YELLOW}Test directory: $TEST_DIR${NC}" -echo "To clean up: rm -rf $TEST_DIR" -echo "" -echo -e "${GREEN}✓ Benchmark complete!${NC}" diff --git a/scripts/cache-benchmark.sh b/scripts/cache-benchmark.sh new file mode 100755 index 000000000..6144cc874 --- /dev/null +++ b/scripts/cache-benchmark.sh @@ -0,0 +1,173 @@ +#!/bin/bash + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +BOLD='\033[1m' +NC='\033[0m' + +echo -e "${BOLD}${BLUE}═══════════════════════════════════════════════════════════${NC}" +echo -e "${BOLD}${BLUE} Git Proxy Hybrid Cache - Detailed Performance Benchmark${NC}" +echo -e "${BOLD}${BLUE}═══════════════════════════════════════════════════════════${NC}" +echo "" + +PROXY_URL="http://localhost:8000" +GITHUB_REPO="${1}" +TEST_BRANCH="${2:-main}" +NUM_PUSHES="${3:-10}" + +if [ -z "$GITHUB_REPO" ]; then + echo -e "${RED}ERROR: GitHub repository required${NC}" + echo "" + echo "Usage: $0 [branch] [num_pushes]" + echo "Example: $0 yourFork/backstage main 10" + echo "" + echo -e "${YELLOW}Note: You must have push access to the specified repository${NC}" + exit 1 +fi + +PROXY_REPO_URL="$PROXY_URL/github.com/$GITHUB_REPO.git" + +echo -e "${CYAN}Configuration:${NC}" +echo " Proxy URL: $PROXY_URL" +echo " GitHub Repo: $GITHUB_REPO" +echo " Branch: $TEST_BRANCH" +echo " Number of pushes: $NUM_PUSHES (1 cold + $((NUM_PUSHES-1)) warm)" +echo "" + +echo -e "${YELLOW}[1/5] Checking git-proxy status...${NC}" +if ! 
curl -s "$PROXY_URL" > /dev/null 2>&1; then + echo -e "${RED}✗ ERROR: git-proxy not running on $PROXY_URL${NC}" + exit 1 +fi +echo -e "${GREEN}✓ Git-proxy is running${NC}\n" + +echo -e "${YELLOW}[2/5] Retrieving GitHub credentials...${NC}" +CREDENTIALS=$(echo -e "protocol=https\nhost=github.com\n" | git credential fill 2>/dev/null) +if [ -z "$CREDENTIALS" ]; then + echo -e "${RED}✗ ERROR: No GitHub credentials found${NC}" + exit 1 +fi + +GITHUB_USERNAME=$(echo "$CREDENTIALS" | grep "^username=" | cut -d= -f2) +GITHUB_TOKEN=$(echo "$CREDENTIALS" | grep "^password=" | cut -d= -f2) +GITHUB_EMAIL=$(git config --global user.email || echo "$GITHUB_USERNAME@users.noreply.github.com") +echo -e "${GREEN}✓ Credentials retrieved for: $GITHUB_USERNAME${NC}\n" + +TEST_DIR="./benchmark-detailed-$(date +%s)" +echo -e "${YELLOW}[3/5] Setting up test environment...${NC}" +mkdir -p "$TEST_DIR" && cd "$TEST_DIR" +REPO_NAME=$(basename "$GITHUB_REPO") + +echo " → Clearing cache..." +rm -rf ../.remote/cache/* ../.remote/work/* 2>/dev/null || true +echo -e "${GREEN}✓ Cache cleared${NC}\n" + +echo -e "${YELLOW}[4/5] Performing initial clone (one-time operation)...${NC}" +echo -e "${CYAN}→ Cloning $GITHUB_REPO via proxy...${NC}\n" +START_INITIAL_CLONE=$(date +%s.%N) +git clone "$PROXY_REPO_URL" "$REPO_NAME" +CLONE_EXIT_CODE=$? 
+END_INITIAL_CLONE=$(date +%s.%N) + +INITIAL_CLONE_TIME=$(echo "$END_INITIAL_CLONE - $START_INITIAL_CLONE" | bc) + +cd "$REPO_NAME" +git config user.email "$GITHUB_EMAIL" +git config user.name "$GITHUB_USERNAME" +echo -e "${GREEN}✓ Initial clone completed in ${INITIAL_CLONE_TIME}s${NC}\n" + +RESULTS_FILE="../results-detailed.csv" +echo "push_number,is_cold,push_time_s" > "$RESULTS_FILE" + +perform_push() { + local push_num=$1 + local is_cold=$2 + local label=$([ "$is_cold" = "true" ] && echo "COLD CACHE" || echo "WARM CACHE") + + echo -e "${BLUE}═══ Push #$push_num ($label) ═══${NC}" + + local commit_file="benchmark-push-$push_num-$(date +%s).txt" + echo "Benchmark push $push_num at $(date)" > "$commit_file" + git add "$commit_file" > /dev/null 2>&1 + git commit -m "Benchmark push #$push_num" > /dev/null 2>&1 + + echo -n " Pushing... " + START_PUSH=$(date +%s.%N) + PUSH_OUTPUT=$(git -c credential.helper="!f() { echo username=$GITHUB_USERNAME; echo password=$GITHUB_TOKEN; }; f" \ + push "$PROXY_REPO_URL" "HEAD:refs/heads/benchmark-test-$push_num" 2>&1) + PUSH_EXIT_CODE=$? + END_PUSH=$(date +%s.%N) + PUSH_TIME=$(echo "$END_PUSH - $START_PUSH" | bc) + + if [ $PUSH_EXIT_CODE -ne 0 ]; then + echo -e "${RED}✗ FAILED${NC}" + echo "$PUSH_OUTPUT" + echo "" + exit 1 + fi + + echo -e "${GREEN}✓ ${PUSH_TIME}s${NC}" + echo "$push_num,$is_cold,$PUSH_TIME" >> "$RESULTS_FILE" + echo "" +} + +echo -e "${YELLOW}[5/5] Running push benchmark...${NC}\n" + +perform_push 1 true +for i in $(seq 2 $NUM_PUSHES); do + perform_push $i false +done + +cd .. 
+ +echo -e "${BOLD}${BLUE}═══════════════════════════════════════════════════════════${NC}" +echo -e "${BOLD}${BLUE} Performance Analysis ${NC}" +echo -e "${BOLD}${BLUE}═══════════════════════════════════════════════════════════${NC}\n" + +RESULTS_CSV="results-detailed.csv" +COLD_TIME=$(awk -F, 'NR==2 {print $3}' "$RESULTS_CSV") +WARM_TIMES=$(awk -F, 'NR>2 {print $3}' "$RESULTS_CSV") + +WARM_MIN=$(echo "$WARM_TIMES" | sort -n | head -1) +WARM_MAX=$(echo "$WARM_TIMES" | sort -n | tail -1) +WARM_AVG=$(echo "$WARM_TIMES" | awk '{sum+=$1; count++} END {print sum/count}') +WARM_COUNT=$(echo "$WARM_TIMES" | wc -l | tr -d ' ') +WARM_STDDEV=$(echo "$WARM_TIMES" | awk -v avg="$WARM_AVG" '{sum+=($1-avg)^2; count++} END {print sqrt(sum/count)}') + +SPEEDUP=$(echo "scale=2; $COLD_TIME / $WARM_AVG" | bc) +IMPROVEMENT=$(echo "scale=1; (1 - $WARM_AVG / $COLD_TIME) * 100" | bc) + +TOTAL_WARM_TIME=$(echo "$WARM_TIMES" | awk '{sum+=$1} END {print sum}') +HYPOTHETICAL_NO_CACHE=$(echo "scale=2; $COLD_TIME * $WARM_COUNT" | bc) +TIME_SAVED=$(echo "scale=2; $HYPOTHETICAL_NO_CACHE - $TOTAL_WARM_TIME" | bc) +TIME_SAVED_MINUTES=$(echo "scale=1; $TIME_SAVED / 60" | bc) + +echo -e "${CYAN}${BOLD}Push Performance:${NC}\n" +printf " %-25s %10.2fs\n" "Cold cache (Push #1):" "$COLD_TIME" +printf " %-25s %10.2fs\n" "Warm cache (average):" "$WARM_AVG" +printf " %-25s %10.2fs\n" "Warm cache (min):" "$WARM_MIN" +printf " %-25s %10.2fs\n" "Warm cache (max):" "$WARM_MAX" +printf " %-25s %10.2fs\n" "Warm cache (std dev):" "$WARM_STDDEV" + +echo -e "\n${GREEN}${BOLD}Performance Improvement:${NC}\n" +printf " %-25s %10.1f%%\n" "Speed improvement:" "$IMPROVEMENT" +printf " %-25s %10.2fx\n" "Speedup ratio:" "$SPEEDUP" + +echo -e "\n${CYAN}${BOLD}Total Time Saved:${NC}\n" +printf " %-30s %10.2fs\n" "Total warm pushes time:" "$TOTAL_WARM_TIME" +printf " %-30s %10.2fs\n" "Hypothetical (no cache):" "$HYPOTHETICAL_NO_CACHE" +printf " %-30s %10.2fs (%.1fm)\n" "Time saved:" "$TIME_SAVED" "$TIME_SAVED_MINUTES" + 
+echo -e "\n${CYAN}${BOLD}Cache Statistics:${NC}\n" +CACHE_DIR="../.remote/cache" +if [ -d "$CACHE_DIR" ]; then + FINAL_CACHE_SIZE=$(du -sh "$CACHE_DIR" 2>/dev/null | cut -f1) + FINAL_CACHE_COUNT=$(ls -1 "$CACHE_DIR" 2>/dev/null | wc -l | tr -d ' ') + printf " %-25s %10s\n" "Cache size:" "$FINAL_CACHE_SIZE" + printf " %-25s %10s\n" "Cached repositories:" "$FINAL_CACHE_COUNT" +fi + +echo -e "\n${GREEN}${BOLD}✓ Benchmark complete!${NC}" From a42bd2ee871a148f366a37bfe5fe53bc14000f5b Mon Sep 17 00:00:00 2001 From: fabiovincenzi Date: Wed, 5 Nov 2025 11:54:55 +0100 Subject: [PATCH 26/26] refactor: use multiplier for speed improvement --- src/proxy/processors/push-action/README.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/proxy/processors/push-action/README.md b/src/proxy/processors/push-action/README.md index 07ba12de7..6c3ccd07b 100644 --- a/src/proxy/processors/push-action/README.md +++ b/src/proxy/processors/push-action/README.md @@ -90,12 +90,12 @@ Real-world performance comparison using the Backstage repository (177MB cached b | Metric | Without Cache (main) | With Cache (PR) | Improvement | | ------------------- | -------------------- | --------------- | -------------------- | -| **Cold Push** | 20.63s | 17.58s | 15% faster | -| **Warm Push (avg)** | 19.88s | **6.68s** | **66% faster** | -| **Warm Push (min)** | 18.37s | 6.34s | 65% faster | -| **Warm Push (max)** | 21.22s | 7.12s | 66% faster | +| **Cold Push** | 20.63s | 17.58s | 1.2x faster | +| **Warm Push (avg)** | 19.88s | **6.68s** | **3x faster** | +| **Warm Push (min)** | 18.37s | 6.34s | 2.9x faster | +| **Warm Push (max)** | 21.22s | 7.12s | 3x faster | | **Std Deviation** | 0.99s | 0.19s | 5x more consistent | -| **Speedup Ratio** | 1.03x | **2.63x** | **155% improvement** | +| **Speedup Ratio** | 1.03x | **2.63x** | **2.6x improvement** | ### Time Saved @@ -116,13 +116,13 @@ To reproduce these results with your own repository fork: ```bash # Test with cache 
(this PR branch) -./cache-benchmark.sh owner/repo +./scripts/cache-benchmark.sh owner/repo ``` **Example**: ```bash -./cache-benchmark.sh yourFork/backstage main 10 +./scripts/cache-benchmark.sh yourFork/backstage main 10 ``` **Note**: Results may vary based on network conditions, GitHub server load, and repository size. The benchmark uses `depth: 1` for all git operations. You must have push access to the repository you're testing.