Adds missing server dir

This commit is contained in:
2025-07-11 09:26:17 -04:00
parent c20b9e98f8
commit 2f2f647a9e
7 changed files with 1879 additions and 0 deletions

336
server/github.ts Normal file
View File

@@ -0,0 +1,336 @@
import axios from 'axios';
import { Repository } from './config';
/** A single cached API payload plus the metadata needed to expire it. */
interface CacheEntry<T> {
  /** The cached response body. */
  data: T;
  /** Epoch milliseconds when the entry was stored. */
  timestamp: number;
  /** Lifetime in milliseconds; stale once `Date.now() - timestamp >= ttl`. */
  ttl: number;
}
/**
 * Snapshot of GitHub's rate-limit state, mirrored from the
 * `x-ratelimit-*` response headers of the most recent API call.
 */
interface RateLimitInfo {
  /** Total requests allowed in the current window. */
  limit: number;
  /** Requests remaining in the current window. */
  remaining: number;
  /** Epoch milliseconds when the window resets. */
  resetTime: number;
  /** Requests consumed so far in the current window. */
  used: number;
}
/**
 * Throttles outbound requests two ways: at most `maxConcurrent` in flight
 * at once, and at most `maxRequestsPerSecond` started per rolling second.
 */
class RequestQueue {
  private activeRequests = 0;
  // Start timestamps (epoch ms) of requests begun within the last second.
  private requestTimes: number[] = [];
  private maxConcurrent = 10; // Maximum concurrent requests
  private maxRequestsPerSecond = 10; // GitHub allows up to 5000/hour, so 10/second is very conservative

  /**
   * Runs `request` once both the concurrency and per-second limits allow it.
   * Resolves with the request's own result; failures are logged and
   * re-thrown so callers can handle them.
   */
  async add<T>(request: () => Promise<T>): Promise<T> {
    // Wait if we're at max concurrent requests.
    while (this.activeRequests >= this.maxConcurrent) {
      console.log(`⏳ Waiting for concurrent request slot (${this.activeRequests}/${this.maxConcurrent})`);
      await new Promise(resolve => setTimeout(resolve, 50));
    }
    // Throttle to maxRequestsPerSecond. This is a loop rather than a single
    // check: other queued calls may have started while we slept. `now` is
    // refreshed after every sleep so the timestamp recorded below reflects
    // the actual start time (the previous version pushed a stale pre-sleep
    // timestamp, which skewed subsequent throttle calculations).
    let now = Date.now();
    this.requestTimes = this.requestTimes.filter(time => now - time < 1000);
    while (this.requestTimes.length >= this.maxRequestsPerSecond) {
      const waitTime = Math.max(0, 1000 - (now - this.requestTimes[0]));
      console.log(`⏳ Rate limiting: waiting ${waitTime}ms (${this.requestTimes.length}/${this.maxRequestsPerSecond} requests/sec)`);
      await new Promise(resolve => setTimeout(resolve, waitTime));
      now = Date.now();
      this.requestTimes = this.requestTimes.filter(time => now - time < 1000);
    }
    this.activeRequests++;
    this.requestTimes.push(now);
    console.log(`🔄 Starting request (${this.activeRequests}/${this.maxConcurrent} active, ${this.requestTimes.length}/sec)`);
    try {
      const result = await request();
      console.log(`✅ Request completed (${this.activeRequests - 1}/${this.maxConcurrent} remaining)`);
      return result;
    } catch (error) {
      console.error('Queue request failed:', error);
      throw error;
    } finally {
      // Always release the concurrency slot, even on failure.
      this.activeRequests--;
    }
  }
}
/**
 * Shape of one GitHub Actions workflow run as returned by the
 * `GET /repos/{owner}/{repo}/actions/runs` endpoint.
 * NOTE(review): only the fields this app consumes are declared here;
 * the real API payload carries many more.
 */
export interface WorkflowRun {
  id: number;
  name: string;
  /** Title shown for the run in the Actions UI. */
  display_title: string;
  /** Lifecycle state of the run. */
  status: 'queued' | 'in_progress' | 'completed';
  /** Final outcome; null until the run completes. */
  conclusion: 'success' | 'failure' | 'neutral' | 'cancelled' | 'skipped' | 'timed_out' | 'action_required' | null;
  workflow_id: number;
  /** Branch the run was triggered on. */
  head_branch: string;
  /** Commit SHA the run was triggered for. */
  head_sha: string;
  run_number: number;
  /** Event that triggered the run (e.g. "push"). */
  event: string;
  created_at: string;
  updated_at: string;
  /** Link to the run in the GitHub web UI. */
  html_url: string;
  /** Repository the run belongs to. */
  repository: {
    id: number;
    name: string;
    full_name: string;
    owner: {
      login: string;
      avatar_url: string;
    };
  };
  /** Commit that triggered the run. */
  head_commit: {
    id: string;
    message: string;
    author: {
      name: string;
      email: string;
    };
  };
  /** User that initiated the run. */
  actor: {
    login: string;
    avatar_url: string;
  };
}
// Base URL for all GitHub REST API requests.
const GITHUB_API_BASE = 'https://api.github.com';
/**
 * GitHub REST API client with in-memory response caching, rate-limit
 * tracking, and a shared concurrency-limited request queue.
 *
 * All network access funnels through `makeRequest`, which consults the
 * cache first, waits when the reported quota is nearly exhausted, and
 * records the `x-ratelimit-*` headers from every response.
 */
export class GitHubService {
  private token: string;
  // Cache key (endpoint + sorted params) -> payload with a per-entry TTL.
  private cache = new Map<string, CacheEntry<any>>();
  // Optimistic defaults until the first response reports real quota headers.
  private rateLimitInfo: RateLimitInfo = {
    limit: 5000,
    remaining: 5000,
    resetTime: Date.now() + (60 * 60 * 1000), // 1 hour from now
    used: 0
  };
  private requestQueue = new RequestQueue();
  private readonly DEFAULT_TTL: number;
  private readonly WORKFLOW_RUNS_TTL: number;

  /**
   * @param token GitHub token sent with every request.
   * @param cacheTimeoutSeconds How long cached responses stay valid (default 300s).
   */
  constructor(token: string, cacheTimeoutSeconds: number = 300) {
    this.token = token;
    this.DEFAULT_TTL = cacheTimeoutSeconds * 1000;
    this.WORKFLOW_RUNS_TTL = cacheTimeoutSeconds * 1000;
    console.log(`🚀 GitHubService initialized with caching (${cacheTimeoutSeconds}s) and rate limiting`);
  }

  /** Standard headers for every GitHub API call. */
  private getHeaders() {
    return {
      'Authorization': `token ${this.token}`,
      'Accept': 'application/vnd.github.v3+json',
      'X-GitHub-Api-Version': '2022-11-28'
    };
  }

  /**
   * Extracts "owner/repo" from a `/repos/...` endpoint URL for readable
   * cache keys and log lines. (Previously duplicated at three call sites.)
   */
  private describeRepo(endpoint: string): string {
    const repoMatch = endpoint.match(/\/repos\/([^/]+\/[^/]+)\//);
    return repoMatch ? repoMatch[1] : 'unknown';
  }

  /**
   * Builds a deterministic cache key from the endpoint and its query params.
   * Params are copied in sorted key order so `{a, b}` and `{b, a}` map to
   * the same entry.
   */
  private getCacheKey(endpoint: string, params?: any): string {
    const repoInfo = this.describeRepo(endpoint);
    // Create a sorted, clean parameter string.
    const cleanParams = params ? Object.keys(params).sort().reduce((obj: any, key) => {
      obj[key] = params[key];
      return obj;
    }, {}) : {};
    const cacheKey = `${repoInfo}:${endpoint.split('/').pop()}:${JSON.stringify(cleanParams)}`;
    console.log(`🔑 Generated cache key: ${cacheKey} for ${endpoint}`);
    return cacheKey;
  }

  /** True while the entry is younger than its TTL. */
  private isCacheValid<T>(entry: CacheEntry<T>): boolean {
    return Date.now() - entry.timestamp < entry.ttl;
  }

  /** Stores `data` under `key` with the given TTL (ms). */
  private setCache<T>(key: string, data: T, ttl: number = this.DEFAULT_TTL): void {
    this.cache.set(key, {
      data,
      timestamp: Date.now(),
      ttl
    });
    console.log(`💾 SET Cache: ${key} (TTL: ${ttl}ms, Size: ${this.cache.size})`);
  }

  /**
   * Returns the cached value, or null on a miss. Expired entries are
   * evicted on read. Callers must compare against null (not truthiness)
   * since a cached payload could itself be falsy.
   */
  private getCache<T>(key: string): T | null {
    const entry = this.cache.get(key);
    if (!entry) {
      console.log(`💾 GET Cache: ${key} - NOT FOUND`);
      return null;
    }
    if (!this.isCacheValid(entry)) {
      console.log(`💾 GET Cache: ${key} - EXPIRED (age: ${Date.now() - entry.timestamp}ms, ttl: ${entry.ttl}ms)`);
      this.cache.delete(key);
      return null;
    }
    console.log(`💾 GET Cache: ${key} - HIT (age: ${Date.now() - entry.timestamp}ms, ttl: ${entry.ttl}ms)`);
    return entry.data;
  }

  /** Mirrors the `x-ratelimit-*` headers of a response into local state. */
  private updateRateLimitInfo(headers: any): void {
    if (headers['x-ratelimit-limit']) {
      this.rateLimitInfo.limit = parseInt(headers['x-ratelimit-limit'], 10);
    }
    if (headers['x-ratelimit-remaining']) {
      this.rateLimitInfo.remaining = parseInt(headers['x-ratelimit-remaining'], 10);
    }
    if (headers['x-ratelimit-reset']) {
      // Header is epoch seconds; convert to milliseconds.
      this.rateLimitInfo.resetTime = parseInt(headers['x-ratelimit-reset'], 10) * 1000;
    }
    if (headers['x-ratelimit-used']) {
      this.rateLimitInfo.used = parseInt(headers['x-ratelimit-used'], 10);
    }
  }

  /** Sleeps until the quota window resets when fewer than ~10 calls remain. */
  private async shouldWaitForRateLimit(): Promise<void> {
    if (this.rateLimitInfo.remaining <= 10) {
      const waitTime = Math.max(0, this.rateLimitInfo.resetTime - Date.now());
      if (waitTime > 0) {
        console.log(`⏳ Rate limit nearly exceeded. Waiting ${Math.ceil(waitTime / 1000)} seconds...`);
        await new Promise(resolve => setTimeout(resolve, waitTime));
      }
    }
  }

  /**
   * Central GET helper: cache -> quota wait -> queued axios request.
   * Successful responses are cached for `ttl` ms. A 403 with an exhausted
   * quota is rethrown as a descriptive Error; other failures propagate as-is.
   */
  private async makeRequest<T>(url: string, params?: any, ttl: number = this.DEFAULT_TTL): Promise<T> {
    const cacheKey = this.getCacheKey(url, params);
    const repoInfo = this.describeRepo(url);
    // Explicit null check: a legitimately falsy cached payload must not be
    // mistaken for a cache miss (the old `if (cached)` truthiness test was).
    const cached = this.getCache<T>(cacheKey);
    if (cached !== null) {
      console.log(`💾 Cache HIT: ${repoInfo} - ${url.split('/').pop()}`);
      return cached;
    }
    console.log(`🌐 Cache MISS: ${repoInfo} - ${url.split('/').pop()} - Making API request`);
    // Check rate limit before joining the queue.
    await this.shouldWaitForRateLimit();
    // Make the request through the shared queue.
    return this.requestQueue.add(async () => {
      try {
        const response = await axios.get(url, {
          headers: this.getHeaders(),
          params
        });
        this.updateRateLimitInfo(response.headers);
        console.log(`📊 API Rate Limit: ${this.rateLimitInfo.remaining}/${this.rateLimitInfo.limit} remaining`);
        this.setCache(cacheKey, response.data, ttl);
        console.log(`💾 Cached response for ${Math.round(ttl/1000)}s`);
        return response.data;
      } catch (error: any) {
        if (error.response?.status === 403 && error.response?.headers['x-ratelimit-remaining'] === '0') {
          console.error('🚫 GitHub API rate limit exceeded');
          throw new Error('GitHub API rate limit exceeded. Please wait before making more requests.');
        }
        throw error;
      }
    });
  }

  /**
   * Fetches the most recent workflow runs for one repository.
   * @param repository Target repo (owner + name).
   * @param per_page How many runs to request (default 1 = latest only).
   * @param branch Optional branch filter.
   * @returns The runs, or [] on any error (errors are logged, not thrown).
   */
  async getWorkflowRuns(repository: Repository, per_page = 1, branch?: string): Promise<WorkflowRun[]> {
    console.log(`📊 getWorkflowRuns called for ${repository.owner}/${repository.name} (per_page: ${per_page}, branch: ${branch})`);
    try {
      const params: any = {
        per_page,
        page: 1
      };
      if (branch) {
        params.branch = branch;
      }
      const url = `${GITHUB_API_BASE}/repos/${repository.owner}/${repository.name}/actions/runs`;
      const response = await this.makeRequest<any>(url, params, this.WORKFLOW_RUNS_TTL);
      // Defensive: a malformed payload without workflow_runs degrades to []
      // instead of crashing on `.length` below.
      const runs: WorkflowRun[] = response.workflow_runs ?? [];
      console.log(`✅ getWorkflowRuns completed for ${repository.owner}/${repository.name} - ${runs.length} runs`);
      return runs;
    } catch (error) {
      console.error(`Error fetching workflow runs for ${repository.owner}/${repository.name}:`, error);
      return [];
    }
  }

  /**
   * Latest `main`-branch run for each repository, fetched in parallel.
   * The aggregate result is cached under a key derived from the sorted
   * repo list, so the same dashboard refresh is served from cache.
   * Repositories with no runs are omitted from the result.
   */
  async getLatestMainBranchRuns(repositories: Repository[]): Promise<WorkflowRun[]> {
    console.log(`🚀 getLatestMainBranchRuns called for ${repositories.length} repositories`);
    // Sorted repo list so ordering of the input doesn't change the key.
    const repoListHash = repositories.map(r => `${r.owner}/${r.name}`).sort().join(',');
    const aggregateCacheKey = `aggregate:latest-main-runs:${repoListHash}`;
    console.log(`🔑 Aggregate cache key: ${aggregateCacheKey}`);
    // Explicit null check so a cached empty result is still a hit.
    const cached = this.getCache<WorkflowRun[]>(aggregateCacheKey);
    if (cached !== null) {
      console.log(`💾 Cache HIT: Aggregate latest runs for ${repositories.length} repositories`);
      return cached;
    }
    console.log(`🌐 Cache MISS: Aggregate latest runs for ${repositories.length} repositories - Fetching individual repos in parallel`);
    // Fetch in parallel; per-repo caching and the request queue handle
    // rate limiting underneath.
    const promises = repositories.map(async (repo, index) => {
      console.log(`📊 Starting repo ${index + 1}/${repositories.length}: ${repo.owner}/${repo.name}`);
      const runs = await this.getWorkflowRuns(repo, 1, 'main');
      if (runs.length > 0) {
        console.log(`✅ Completed run for ${repo.owner}/${repo.name}`);
        return runs[0];
      } else {
        console.log(`⚠️ No runs found for ${repo.owner}/${repo.name}`);
        return null;
      }
    });
    const results = await Promise.all(promises);
    const filteredResults = results.filter((run): run is WorkflowRun => run !== null);
    // Cache the aggregate result.
    this.setCache(aggregateCacheKey, filteredResults, this.WORKFLOW_RUNS_TTL);
    console.log(`💾 Cached aggregate result for ${repositories.length} repositories (${Math.round(this.WORKFLOW_RUNS_TTL/1000)}s)`);
    return filteredResults;
  }

  /** Convenience wrapper: up to `limit` recent runs across all branches. */
  async getRepositoryWorkflowRuns(repository: Repository, limit = 10): Promise<WorkflowRun[]> {
    return this.getWorkflowRuns(repository, limit);
  }

  /** Defensive copy of the current rate-limit snapshot. */
  getRateLimitInfo(): RateLimitInfo {
    return { ...this.rateLimitInfo };
  }

  /** Drops every cached response. */
  clearCache(): void {
    this.cache.clear();
    console.log('🗑️ Cache cleared');
  }

  /** Current cache size and the list of keys, for diagnostics. */
  getCacheStats(): { size: number; entries: string[] } {
    return {
      size: this.cache.size,
      entries: Array.from(this.cache.keys())
    };
  }

  /** Swaps the auth token without discarding cached responses. */
  updateToken(newToken: string): void {
    this.token = newToken;
    console.log('🔧 GitHub token updated, cache preserved');
  }

  /** Configured default cache TTL, in seconds. */
  getCacheTimeout(): number {
    return this.DEFAULT_TTL / 1000; // Convert back to seconds
  }
}