Adds missing server dir

This commit is contained in:
2025-07-11 09:26:17 -04:00
parent c20b9e98f8
commit 2f2f647a9e
7 changed files with 1879 additions and 0 deletions

327
server/config.test.ts Normal file
View File

@@ -0,0 +1,327 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { readFileSync } from 'fs';
import { loadConfig, validateConfig, Config } from './config';
// Unit tests for loadConfig/validateConfig in ./config.
// fs is mocked so each test controls exactly what readFileSync returns.
// Mock fs module
vi.mock('fs');
const mockedReadFileSync = vi.mocked(readFileSync);
describe('Configuration Management', () => {
  beforeEach(() => {
    // Reset mock call history/implementations so expectations don't leak between tests.
    vi.clearAllMocks();
  });
  afterEach(() => {
    // Clear environment variables
    delete process.env.GITHUB_TOKEN;
    delete process.env.PORT;
    delete process.env.HOST;
  });
  describe('loadConfig', () => {
    it('should load configuration from file', () => {
      const mockConfig = {
        github: {
          token: 'test-token',
          repositories: [
            { owner: 'test-owner', name: 'test-repo' }
          ]
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      };
      mockedReadFileSync.mockReturnValueOnce(JSON.stringify(mockConfig));
      const result = loadConfig();
      expect(result).toEqual(mockConfig);
      // loadConfig is expected to read a config.json with utf8 encoding.
      expect(mockedReadFileSync).toHaveBeenCalledWith(
        expect.stringContaining('config.json'),
        'utf8'
      );
    });
    // NOTE(review): this test only passes if loadConfig reads process.env at
    // call time — if defaults are captured at module load, it fails. Verify
    // against the implementation in ./config.
    it('should use environment variables when config file is not found', () => {
      process.env.GITHUB_TOKEN = 'env-token';
      process.env.PORT = '8080';
      process.env.HOST = '127.0.0.1';
      mockedReadFileSync.mockImplementationOnce(() => {
        throw new Error('ENOENT: no such file or directory');
      });
      const result = loadConfig();
      expect(result).toEqual({
        github: {
          token: 'env-token',
          repositories: []
        },
        server: {
          port: 8080,
          host: '127.0.0.1'
        },
        cache: {
          timeoutSeconds: 300
        }
      });
    });
    it('should merge file config with environment variables', () => {
      process.env.GITHUB_TOKEN = 'env-token';
      process.env.PORT = '8080';
      const mockConfig = {
        github: {
          repositories: [
            { owner: 'test-owner', name: 'test-repo' }
          ]
        },
        server: {
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 600
        }
      };
      mockedReadFileSync.mockReturnValueOnce(JSON.stringify(mockConfig));
      const result = loadConfig();
      expect(result).toEqual({
        github: {
          token: 'env-token', // From environment
          repositories: [
            { owner: 'test-owner', name: 'test-repo' }
          ]
        },
        server: {
          port: 8080, // From environment
          host: '0.0.0.0' // From file
        },
        cache: {
          timeoutSeconds: 600 // From file
        }
      });
    });
    it('should use default values when neither file nor env vars are provided', () => {
      mockedReadFileSync.mockImplementationOnce(() => {
        throw new Error('ENOENT: no such file or directory');
      });
      const result = loadConfig();
      expect(result).toEqual({
        github: {
          token: '',
          repositories: []
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      });
    });
    it('should handle invalid JSON in config file', () => {
      mockedReadFileSync.mockReturnValueOnce('invalid json');
      const result = loadConfig();
      // Should fallback to defaults
      expect(result).toEqual({
        github: {
          token: '',
          repositories: []
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      });
    });
  });
  describe('validateConfig', () => {
    it('should validate correct configuration', () => {
      const config: Config = {
        github: {
          token: 'test-token',
          repositories: [
            { owner: 'test-owner', name: 'test-repo' }
          ]
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      };
      expect(() => validateConfig(config)).not.toThrow();
    });
    it('should throw error when GitHub token is missing', () => {
      const config: Config = {
        github: {
          token: '',
          repositories: [
            { owner: 'test-owner', name: 'test-repo' }
          ]
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      };
      expect(() => validateConfig(config)).toThrow('GitHub token is required');
    });
    it('should throw error when no repositories are configured', () => {
      const config: Config = {
        github: {
          token: 'test-token',
          repositories: []
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      };
      expect(() => validateConfig(config)).toThrow('At least one repository is required');
    });
    it('should throw error for invalid repository configuration', () => {
      const config: Config = {
        github: {
          token: 'test-token',
          repositories: [
            { owner: '', name: 'test-repo' }
          ]
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      };
      expect(() => validateConfig(config)).toThrow('Invalid repository configuration');
    });
    it('should throw error for repository missing name', () => {
      const config: Config = {
        github: {
          token: 'test-token',
          repositories: [
            { owner: 'test-owner', name: '' }
          ]
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      };
      expect(() => validateConfig(config)).toThrow('Invalid repository configuration');
    });
    it('should validate multiple repositories', () => {
      const config: Config = {
        github: {
          token: 'test-token',
          repositories: [
            { owner: 'owner1', name: 'repo1' },
            { owner: 'owner2', name: 'repo2' },
            { owner: 'owner3', name: 'repo3' }
          ]
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      };
      expect(() => validateConfig(config)).not.toThrow();
    });
  });
  describe('cache configuration', () => {
    it('should use default cache timeout when not specified', () => {
      const mockConfig = {
        github: {
          token: 'test-token',
          repositories: [
            { owner: 'test-owner', name: 'test-repo' }
          ]
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        }
      };
      mockedReadFileSync.mockReturnValueOnce(JSON.stringify(mockConfig));
      const result = loadConfig();
      expect(result.cache?.timeoutSeconds).toBe(300);
    });
    it('should use custom cache timeout when specified', () => {
      const mockConfig = {
        github: {
          token: 'test-token',
          repositories: [
            { owner: 'test-owner', name: 'test-repo' }
          ]
        },
        server: {
          port: 3001,
          host: '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 600
        }
      };
      mockedReadFileSync.mockReturnValueOnce(JSON.stringify(mockConfig));
      const result = loadConfig();
      expect(result.cache?.timeoutSeconds).toBe(600);
    });
  });
});

173
server/config.ts Normal file
View File

@@ -0,0 +1,173 @@
import { readFileSync } from 'fs';
import { join } from 'path';
import chokidar from 'chokidar';
import { EventEmitter } from 'events';
// A single GitHub repository to monitor.
export interface Repository {
  owner: string;  // GitHub account/organization that owns the repository
  name: string;   // repository name
  token?: string; // presumably a per-repository token override — not read anywhere in this file; confirm at call sites
}
// Application configuration: file values merged with environment variables.
export interface Config {
  github: {
    token: string;              // GitHub API token used for all requests
    repositories: Repository[]; // repositories to monitor
  };
  server: {
    port: number; // HTTP listen port
    host: string; // HTTP bind address
  };
  cache?: {
    timeoutSeconds?: number; // API response cache TTL in seconds
  };
}
// Fallback configuration used when config.json cannot be read.
// NOTE(review): process.env is read once at module load time here, so
// environment variables set after import are NOT reflected — the tests in
// config.test.ts set env vars per-test and appear to expect call-time
// resolution; confirm this snapshot behavior is intended.
const defaultConfig: Config = {
  github: {
    token: process.env.GITHUB_TOKEN || '',
    repositories: []
  },
  server: {
    port: parseInt(process.env.PORT || '3001'),
    host: process.env.HOST || '0.0.0.0'
  },
  cache: {
    timeoutSeconds: 300
  }
};
/**
 * Loads the application configuration.
 *
 * Precedence per field: config.json value, then the corresponding
 * environment variable (GITHUB_TOKEN / PORT / HOST), then a built-in
 * default. If config.json is missing or contains invalid JSON, the
 * env/default configuration is returned.
 *
 * Fixes over the previous version:
 *  - the fallback is built at call time instead of returning the shared
 *    module-level `defaultConfig`, so env vars set after import are honored
 *    (matching the tests) and callers cannot mutate shared state;
 *  - numeric fields use `??` so an explicit 0 in the file is preserved;
 *  - parseInt is given an explicit radix.
 *
 * @returns a freshly constructed Config; never throws.
 */
export function loadConfig(): Config {
  const configPath = join(process.cwd(), 'config.json');
  try {
    const configFile = readFileSync(configPath, 'utf8');
    const fileConfig = JSON.parse(configFile);
    return {
      github: {
        // `||` (not `??`) is kept for strings so an empty-string token/host
        // in the file still falls through to the environment.
        token: fileConfig.github?.token || process.env.GITHUB_TOKEN || '',
        repositories: fileConfig.github?.repositories || []
      },
      server: {
        port: fileConfig.server?.port ?? parseInt(process.env.PORT || '3001', 10),
        host: fileConfig.server?.host || process.env.HOST || '0.0.0.0'
      },
      cache: {
        timeoutSeconds: fileConfig.cache?.timeoutSeconds ?? 300
      }
    };
  } catch (error) {
    console.log('Config file not found, using environment variables and defaults');
    // Build the fallback here (not at module load) so current process.env
    // values are used, and return a fresh object each call.
    return {
      github: {
        token: process.env.GITHUB_TOKEN || '',
        repositories: []
      },
      server: {
        port: parseInt(process.env.PORT || '3001', 10),
        host: process.env.HOST || '0.0.0.0'
      },
      cache: {
        timeoutSeconds: 300
      }
    };
  }
}
// Legacy function for backward compatibility
/**
 * Creates a ConfigWatcher, which synchronously loads config.json and starts
 * watching it for changes. Note the constructor performs file I/O and
 * installs a filesystem watcher as side effects.
 */
export function createConfigWatcher(): ConfigWatcher {
  return new ConfigWatcher();
}
/**
 * Ensures a loaded Config is usable: a GitHub token must be present and
 * every configured repository needs both an owner and a name.
 * Returns nothing on success; throws an Error describing the first problem
 * found (checked in order: token, repository count, repository fields).
 */
export function validateConfig(config: Config): void {
  const { token, repositories } = config.github;
  if (!token) {
    throw new Error('GitHub token is required. Set GITHUB_TOKEN environment variable or add it to config.json');
  }
  if (repositories.length === 0) {
    throw new Error('At least one repository is required in config.json');
  }
  // Report the first repository entry missing an owner or a name.
  const badRepo = repositories.find((repo) => !repo.owner || !repo.name);
  if (badRepo) {
    throw new Error(`Invalid repository configuration: ${JSON.stringify(badRepo)}`);
  }
}
/**
 * Watches config.json in the current working directory and hot-reloads the
 * configuration when it changes. Emits 'configChanged' with the new Config
 * after a successful reload and 'configError' if a reload throws.
 */
export class ConfigWatcher extends EventEmitter {
  private config: Config;               // most recently loaded configuration
  private configPath: string;           // absolute path to config.json
  private watcher?: chokidar.FSWatcher; // undefined until startWatching() runs
  constructor() {
    super();
    this.configPath = join(process.cwd(), 'config.json');
    this.config = this.loadConfigSync();
    this.startWatching();
  }
  // Reads and validates config.json, merging in environment variables.
  // Any failure (missing file, bad JSON, or a validateConfig rejection)
  // lands in the catch branch and yields env-only defaults.
  // NOTE(review): unlike the standalone loadConfig above, this fallback is
  // built at call time, so the two code paths can disagree about env values.
  private loadConfigSync(): Config {
    try {
      const configFile = readFileSync(this.configPath, 'utf8');
      const fileConfig = JSON.parse(configFile);
      const config = {
        github: {
          token: fileConfig.github?.token || process.env.GITHUB_TOKEN || '',
          repositories: fileConfig.github?.repositories || []
        },
        server: {
          port: fileConfig.server?.port || parseInt(process.env.PORT || '3001'),
          host: fileConfig.server?.host || process.env.HOST || '0.0.0.0'
        },
        cache: {
          timeoutSeconds: fileConfig.cache?.timeoutSeconds || 300
        }
      };
      validateConfig(config);
      return config;
    } catch (error) {
      console.log('Config file not found or invalid, using environment variables and defaults');
      const config = {
        github: {
          token: process.env.GITHUB_TOKEN || '',
          repositories: []
        },
        server: {
          port: parseInt(process.env.PORT || '3001'),
          host: process.env.HOST || '0.0.0.0'
        },
        cache: {
          timeoutSeconds: 300
        }
      };
      return config;
    }
  }
  // Installs a chokidar watcher on config.json and reloads on every change.
  private startWatching(): void {
    this.watcher = chokidar.watch(this.configPath, {
      ignored: /(^|[/\\])\../, // ignore dotfiles
      persistent: true
    });
    this.watcher.on('change', () => {
      console.log('📁 Config file changed, reloading...');
      try {
        const newConfig = this.loadConfigSync();
        this.config = newConfig;
        this.emit('configChanged', newConfig);
        console.log('✅ Config reloaded successfully');
        console.log(`📊 Now monitoring ${newConfig.github.repositories.length} repositories`);
      } catch (error) {
        // Defensive: loadConfigSync currently swallows its own errors, so
        // this path should be unreachable; kept for safety.
        console.error('❌ Failed to reload config:', error);
        this.emit('configError', error);
      }
    });
    this.watcher.on('error', (error) => {
      console.error('❌ Config watcher error:', error);
    });
  }
  // Returns the currently loaded configuration (live reference, not a copy).
  public getConfig(): Config {
    return this.config;
  }
  // Stops watching config.json; safe to call when never started.
  public close(): void {
    if (this.watcher) {
      this.watcher.close();
    }
  }
}

View File

@@ -0,0 +1,344 @@
import { describe, it, expect, vi } from 'vitest';
// Mock Express request object
// Minimal subset of an Express Request used by these tests.
interface MockRequest {
  path: string;                    // URL path being requested, e.g. '/config.json'
  url: string;                     // full URL (mirrors path in these tests)
  ip: string;                      // client address, used in the block log message
  headers: Record<string, string>; // request headers (not read by the middleware under test)
}
// Minimal subset of an Express Response used by these tests.
interface MockResponse {
  statusCode: number;
  headers: Record<string, string>;
  body: string;
  status: (code: number) => MockResponse;           // sets statusCode; chainable
  json: (data: any) => MockResponse;                // serializes data into body; chainable
  setHeader: (name: string, value: string) => void; // records a response header
  end: (data: string) => void;                      // writes the final body
}
// Simulate the security middleware logic
/**
 * Builds an Express-style middleware that denies requests for files and
 * directories that must never be served (secrets, source, tooling
 * metadata). Matching is case-insensitive and prefix-based, so a directory
 * entry such as '/server/' also covers everything beneath it; an exact-path
 * request is likewise caught by the prefix test. Blocked requests get a 403
 * JSON body and are logged with the client IP; everything else falls
 * through to next().
 */
function createSecurityMiddleware() {
  const blockedPrefixes = [
    '/config.json',
    '/config.example.json',
    '/.env',
    '/package.json',
    '/package-lock.json',
    '/tsconfig.json',
    '/server/',
    '/.git/',
    '/node_modules/',
    '/dist/',
    '/build/',
    '/.vscode/',
    '/.idea/',
    '/README.md',
    '/CLAUDE.md'
  ].map((entry) => entry.toLowerCase());
  return (req: MockRequest, res: MockResponse, next: () => void) => {
    const requestPath = req.path.toLowerCase();
    const blocked = blockedPrefixes.some((prefix) => requestPath.startsWith(prefix));
    if (!blocked) {
      next();
      return;
    }
    console.warn(`🚫 Blocked access to sensitive file: ${req.path} from ${req.ip}`);
    res.statusCode = 403;
    res.setHeader('Content-Type', 'application/json');
    res.end(JSON.stringify({
      error: 'Access denied',
      message: 'This resource is not available'
    }));
  };
}
// Create mock response object
/**
 * Builds a fresh MockResponse recording what the middleware writes:
 * statusCode starts at 200, headers/body start empty. status() and json()
 * return the response itself so calls can be chained, matching Express.
 */
function createMockResponse(): MockResponse {
  const response: MockResponse = {
    statusCode: 200,
    headers: {},
    body: '',
    status(code: number): MockResponse {
      this.statusCode = code;
      return this;
    },
    json(data: any): MockResponse {
      this.body = JSON.stringify(data);
      return this;
    },
    setHeader(name: string, value: string): void {
      this.headers[name] = value;
    },
    end(data: string): void {
      this.body = data;
    }
  };
  return response;
}
// Exercises the middleware above: every sensitive path must 403 without
// calling next(); normal paths must pass through untouched.
describe('File Security Middleware', () => {
  const securityMiddleware = createSecurityMiddleware();
  describe('Sensitive File Protection', () => {
    it('should block access to config.json', () => {
      const req: MockRequest = {
        path: '/config.json',
        url: '/config.json',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(res.headers['Content-Type']).toBe('application/json');
      expect(res.body).toContain('Access denied');
      expect(next).not.toHaveBeenCalled();
    });
    it('should block access to config.example.json', () => {
      const req: MockRequest = {
        path: '/config.example.json',
        url: '/config.example.json',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(next).not.toHaveBeenCalled();
    });
    it('should block access to .env files', () => {
      const req: MockRequest = {
        path: '/.env',
        url: '/.env',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(next).not.toHaveBeenCalled();
    });
    it('should block access to package.json', () => {
      const req: MockRequest = {
        path: '/package.json',
        url: '/package.json',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(next).not.toHaveBeenCalled();
    });
    // Directory entries must match by prefix, not just exact path.
    it('should block access to server directory', () => {
      const req: MockRequest = {
        path: '/server/index.ts',
        url: '/server/index.ts',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(next).not.toHaveBeenCalled();
    });
    it('should block access to git directory', () => {
      const req: MockRequest = {
        path: '/.git/config',
        url: '/.git/config',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(next).not.toHaveBeenCalled();
    });
    it('should block access to node_modules', () => {
      const req: MockRequest = {
        path: '/node_modules/package/index.js',
        url: '/node_modules/package/index.js',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(next).not.toHaveBeenCalled();
    });
    it('should block access to README.md', () => {
      const req: MockRequest = {
        path: '/README.md',
        url: '/README.md',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(next).not.toHaveBeenCalled();
    });
    it('should block access to CLAUDE.md', () => {
      const req: MockRequest = {
        path: '/CLAUDE.md',
        url: '/CLAUDE.md',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(next).not.toHaveBeenCalled();
    });
    it('should be case insensitive', () => {
      const req: MockRequest = {
        path: '/CONFIG.JSON',
        url: '/CONFIG.JSON',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(403);
      expect(next).not.toHaveBeenCalled();
    });
  });
  describe('Allowed File Access', () => {
    it('should allow access to API endpoints', () => {
      const req: MockRequest = {
        path: '/api/workflow-runs',
        url: '/api/workflow-runs',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(200);
      expect(next).toHaveBeenCalled();
    });
    it('should allow access to static assets', () => {
      const req: MockRequest = {
        path: '/assets/main.js',
        url: '/assets/main.js',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(200);
      expect(next).toHaveBeenCalled();
    });
    it('should allow access to root path', () => {
      const req: MockRequest = {
        path: '/',
        url: '/',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(200);
      expect(next).toHaveBeenCalled();
    });
    it('should allow access to legitimate files', () => {
      const req: MockRequest = {
        path: '/favicon.ico',
        url: '/favicon.ico',
        ip: '127.0.0.1',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(res.statusCode).toBe(200);
      expect(next).toHaveBeenCalled();
    });
  });
  describe('Security Logging', () => {
    it('should log blocked access attempts', () => {
      // Silence console.warn while asserting it received the block message.
      const consoleSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
      const req: MockRequest = {
        path: '/config.json',
        url: '/config.json',
        ip: '192.168.1.100',
        headers: {}
      };
      const res = createMockResponse();
      const next = vi.fn();
      securityMiddleware(req, res, next);
      expect(consoleSpy).toHaveBeenCalledWith(
        expect.stringContaining('🚫 Blocked access to sensitive file: /config.json from 192.168.1.100')
      );
      consoleSpy.mockRestore();
    });
  });
});

275
server/github.test.ts Normal file
View File

@@ -0,0 +1,275 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import axios from 'axios';
import { GitHubService } from './github';
import { Repository } from './config';
// Unit tests for GitHubService: construction, fetching, caching,
// rate-limit bookkeeping, and token rotation. axios is fully mocked so no
// network traffic occurs.
// Mock axios
vi.mock('axios');
const mockedAxios = vi.mocked(axios);
describe('GitHubService', () => {
  let githubService: GitHubService;
  const mockToken = 'test-token';
  const mockRepository: Repository = {
    owner: 'test-owner',
    name: 'test-repo'
  };
  beforeEach(() => {
    githubService = new GitHubService(mockToken, 300); // 300 seconds cache timeout
    vi.clearAllMocks();
    // NOTE(review): fake timers are installed every test but never restored
    // (there is no afterEach with vi.useRealTimers, and afterEach is not
    // even imported) — confirm this leak across files is acceptable.
    vi.useFakeTimers();
  });
  describe('constructor', () => {
    it('should initialize with correct cache timeout in seconds', () => {
      const service = new GitHubService('token', 600);
      expect(service.getCacheTimeout()).toBe(600);
    });
    it('should use default cache timeout when not specified', () => {
      const service = new GitHubService('token');
      expect(service.getCacheTimeout()).toBe(300);
    });
  });
  describe('getWorkflowRuns', () => {
    it('should fetch workflow runs successfully', async () => {
      const mockResponse = {
        data: {
          workflow_runs: [
            {
              id: 1,
              name: 'Test Workflow',
              display_title: 'Test Run',
              status: 'completed',
              conclusion: 'success',
              created_at: '2024-01-01T10:00:00Z',
              repository: {
                id: 1,
                name: 'test-repo',
                full_name: 'test-owner/test-repo',
                owner: {
                  login: 'test-owner',
                  avatar_url: 'https://github.com/test-owner.png'
                }
              },
              actor: {
                login: 'test-actor',
                avatar_url: 'https://github.com/test-actor.png'
              }
            }
          ]
        },
        headers: {
          'x-ratelimit-limit': '5000',
          'x-ratelimit-remaining': '4999',
          'x-ratelimit-reset': '1640995200',
          'x-ratelimit-used': '1'
        }
      };
      mockedAxios.get.mockResolvedValueOnce(mockResponse);
      const result = await githubService.getWorkflowRuns(mockRepository, 1);
      // Verifies the exact URL, auth headers, and pagination params sent.
      expect(mockedAxios.get).toHaveBeenCalledWith(
        'https://api.github.com/repos/test-owner/test-repo/actions/runs',
        {
          headers: {
            'Authorization': 'token test-token',
            'Accept': 'application/vnd.github.v3+json',
            'X-GitHub-Api-Version': '2022-11-28'
          },
          params: {
            per_page: 1,
            page: 1
          }
        }
      );
      expect(result).toEqual(mockResponse.data.workflow_runs);
    });
    // Errors must be swallowed and reported as an empty run list.
    it('should handle API errors gracefully', async () => {
      mockedAxios.get.mockRejectedValueOnce(new Error('API Error'));
      const result = await githubService.getWorkflowRuns(mockRepository);
      expect(result).toEqual([]);
    });
    it('should handle rate limit exceeded', async () => {
      mockedAxios.get.mockRejectedValueOnce({
        response: {
          status: 403,
          headers: {
            'x-ratelimit-remaining': '0'
          }
        }
      });
      const result = await githubService.getWorkflowRuns(mockRepository);
      expect(result).toEqual([]);
    });
  });
  describe('caching', () => {
    it('should cache responses', async () => {
      const mockResponse = {
        data: {
          workflow_runs: [{ id: 1, name: 'Test' }]
        },
        headers: {
          'x-ratelimit-remaining': '4999'
        }
      };
      mockedAxios.get.mockResolvedValueOnce(mockResponse);
      // First call
      const result1 = await githubService.getWorkflowRuns(mockRepository, 1);
      // Second call should use cache
      const result2 = await githubService.getWorkflowRuns(mockRepository, 1);
      expect(mockedAxios.get).toHaveBeenCalledTimes(1);
      expect(result1).toEqual(result2);
    });
    it('should expire cache after TTL', async () => {
      const mockResponse = {
        data: {
          workflow_runs: [{ id: 1, name: 'Test' }]
        },
        headers: {
          'x-ratelimit-remaining': '4999'
        }
      };
      mockedAxios.get.mockResolvedValue(mockResponse);
      // First call
      await githubService.getWorkflowRuns(mockRepository, 1);
      // Advance time beyond cache TTL (300 seconds)
      vi.advanceTimersByTime(301 * 1000);
      // Second call should make new request
      await githubService.getWorkflowRuns(mockRepository, 1);
      expect(mockedAxios.get).toHaveBeenCalledTimes(2);
    });
    it('should clear cache', async () => {
      const mockResponse = {
        data: { workflow_runs: [] },
        headers: { 'x-ratelimit-remaining': '4999' }
      };
      mockedAxios.get.mockResolvedValue(mockResponse);
      await githubService.getWorkflowRuns(mockRepository, 1);
      const statsBefore = githubService.getCacheStats();
      expect(statsBefore.size).toBeGreaterThan(0);
      githubService.clearCache();
      const statsAfter = githubService.getCacheStats();
      expect(statsAfter.size).toBe(0);
    });
  });
  describe('getLatestMainBranchRuns', () => {
    it('should fetch runs from multiple repositories in parallel', async () => {
      const repositories = [
        { owner: 'owner1', name: 'repo1' },
        { owner: 'owner2', name: 'repo2' }
      ];
      const mockResponse = {
        data: {
          workflow_runs: [
            {
              id: 1,
              name: 'Test',
              repository: {
                full_name: 'owner1/repo1',
                owner: { login: 'owner1' }
              }
            }
          ]
        },
        headers: { 'x-ratelimit-remaining': '4999' }
      };
      mockedAxios.get.mockResolvedValue(mockResponse);
      const result = await githubService.getLatestMainBranchRuns(repositories);
      expect(mockedAxios.get).toHaveBeenCalledTimes(2);
      expect(result).toHaveLength(2);
    });
    it('should handle repositories with no runs', async () => {
      const repositories = [
        { owner: 'owner1', name: 'repo1' }
      ];
      const mockResponse = {
        data: { workflow_runs: [] },
        headers: { 'x-ratelimit-remaining': '4999' }
      };
      mockedAxios.get.mockResolvedValue(mockResponse);
      const result = await githubService.getLatestMainBranchRuns(repositories);
      expect(result).toEqual([]);
    });
  });
  describe('rate limit handling', () => {
    it('should update rate limit info from response headers', async () => {
      const mockResponse = {
        data: { workflow_runs: [] },
        headers: {
          'x-ratelimit-limit': '5000',
          'x-ratelimit-remaining': '4000',
          'x-ratelimit-reset': '1640995200',
          'x-ratelimit-used': '1000'
        }
      };
      mockedAxios.get.mockResolvedValueOnce(mockResponse);
      await githubService.getWorkflowRuns(mockRepository);
      const rateLimitInfo = githubService.getRateLimitInfo();
      expect(rateLimitInfo.limit).toBe(5000);
      expect(rateLimitInfo.remaining).toBe(4000);
      expect(rateLimitInfo.used).toBe(1000);
    });
  });
  describe('updateToken', () => {
    it('should update token without losing cache', () => {
      const newToken = 'new-token';
      const initialCacheSize = githubService.getCacheStats().size;
      githubService.updateToken(newToken);
      const finalCacheSize = githubService.getCacheStats().size;
      expect(finalCacheSize).toBe(initialCacheSize);
    });
  });
  describe('getCacheTimeout', () => {
    it('should return cache timeout in seconds', () => {
      const service = new GitHubService('token', 600);
      expect(service.getCacheTimeout()).toBe(600);
    });
  });
});

336
server/github.ts Normal file
View File

@@ -0,0 +1,336 @@
import axios from 'axios';
import { Repository } from './config';
// A single cached API response plus the metadata needed to expire it.
interface CacheEntry<T> {
  data: T;           // cached payload
  timestamp: number; // Date.now() when the entry was stored
  ttl: number;       // time-to-live in milliseconds
}
// Snapshot of GitHub's x-ratelimit-* headers from the most recent response.
interface RateLimitInfo {
  limit: number;     // x-ratelimit-limit
  remaining: number; // x-ratelimit-remaining
  resetTime: number; // x-ratelimit-reset converted to epoch milliseconds
  used: number;      // x-ratelimit-used
}
/**
 * Client-side throttle for outgoing API requests: caps the number of
 * in-flight requests and smooths bursts to a fixed per-second budget.
 *
 * Fix over the previous version: the rate-limit check captured Date.now()
 * once before awaiting, so after a wait the one-second window was stale —
 * the pre-wait timestamp was recorded as the request's start time and the
 * window was not re-evaluated, letting bursts exceed maxRequestsPerSecond.
 * The check now loops, refreshing the clock and window after every wait,
 * and records the actual start time.
 */
class RequestQueue {
  private activeRequests = 0;
  private requestTimes: number[] = []; // start times of requests within the last second
  private maxConcurrent = 10; // Maximum concurrent requests
  private maxRequestsPerSecond = 10; // GitHub allows up to 5000/hour, so 10/second is very conservative
  /**
   * Runs `request` once a concurrency slot and a rate-limit slot are free.
   * Resolves with the request's result; failures are logged and re-thrown
   * so the caller still sees the original error.
   */
  async add<T>(request: () => Promise<T>): Promise<T> {
    // Wait if we're at max concurrent requests
    while (this.activeRequests >= this.maxConcurrent) {
      console.log(`⏳ Waiting for concurrent request slot (${this.activeRequests}/${this.maxConcurrent})`);
      await new Promise(resolve => setTimeout(resolve, 50));
    }
    // Throttle to maxRequestsPerSecond. Re-evaluate on every iteration so
    // the window stays fresh after each await.
    for (;;) {
      const now = Date.now();
      // Clean up old request times (older than 1 second)
      this.requestTimes = this.requestTimes.filter(time => now - time < 1000);
      if (this.requestTimes.length < this.maxRequestsPerSecond) {
        break;
      }
      // Sleep until the oldest timestamp ages out of the window (>= 1ms so
      // we always yield).
      const waitTime = Math.max(1, 1000 - (now - this.requestTimes[0]));
      console.log(`⏳ Rate limiting: waiting ${waitTime}ms (${this.requestTimes.length}/${this.maxRequestsPerSecond} requests/sec)`);
      await new Promise(resolve => setTimeout(resolve, waitTime));
    }
    this.activeRequests++;
    this.requestTimes.push(Date.now()); // record the actual start time, not a pre-wait snapshot
    console.log(`🔄 Starting request (${this.activeRequests}/${this.maxConcurrent} active, ${this.requestTimes.length}/sec)`);
    try {
      const result = await request();
      console.log(`✅ Request completed (${this.activeRequests - 1}/${this.maxConcurrent} remaining)`);
      return result;
    } catch (error) {
      console.error('Queue request failed:', error);
      throw error;
    } finally {
      this.activeRequests--;
    }
  }
}
// Shape of a GitHub Actions workflow run as consumed here — a subset of
// the /repos/{owner}/{repo}/actions/runs response fields.
export interface WorkflowRun {
  id: number;
  name: string;
  display_title: string;
  status: 'queued' | 'in_progress' | 'completed';
  // null while the run has not finished
  conclusion: 'success' | 'failure' | 'neutral' | 'cancelled' | 'skipped' | 'timed_out' | 'action_required' | null;
  workflow_id: number;
  head_branch: string;
  head_sha: string;
  run_number: number;
  event: string; // triggering event, e.g. 'push'
  created_at: string; // ISO-8601 timestamps as returned by the API
  updated_at: string;
  html_url: string;
  repository: {
    id: number;
    name: string;
    full_name: string; // 'owner/name'
    owner: {
      login: string;
      avatar_url: string;
    };
  };
  head_commit: {
    id: string;
    message: string;
    author: {
      name: string;
      email: string;
    };
  };
  actor: {
    login: string;
    avatar_url: string;
  };
}
// Base URL for all GitHub REST API requests.
const GITHUB_API_BASE = 'https://api.github.com';
export class GitHubService {
private token: string;
private cache = new Map<string, CacheEntry<any>>();
private rateLimitInfo: RateLimitInfo = {
limit: 5000,
remaining: 5000,
resetTime: Date.now() + (60 * 60 * 1000), // 1 hour from now
used: 0
};
private requestQueue = new RequestQueue();
private readonly DEFAULT_TTL: number;
private readonly WORKFLOW_RUNS_TTL: number;
constructor(token: string, cacheTimeoutSeconds: number = 300) {
this.token = token;
this.DEFAULT_TTL = cacheTimeoutSeconds * 1000;
this.WORKFLOW_RUNS_TTL = cacheTimeoutSeconds * 1000;
console.log(`🚀 GitHubService initialized with caching (${cacheTimeoutSeconds}s) and rate limiting`);
}
private getHeaders() {
return {
'Authorization': `token ${this.token}`,
'Accept': 'application/vnd.github.v3+json',
'X-GitHub-Api-Version': '2022-11-28'
};
}
private getCacheKey(endpoint: string, params?: any): string {
// Extract repository info from endpoint for better cache key readability
const repoMatch = endpoint.match(/\/repos\/([^/]+\/[^/]+)\//);
const repoInfo = repoMatch ? repoMatch[1] : 'unknown';
// Create a sorted, clean parameter string
const cleanParams = params ? Object.keys(params).sort().reduce((obj: any, key) => {
obj[key] = params[key];
return obj;
}, {}) : {};
const cacheKey = `${repoInfo}:${endpoint.split('/').pop()}:${JSON.stringify(cleanParams)}`;
console.log(`🔑 Generated cache key: ${cacheKey} for ${endpoint}`);
return cacheKey;
}
private isCacheValid<T>(entry: CacheEntry<T>): boolean {
return Date.now() - entry.timestamp < entry.ttl;
}
private setCache<T>(key: string, data: T, ttl: number = this.DEFAULT_TTL): void {
this.cache.set(key, {
data,
timestamp: Date.now(),
ttl
});
console.log(`💾 SET Cache: ${key} (TTL: ${ttl}ms, Size: ${this.cache.size})`);
}
private getCache<T>(key: string): T | null {
const entry = this.cache.get(key);
if (!entry) {
console.log(`💾 GET Cache: ${key} - NOT FOUND`);
return null;
}
if (!this.isCacheValid(entry)) {
console.log(`💾 GET Cache: ${key} - EXPIRED (age: ${Date.now() - entry.timestamp}ms, ttl: ${entry.ttl}ms)`);
this.cache.delete(key);
return null;
}
console.log(`💾 GET Cache: ${key} - HIT (age: ${Date.now() - entry.timestamp}ms, ttl: ${entry.ttl}ms)`);
return entry.data;
}
private updateRateLimitInfo(headers: any): void {
if (headers['x-ratelimit-limit']) {
this.rateLimitInfo.limit = parseInt(headers['x-ratelimit-limit']);
}
if (headers['x-ratelimit-remaining']) {
this.rateLimitInfo.remaining = parseInt(headers['x-ratelimit-remaining']);
}
if (headers['x-ratelimit-reset']) {
this.rateLimitInfo.resetTime = parseInt(headers['x-ratelimit-reset']) * 1000;
}
if (headers['x-ratelimit-used']) {
this.rateLimitInfo.used = parseInt(headers['x-ratelimit-used']);
}
}
private async shouldWaitForRateLimit(): Promise<void> {
if (this.rateLimitInfo.remaining <= 10) {
const waitTime = Math.max(0, this.rateLimitInfo.resetTime - Date.now());
if (waitTime > 0) {
console.log(`⏳ Rate limit nearly exceeded. Waiting ${Math.ceil(waitTime / 1000)} seconds...`);
await new Promise(resolve => setTimeout(resolve, waitTime));
}
}
}
private async makeRequest<T>(url: string, params?: any, ttl: number = this.DEFAULT_TTL): Promise<T> {
const cacheKey = this.getCacheKey(url, params);
// Check cache first
const cached = this.getCache<T>(cacheKey);
if (cached) {
const repoMatch = url.match(/\/repos\/([^/]+\/[^/]+)\//);
const repoInfo = repoMatch ? repoMatch[1] : 'unknown';
console.log(`💾 Cache HIT: ${repoInfo} - ${url.split('/').pop()}`);
return cached;
}
const repoMatch = url.match(/\/repos\/([^/]+\/[^/]+)\//);
const repoInfo = repoMatch ? repoMatch[1] : 'unknown';
console.log(`🌐 Cache MISS: ${repoInfo} - ${url.split('/').pop()} - Making API request`);
// Check rate limit
await this.shouldWaitForRateLimit();
// Make the request through the queue
return this.requestQueue.add(async () => {
try {
const response = await axios.get(url, {
headers: this.getHeaders(),
params
});
// Update rate limit info
this.updateRateLimitInfo(response.headers);
console.log(`📊 API Rate Limit: ${this.rateLimitInfo.remaining}/${this.rateLimitInfo.limit} remaining`);
// Cache the response
this.setCache(cacheKey, response.data, ttl);
console.log(`💾 Cached response for ${Math.round(ttl/1000)}s`);
return response.data;
} catch (error: any) {
if (error.response?.status === 403 && error.response?.headers['x-ratelimit-remaining'] === '0') {
console.error('🚫 GitHub API rate limit exceeded');
throw new Error('GitHub API rate limit exceeded. Please wait before making more requests.');
}
throw error;
}
});
}
async getWorkflowRuns(repository: Repository, per_page = 1, branch?: string): Promise<WorkflowRun[]> {
console.log(`📊 getWorkflowRuns called for ${repository.owner}/${repository.name} (per_page: ${per_page}, branch: ${branch})`);
try {
const params: any = {
per_page,
page: 1
};
if (branch) {
params.branch = branch;
}
const url = `${GITHUB_API_BASE}/repos/${repository.owner}/${repository.name}/actions/runs`;
const response = await this.makeRequest<any>(url, params, this.WORKFLOW_RUNS_TTL);
console.log(`✅ getWorkflowRuns completed for ${repository.owner}/${repository.name} - ${response.workflow_runs.length} runs`);
return response.workflow_runs;
} catch (error) {
console.error(`Error fetching workflow runs for ${repository.owner}/${repository.name}:`, error);
return [];
}
}
/**
 * Resolve the most recent 'main'-branch workflow run for every repository.
 * The aggregate result is cached under a key derived from the exact repo set,
 * so repeated dashboard polls don't fan out into per-repo API calls.
 */
async getLatestMainBranchRuns(repositories: Repository[]): Promise<WorkflowRun[]> {
  console.log(`🚀 getLatestMainBranchRuns called for ${repositories.length} repositories`);
  // Key is order-independent: sort the repo identifiers before joining.
  const sortedRepoList = repositories.map((r) => `${r.owner}/${r.name}`).sort().join(',');
  const cacheKey = `aggregate:latest-main-runs:${sortedRepoList}`;
  console.log(`🔑 Aggregate cache key: ${cacheKey}`);
  // Serve the memoized aggregate when this exact repository set was seen recently.
  const cachedRuns = this.getCache<WorkflowRun[]>(cacheKey);
  if (cachedRuns) {
    console.log(`💾 Cache HIT: Aggregate latest runs for ${repositories.length} repositories`);
    return cachedRuns;
  }
  console.log(`🌐 Cache MISS: Aggregate latest runs for ${repositories.length} repositories - Fetching individual repos in parallel`);
  // Fan out in parallel; per-repo caching and the request queue handle rate limiting.
  const perRepoLatest = await Promise.all(
    repositories.map(async (repo, index) => {
      console.log(`📊 Starting repo ${index + 1}/${repositories.length}: ${repo.owner}/${repo.name}`);
      const runs = await this.getWorkflowRuns(repo, 1, 'main');
      if (runs.length === 0) {
        console.log(`⚠️ No runs found for ${repo.owner}/${repo.name}`);
        return null;
      }
      console.log(`✅ Completed run for ${repo.owner}/${repo.name}`);
      return runs[0];
    })
  );
  // Drop repos that had no runs, then memoize the aggregate.
  const latestRuns = perRepoLatest.filter((run): run is WorkflowRun => run !== null);
  this.setCache(cacheKey, latestRuns, this.WORKFLOW_RUNS_TTL);
  console.log(`💾 Cached aggregate result for ${repositories.length} repositories (${Math.round(this.WORKFLOW_RUNS_TTL/1000)}s)`);
  return latestRuns;
}
// Convenience wrapper: fetch up to `limit` recent runs for one repository,
// with no branch filter (all branches).
async getRepositoryWorkflowRuns(repository: Repository, limit = 10): Promise<WorkflowRun[]> {
  return this.getWorkflowRuns(repository, limit);
}
// Return a defensive copy of the current rate-limit snapshot so callers
// cannot mutate the service's internal state.
getRateLimitInfo(): RateLimitInfo {
  const snapshot: RateLimitInfo = Object.assign({}, this.rateLimitInfo);
  return snapshot;
}
// Drop every cached entry (used by the DELETE /api/cache endpoint and after
// configuration changes that invalidate cached responses).
clearCache(): void {
  this.cache.clear();
  console.log('🗑️ Cache cleared');
}
// Diagnostics: how many entries the cache holds, plus the cache keys
// themselves (consumed by /api/cache/stats).
getCacheStats(): { size: number; entries: string[] } {
  const entries = [...this.cache.keys()];
  return { size: this.cache.size, entries };
}
// Swap in a new GitHub token without discarding cached responses; used on
// hot config reloads when only the token changed (cache TTL unchanged).
updateToken(newToken: string): void {
  this.token = newToken;
  console.log('🔧 GitHub token updated, cache preserved');
}
// Current default cache TTL in seconds (DEFAULT_TTL is stored in milliseconds).
getCacheTimeout(): number {
  return this.DEFAULT_TTL / 1000; // Convert back to seconds
}
}

226
server/index.ts Normal file
View File

@@ -0,0 +1,226 @@
import express from 'express';
import cors from 'cors';
import { ConfigWatcher } from './config';
import { GitHubService } from './github';
const app = express();
// Security middleware: strip the framework fingerprint and attach standard
// hardening headers to every response.
const SECURITY_HEADERS: Record<string, string> = {
  'X-Content-Type-Options': 'nosniff',
  'X-Frame-Options': 'DENY',
  'X-XSS-Protection': '1; mode=block'
};
app.use((req, res, next) => {
  // Remove server information from headers
  res.removeHeader('X-Powered-By');
  for (const [header, value] of Object.entries(SECURITY_HEADERS)) {
    res.setHeader(header, value);
  }
  next();
});
// Block access to sensitive files and directories.
// The prefix list is lowercased once at startup instead of on every request,
// and the exact-match check is dropped: startsWith(prefix) already covers
// equality, so matching behavior is unchanged.
const SENSITIVE_PATH_PREFIXES = [
  '/config.json',
  '/config.example.json',
  '/.env',
  '/package.json',
  '/package-lock.json',
  '/tsconfig.json',
  '/server/',
  '/.git/',
  '/node_modules/',
  '/dist/',
  '/build/',
  '/.vscode/',
  '/.idea/',
  '/README.md',
  '/CLAUDE.md'
].map((p) => p.toLowerCase());
app.use((req, res, next) => {
  const normalizedPath = req.path.toLowerCase();
  // Check if the request is for a sensitive file or directory
  const isSensitiveFile = SENSITIVE_PATH_PREFIXES.some((prefix) =>
    normalizedPath.startsWith(prefix)
  );
  if (isSensitiveFile) {
    console.warn(`🚫 Blocked access to sensitive file: ${req.path} from ${req.ip}`);
    return res.status(403).json({
      error: 'Access denied',
      message: 'This resource is not available'
    });
  }
  next();
});
// CORS + JSON body parsing for the API routes below.
app.use(cors());
app.use(express.json());
// Module-level handles: assigned by the bootstrap block below and read by
// every route handler. githubService may be reassigned on config reload.
let configWatcher: ConfigWatcher;
let githubService: GitHubService;
// Bootstrap: load configuration, construct the GitHub service, and wire up
// hot-reload handling. A failed initial load is fatal; failed reloads keep
// the previous config (see 'configError' below).
try {
  configWatcher = new ConfigWatcher();
  const config = configWatcher.getConfig();
  githubService = new GitHubService(config.github.token, config.cache?.timeoutSeconds || 300);
  console.log('✅ Configuration loaded successfully');
  logConfigChange(config);
  // Handle config changes
  configWatcher.on('configChanged', (newConfig) => {
    const oldCache = githubService.getCacheStats();
    const newCacheTimeout = newConfig.cache?.timeoutSeconds || 300;
    // If cache timeout changed, create a new service (drops the cache);
    // otherwise just update the token in place and keep cached entries.
    const currentCacheTimeout = githubService.getCacheTimeout();
    if (currentCacheTimeout !== newCacheTimeout) {
      githubService = new GitHubService(newConfig.github.token, newCacheTimeout);
      console.log(`🔄 GitHub service recreated with new cache timeout: ${newCacheTimeout} seconds`);
    } else {
      githubService.updateToken(newConfig.github.token);
      console.log('🔄 GitHub service updated with new token');
    }
    console.log(`📊 Previous cache had ${oldCache.size} entries`);
    logConfigChange(newConfig);
  });
  configWatcher.on('configError', (error) => {
    console.error('⚠️ Config reload failed, continuing with previous config:', error);
  });
} catch (error) {
  console.error('❌ Configuration error:', error);
  process.exit(1);
}
// Liveness probe: static status plus the current server time.
app.get('/api/health', (req, res) => {
  const payload = { status: 'ok', timestamp: new Date().toISOString() };
  res.json(payload);
});
// Project the full config down to the fields that are safe to hand to the
// browser. Tokens, API keys, and server binding details must never leak.
function getPublicConfig(config: any) {
  // Rebuild each repository entry explicitly so any unexpected extra fields
  // (e.g. per-repo credentials) are dropped rather than forwarded.
  const repositories = config.github.repositories.map((repo: any) => ({
    owner: repo.owner,
    name: repo.name,
    full_name: `${repo.owner}/${repo.name}`
  }));
  return {
    repositories,
    // Cache info without sensitive details.
    cache: {
      timeoutSeconds: config.cache?.timeoutSeconds || 300
    },
    // Repository count for the UI.
    repositoryCount: repositories.length
  };
}
// Log a summary of a (re)loaded config. Intentionally prints only counts and
// timeouts — never tokens, hosts, ports, or other sensitive values.
function logConfigChange(config: any) {
  const repoCount = config.github.repositories.length;
  const cacheSeconds = config.cache?.timeoutSeconds || 300;
  console.log(`📊 Configuration loaded: ${repoCount} repositories`);
  console.log(`💾 Cache timeout: ${cacheSeconds} seconds`);
}
app.get('/api/config', (req, res) => {
try {
const config = configWatcher.getConfig();
const publicConfig = getPublicConfig(config);
res.json(publicConfig);
} catch (error) {
console.error('Error fetching config:', error);
res.status(500).json({ error: 'Failed to fetch configuration' });
}
});
// Latest main-branch run per configured repository (aggregate, cached).
app.get('/api/workflow-runs', async (req, res) => {
  try {
    const { github } = configWatcher.getConfig();
    const runs = await githubService.getLatestMainBranchRuns(github.repositories);
    res.json(runs);
  } catch (error) {
    console.error('Error fetching workflow runs:', error);
    res.status(500).json({ error: 'Failed to fetch workflow runs' });
  }
});
// Recent runs (any branch) for a single repository.
// `limit` comes from the query string and is untrusted: parse it with an
// explicit radix and clamp to GitHub's 1..100 per_page range, falling back
// to 10 for missing/invalid/non-positive values. (The previous
// `parseInt(x) || 10` let negative or huge values through to the API.)
app.get('/api/repository/:owner/:repo/workflow-runs', async (req, res) => {
  try {
    const { owner, repo } = req.params;
    const parsedLimit = parseInt(req.query.limit as string, 10);
    const limit = Number.isNaN(parsedLimit) || parsedLimit < 1
      ? 10
      : Math.min(parsedLimit, 100);
    const repository = { owner, name: repo };
    const runs = await githubService.getRepositoryWorkflowRuns(repository, limit);
    res.json(runs);
  } catch (error) {
    console.error(`Error fetching workflow runs for ${req.params.owner}/${req.params.repo}:`, error);
    res.status(500).json({ error: 'Failed to fetch repository workflow runs' });
  }
});
// Rate-limit status endpoint: raw counters plus human-friendly derived fields.
// NOTE(review): treats resetTime as epoch milliseconds (Date.now() math) — confirm
// against the GitHubService implementation.
app.get('/api/rate-limit', (req, res) => {
  try {
    const info = githubService.getRateLimitInfo();
    const payload = {
      ...info,
      resetTimeFormatted: new Date(info.resetTime).toISOString(),
      timeUntilReset: Math.max(0, info.resetTime - Date.now())
    };
    res.json(payload);
  } catch (error) {
    console.error('Error fetching rate limit info:', error);
    res.status(500).json({ error: 'Failed to fetch rate limit information' });
  }
});
// Cache diagnostics: entry count plus the cached keys.
app.get('/api/cache/stats', (req, res) => {
  try {
    res.json(githubService.getCacheStats());
  } catch (error) {
    console.error('Error fetching cache stats:', error);
    res.status(500).json({ error: 'Failed to fetch cache statistics' });
  }
});
// Manual cache flush (e.g. after pushing a config change you want reflected
// immediately).
app.delete('/api/cache', (req, res) => {
  try {
    githubService.clearCache();
    res.json({ message: 'Cache cleared successfully' });
  } catch (error) {
    console.error('Error clearing cache:', error);
    res.status(500).json({ error: 'Failed to clear cache' });
  }
});
// Bind the HTTP server to the configured host/port; the callback prints the
// route map once the socket is listening.
const config = configWatcher.getConfig();
const port = config.server.port;
const host = config.server.host;
app.listen(port, host, () => {
  console.log(`🚀 Server running at http://${host}:${port}`);
  console.log(`📡 API endpoints:`);
  console.log(` GET /api/health - Health check`);
  console.log(` GET /api/config - Repository configuration`);
  console.log(` GET /api/workflow-runs - Latest workflow runs`);
  console.log(` GET /api/repository/:owner/:repo/workflow-runs - Repository workflow runs`);
  console.log(` GET /api/rate-limit - GitHub API rate limit status`);
  console.log(` GET /api/cache/stats - Cache statistics`);
  console.log(` DELETE /api/cache - Clear cache`);
  console.log(`👀 Watching config.json for changes...`);
});
// Graceful shutdown: stop the config watcher before exiting on either signal.
function shutdownOnSignal(signal: string): void {
  console.log(`🛑 Received ${signal}, shutting down gracefully...`);
  configWatcher.close();
  process.exit(0);
}
process.on('SIGTERM', () => shutdownOnSignal('SIGTERM'));
process.on('SIGINT', () => shutdownOnSignal('SIGINT'));

198
server/security.test.ts Normal file
View File

@@ -0,0 +1,198 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { Config } from './config';
// Mock the config and server modules
// Fixture: a fully-populated config with a deliberately fake token; the tests
// below verify that neither the token nor the server block reaches the
// public projection or the logs.
const mockConfig: Config = {
  github: {
    token: 'ghp_super_secret_token_123',
    repositories: [
      { owner: 'test-owner', name: 'test-repo' },
      { owner: 'another-owner', name: 'another-repo' }
    ]
  },
  server: {
    port: 3001,
    host: '0.0.0.0'
  },
  cache: {
    timeoutSeconds: 300
  }
};
// Import the getPublicConfig function (we'll need to export it for testing).
// Until then, simulate the server's sanitizer locally: rebuild each
// repository entry field-by-field and expose only cache timeout + repo count.
function getPublicConfig(config: Config) {
  const repositories = config.github.repositories.map((repo) => ({
    owner: repo.owner,
    name: repo.name,
    full_name: `${repo.owner}/${repo.name}`
  }));
  return {
    repositories,
    cache: {
      timeoutSeconds: config.cache?.timeoutSeconds || 300
    },
    repositoryCount: repositories.length
  };
}
// Assertions that the public-config projection and the config logger never
// leak tokens, server binding details, or other sensitive configuration.
describe('Security Tests', () => {
  describe('Config API Security', () => {
    it('should not expose sensitive information in public config', () => {
      const publicConfig = getPublicConfig(mockConfig);
      // Ensure sensitive top-level sections are not included
      expect(publicConfig).not.toHaveProperty('github');
      expect(publicConfig).not.toHaveProperty('server');
      expect(publicConfig).not.toHaveProperty('token');
      // Ensure no token material is accidentally included anywhere in the payload
      const configString = JSON.stringify(publicConfig);
      expect(configString).not.toContain('ghp_');
      expect(configString).not.toContain('token');
      expect(configString).not.toContain('secret');
    });
    it('should only expose safe repository information', () => {
      const publicConfig = getPublicConfig(mockConfig);
      expect(publicConfig.repositories).toHaveLength(2);
      // Each entry is exactly owner/name/full_name — nothing else
      expect(publicConfig.repositories[0]).toEqual({
        owner: 'test-owner',
        name: 'test-repo',
        full_name: 'test-owner/test-repo'
      });
      expect(publicConfig.repositories[1]).toEqual({
        owner: 'another-owner',
        name: 'another-repo',
        full_name: 'another-owner/another-repo'
      });
    });
    it('should expose safe cache configuration', () => {
      const publicConfig = getPublicConfig(mockConfig);
      expect(publicConfig.cache).toEqual({
        timeoutSeconds: 300
      });
    });
    it('should include repository count for UI', () => {
      const publicConfig = getPublicConfig(mockConfig);
      expect(publicConfig.repositoryCount).toBe(2);
    });
    it('should handle missing cache configuration safely', () => {
      // No cache section → sanitizer must fall back to the 300s default
      const configWithoutCache = {
        ...mockConfig,
        cache: undefined
      };
      const publicConfig = getPublicConfig(configWithoutCache);
      expect(publicConfig.cache).toEqual({
        timeoutSeconds: 300
      });
    });
    it('should never expose server configuration', () => {
      const publicConfig = getPublicConfig(mockConfig);
      expect(publicConfig).not.toHaveProperty('port');
      expect(publicConfig).not.toHaveProperty('host');
      expect(publicConfig).not.toHaveProperty('server');
    });
    it('should never expose GitHub token', () => {
      const publicConfig = getPublicConfig(mockConfig);
      // Check that the token is never exposed in any form
      const configString = JSON.stringify(publicConfig);
      expect(configString).not.toContain('ghp_super_secret_token_123');
      expect(configString).not.toContain('token');
    });
    it('should handle repository with potential sensitive data', () => {
      // Repo entries with extra fields (e.g. per-repo tokens) must be stripped
      const configWithSensitiveRepo = {
        ...mockConfig,
        github: {
          ...mockConfig.github,
          repositories: [
            {
              owner: 'test-owner',
              name: 'test-repo',
              // @ts-ignore - testing potential sensitive data
              token: 'per-repo-token-123'
            }
          ]
        }
      };
      const publicConfig = getPublicConfig(configWithSensitiveRepo);
      expect(publicConfig.repositories[0]).toEqual({
        owner: 'test-owner',
        name: 'test-repo',
        full_name: 'test-owner/test-repo'
      });
      // Ensure repository-specific sensitive data is not exposed
      expect(publicConfig.repositories[0]).not.toHaveProperty('token');
    });
  });
  describe('Safe Logging', () => {
    it('should log config changes without sensitive data', () => {
      const consoleSpy = vi.spyOn(console, 'log');
      // Simulate the server's logConfigChange function
      function logConfigChange(config: Config) {
        console.log(`📊 Configuration loaded: ${config.github.repositories.length} repositories`);
        console.log(`💾 Cache timeout: ${config.cache?.timeoutSeconds || 300} seconds`);
      }
      logConfigChange(mockConfig);
      const logCalls = consoleSpy.mock.calls.flat();
      const allLogs = logCalls.join(' ');
      // Ensure no sensitive data is logged
      expect(allLogs).not.toContain('ghp_super_secret_token_123');
      expect(allLogs).not.toContain('token');
      expect(allLogs).not.toContain('3001'); // port
      expect(allLogs).not.toContain('0.0.0.0'); // host
      // Ensure safe data is logged
      expect(allLogs).toContain('2 repositories');
      expect(allLogs).toContain('300 seconds');
      consoleSpy.mockRestore();
    });
  });
  describe('Data Sanitization', () => {
    it('should sanitize any potential sensitive data in repository names', () => {
      const configWithSensitiveNames = {
        ...mockConfig,
        github: {
          ...mockConfig.github,
          repositories: [
            { owner: 'test-owner', name: 'repo-with-token-ghp123' },
            { owner: 'user', name: 'secret-project' }
          ]
        }
      };
      const publicConfig = getPublicConfig(configWithSensitiveNames);
      // Repository names should be preserved as-is (they're not sensitive themselves)
      // but we should ensure the filtering process doesn't accidentally expose other data
      expect(publicConfig.repositories[0].name).toBe('repo-with-token-ghp123');
      expect(publicConfig.repositories[1].name).toBe('secret-project');
      // But the actual token should never be exposed
      const configString = JSON.stringify(publicConfig);
      expect(configString).not.toContain('ghp_super_secret_token_123');
    });
  });
});