diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 000000000..09d13e222 --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,63 @@ +{ + "parser": "@typescript-eslint/parser", + "env": { + "node": true, + "browser": true, + "commonjs": true, + "es2021": true, + "mocha": true + }, + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/recommended", + "plugin:react/recommended", + "google", + "prettier", + "plugin:json/recommended" + ], + "overrides": [ + { + "files": ["test/**/*.js", "**/*.json", "cypress/**/*.js", "plugins/**/*.js"], + "parserOptions": { + "project": null + }, + "parser": "espree", + "env": { + "cypress/globals": true + }, + "plugins": ["cypress"], + "rules": { + "@typescript-eslint/no-unused-expressions": "off" + } + } + ], + "parserOptions": { + "project": "./tsconfig.json", + "requireConfigFile": false, + "ecmaVersion": 12, + "sourceType": "module", + "ecmaFeatures": { + "jsx": true, + "modules": true + }, + "babelOptions": { + "presets": ["@babel/preset-react"] + } + }, + "plugins": ["@typescript-eslint", "react", "prettier"], + "rules": { + "react/prop-types": "off", + "require-jsdoc": "off", + "no-async-promise-executor": "off", + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/no-unused-vars": "off", + "@typescript-eslint/no-require-imports": "off", + "@typescript-eslint/no-unused-expressions": "off" + }, + "settings": { + "react": { + "version": "detect" + } + }, + "ignorePatterns": ["src/config/generated/config.ts"] +} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..6ba80af95 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,60 @@ +# --- +# name: Release + +# on: +# workflow_dispatch: +# push: +# branches: +# - main +# paths: +# - 'src/**' +# - 'test/**' +# - 'scripts/**' +# - 'public/**' +# - 'packages/**' +# - 'package.json' +# - 'package-lock.json' + +# permissions: +# contents: read + +# jobs: +# 
create_github_release: +# outputs: +# full-tag: ${{ steps.release-drafter.outputs.tag_name }} +# short-tag: ${{ steps.get_tag_name.outputs.SHORT_TAG }} +# body: ${{ steps.release-drafter.outputs.body }} +# runs-on: ubuntu-latest +# permissions: +# contents: write +# pull-requests: read +# steps: +# - uses: release-drafter/release-drafter@v6 +# id: release-drafter +# env: +# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} +# with: +# publish: true +# - name: Get the short tag +# id: get_tag_name +# run: | +# short_tag=$(echo ${{ steps.release-drafter.outputs.tag_name }} | cut -d. -f1) +# echo "SHORT_TAG=$short_tag" >> $GITHUB_OUTPUT +# create_npm_release: +# needs: create_github_release +# runs-on: ubuntu-latest +# permissions: +# packages: write +# env: +# REGISTRY: ghcr.io +# IMAGE_NAME: ${{ github.repository }} +# steps: +# - uses: actions/checkout@8459bc0 # v4 +# - uses: actions/setup-node@c2ac33f # v4, Setup .npmrc file to publish to npm +# with: +# node-version: '20.x' +# registry-url: 'https://registry.npmjs.org' +# - run: npm ci +# - run: npm publish --access=public +# env: +# NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/cypress/e2e/tagPush.cy.js b/cypress/e2e/tagPush.cy.js new file mode 100644 index 000000000..80c432891 --- /dev/null +++ b/cypress/e2e/tagPush.cy.js @@ -0,0 +1,127 @@ +describe('Tag Push Functionality', () => { + beforeEach(() => { + cy.login('admin', 'admin'); + cy.on('uncaught:exception', () => false); + + // Create test data for tag pushes + cy.createTestTagPush(); + }); + + describe('Tag Push Display in PushesTable', () => { + it('can navigate to push dashboard and view push table', () => { + cy.visit('/dashboard/push'); + + // Wait for API call to complete + cy.wait('@getPushes'); + + // Check that we can see the basic table structure + cy.get('table', { timeout: 10000 }).should('exist'); + cy.get('thead').should('exist'); + cy.get('tbody').should('exist'); + + // Now we should have test data, so we can check for rows + cy.get('tbody 
tr').should('have.length.at.least', 1); + + // Check the structure of the first row + cy.get('tbody tr') + .first() + .within(() => { + cy.get('td').should('have.length.at.least', 6); // We know there are multiple columns + // Check for tag-specific content + cy.contains('v1.0.0').should('exist'); // Tag name + cy.contains('test-tagger').should('exist'); // Tagger + }); + }); + + it('has search functionality', () => { + cy.visit('/dashboard/push'); + cy.wait('@getPushes'); + + // Check search input exists + cy.get('input[type="text"]').first().should('exist'); + + // Test searching for tag name + cy.get('input[type="text"]').first().type('v1.0.0'); + cy.get('tbody tr').should('have.length.at.least', 1); + }); + + it('can interact with push table entries', () => { + cy.visit('/dashboard/push'); + cy.wait('@getPushes'); + + cy.get('tbody tr').should('have.length.at.least', 1); + + // Check for clickable elements in the first row + cy.get('tbody tr') + .first() + .within(() => { + // Should have links and buttons + cy.get('a').should('have.length.at.least', 1); // Repository links, etc. 
+ cy.get('button').should('have.length.at.least', 1); // Action button + }); + }); + }); + + describe('Tag Push Details Page', () => { + it('can access push details page structure', () => { + // Try to access a push details page directly + cy.visit('/dashboard/push/test-push-id', { failOnStatusCode: false }); + + // Check basic page structure exists (regardless of whether push exists) + cy.get('body').should('exist'); // Basic content check + + // If we end up redirected, that's also acceptable behavior + cy.url().should('include', '/dashboard'); + }); + }); + + describe('Basic UI Navigation', () => { + it('can navigate between dashboard pages', () => { + cy.visit('/dashboard/push'); + cy.wait('@getPushes'); + cy.get('table', { timeout: 10000 }).should('exist'); + + // Test navigation to repo dashboard + cy.visit('/dashboard/repo'); + cy.get('table', { timeout: 10000 }).should('exist'); + + // Test navigation to user management if it exists + cy.visit('/dashboard/user'); + cy.get('body').should('exist'); + }); + }); + + describe('Application Robustness', () => { + it('handles navigation to non-existent push gracefully', () => { + // Try to visit a non-existent push detail page + cy.visit('/dashboard/push/non-existent-push-id', { failOnStatusCode: false }); + + // Should either redirect or show error page, but not crash + cy.get('body').should('exist'); + }); + + it('maintains functionality after page refresh', () => { + cy.visit('/dashboard/push'); + cy.wait('@getPushes'); + cy.get('table', { timeout: 10000 }).should('exist'); + + // Refresh the page + cy.reload(); + // Wait for API call again after reload + cy.wait('@getPushes'); + + // Wait for page to reload and check basic functionality + cy.get('body').should('exist'); + + // Give more time for table to load after refresh, or check if redirected + cy.url().then((url) => { + if (url.includes('/dashboard/push')) { + cy.get('table', { timeout: 15000 }).should('exist'); + } else { + // If redirected (e.g., to 
login), that's also acceptable behavior + cy.get('body').should('exist'); + } + }); + }); + }); +}); diff --git a/cypress/support/commands.js b/cypress/support/commands.js index 5117d6cfc..b3e9917b6 100644 --- a/cypress/support/commands.js +++ b/cypress/support/commands.js @@ -65,3 +65,66 @@ Cypress.Commands.add('getCSRFToken', () => { return cy.wrap(decodeURIComponent(token)); }); }); + +Cypress.Commands.add('createTestTagPush', (pushData = {}) => { + const defaultTagPush = { + id: `test-tag-push-${Date.now()}`, + steps: [], + error: false, + blocked: true, + allowPush: false, + authorised: false, + canceled: false, + rejected: false, + autoApproved: false, + autoRejected: false, + type: 'push', + method: 'get', + timestamp: Date.now(), + project: 'cypress-test', + repoName: 'test-repo.git', + url: 'https://github.com/cypress-test/test-repo.git', + repo: 'cypress-test/test-repo.git', + user: 'test-tagger', + userEmail: 'test-tagger@test.com', + branch: 'refs/heads/main', + tag: 'refs/tags/v1.0.0', + commitFrom: '0000000000000000000000000000000000000000', + commitTo: 'abcdef1234567890abcdef1234567890abcdef12', + lastStep: null, + blockedMessage: '\n\n\nGitProxy has received your tag push\n\n\n', + _id: null, + attestation: null, + tagData: [ + { + tagName: 'v1.0.0', + type: 'annotated', + tagger: 'test-tagger', + message: 'Release version 1.0.0\n\nThis is a test tag release for Cypress testing.', + timestamp: Math.floor(Date.now() / 1000), + }, + ], + commitData: [ + { + commitTs: Math.floor(Date.now() / 1000) - 300, + commitTimestamp: Math.floor(Date.now() / 1000) - 300, + message: 'feat: add new tag push feature', + committer: 'test-committer', + author: 'test-author', + authorEmail: 'test-author@test.com', + }, + ], + diff: { + content: '+++ test tag push implementation', + }, + ...pushData, + }; + + // For now, intercept the push API calls and return our test data + cy.intercept('GET', '**/api/v1/push*', { + statusCode: 200, + body: [defaultTagPush], + 
}).as('getPushes'); + + return cy.wrap(defaultTagPush); +}); diff --git a/packages/git-proxy-cli/index.js b/packages/git-proxy-cli/index.js new file mode 100755 index 000000000..614104d6a --- /dev/null +++ b/packages/git-proxy-cli/index.js @@ -0,0 +1,561 @@ +#!/usr/bin/env node +const axios = require('axios'); +const yargs = require('yargs/yargs'); +const { hideBin } = require('yargs/helpers'); +const fs = require('fs'); +const util = require('util'); + +const GIT_PROXY_COOKIE_FILE = 'git-proxy-cookie'; +// GitProxy UI HOST and PORT (configurable via environment variable) +const { GIT_PROXY_UI_HOST: uiHost = 'http://localhost', GIT_PROXY_UI_PORT: uiPort = 8080 } = + process.env; + +const baseUrl = `${uiHost}:${uiPort}`; + +axios.defaults.timeout = 30000; + +/** + * Log in to GitProxy + * @param {string} username The user name to login with + * @param {string} password The password to use for the login + */ +async function login(username, password) { + try { + let response = await axios.post( + `${baseUrl}/api/auth/login`, + { + username, + password, + }, + { + headers: { 'Content-Type': 'application/json' }, + withCredentials: true, + }, + ); + const cookies = response.headers['set-cookie']; + + response = await axios.get(`${baseUrl}/api/auth/profile`, { + headers: { Cookie: cookies }, + withCredentials: true, + }); + + fs.writeFileSync(GIT_PROXY_COOKIE_FILE, JSON.stringify(cookies), 'utf8'); + + const user = `"${response.data.username}" <${response.data.email}>`; + const isAdmin = response.data.admin ? ' (admin)' : ''; + console.log(`Login ${user}${isAdmin}: OK`); + } catch (error) { + if (error.response) { + console.error(`Error: Login '${username}': '${error.response.status}'`); + process.exitCode = 1; + } else { + console.error(`Error: Login '${username}': '${error.message}'`); + process.exitCode = 2; + } + } +} + +/** + * Prints a JSON list of git pushes filtered based on specified criteria. 
+ * The function filters the pushes based on various statuses such as whether + * the push is allowed, authorised, blocked, canceled, encountered an error, + * or was rejected. + * + * @param {Object} filters - An object containing filter criteria for Git + * pushes. + * @param {boolean} filters.allowPush - If not null, filters for pushes with + * given attribute and status. + * @param {boolean} filters.authorised - If not null, filters for pushes with + * given attribute and status. + * @param {boolean} filters.blocked - If not null, filters for pushes with + * given attribute and status. + * @param {boolean} filters.canceled - If not null, filters for pushes with + * given attribute and status. + * @param {boolean} filters.error - If not null, filters for pushes with given + * attribute and status. + * @param {boolean} filters.rejected - If not null, filters for pushes with + * given attribute and status. + */ +async function getGitPushes(filters) { + if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { + console.error('Error: List: Authentication required'); + process.exitCode = 1; + return; + } + + try { + const cookies = JSON.parse(fs.readFileSync(GIT_PROXY_COOKIE_FILE, 'utf8')); + + const response = await axios.get(`${baseUrl}/api/v1/push/`, { + headers: { Cookie: cookies }, + params: filters, + }); + + const records = []; + response.data?.forEach((push) => { + const record = {}; + record.id = push.id; + record.timestamp = push.timestamp; + record.url = push.url; + record.allowPush = push.allowPush; + record.authorised = push.authorised; + record.blocked = push.blocked; + record.canceled = push.canceled; + record.error = push.error; + record.rejected = push.rejected; + + record.lastStep = { + stepName: push.lastStep?.stepName, + error: push.lastStep?.error, + errorMessage: push.lastStep?.errorMessage, + blocked: push.lastStep?.blocked, + blockedMessage: push.lastStep?.blockedMessage, + }; + + record.commitData = []; + push.commitData?.forEach((pushCommitDataRecord) 
=> { + record.commitData.push({ + message: pushCommitDataRecord.message, + committer: pushCommitDataRecord.committer, + }); + }); + + records.push(record); + }); + + console.log(`${util.inspect(records, false, null, false)}`); + } catch (error) { + // default error + const errorMessage = `Error: List: '${error.message}'`; + process.exitCode = 2; + console.error(errorMessage); + } +} + +/** + * Authorise git push by ID + * @param {string} id The ID of the git push to authorise + */ +async function authoriseGitPush(id) { + if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { + console.error('Error: Authorise: Authentication required'); + process.exitCode = 1; + return; + } + + try { + const cookies = JSON.parse(fs.readFileSync(GIT_PROXY_COOKIE_FILE, 'utf8')); + + await axios.get(`${baseUrl}/api/v1/push/${id}`, { + headers: { Cookie: cookies }, + }); + + await axios.post( + `${baseUrl}/api/v1/push/${id}/authorise`, + { + params: { + attestation: [ + { + label: 'Authorising via GitProxy CLI', + checked: true, + }, + ], + }, + }, + { + headers: { Cookie: cookies }, + }, + ); + + console.log(`Authorise: ID: '${id}': OK`); + } catch (error) { + // default error + let errorMessage = `Error: Authorise: '${error.message}'`; + process.exitCode = 2; + + if (error.response) { + switch (error.response.status) { + case 401: + errorMessage = + 'Error: Authorise: Authentication required (401): ' + error?.response?.data?.message; + process.exitCode = 3; + break; + case 404: + errorMessage = `Error: Authorise: ID: '${id}': Not Found`; + process.exitCode = 4; + } + } + console.error(errorMessage); + } +} + +/** + * Reject git push by ID + * @param {string} id The ID of the git push to reject + */ +async function rejectGitPush(id) { + if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { + console.error('Error: Reject: Authentication required'); + process.exitCode = 1; + return; + } + + try { + const cookies = JSON.parse(fs.readFileSync(GIT_PROXY_COOKIE_FILE, 'utf8')); + + await 
axios.get(`${baseUrl}/api/v1/push/${id}`, { + headers: { Cookie: cookies }, + }); + + await axios.post( + `${baseUrl}/api/v1/push/${id}/reject`, + {}, + { + headers: { Cookie: cookies }, + }, + ); + + console.log(`Reject: ID: '${id}': OK`); + } catch (error) { + // default error + let errorMessage = `Error: Reject: '${error.message}'`; + process.exitCode = 2; + + if (error.response) { + switch (error.response.status) { + case 401: + errorMessage = + 'Error: Reject: Authentication required (401): ' + error?.response?.data?.message; + process.exitCode = 3; + break; + case 404: + errorMessage = `Error: Reject: ID: '${id}': Not Found`; + process.exitCode = 4; + } + } + console.error(errorMessage); + } +} + +/** + * Cancel git push by ID + * @param {string} id The ID of the git push to cancel + */ +async function cancelGitPush(id) { + if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { + console.error('Error: Cancel: Authentication required'); + process.exitCode = 1; + return; + } + + try { + const cookies = JSON.parse(fs.readFileSync(GIT_PROXY_COOKIE_FILE, 'utf8')); + + await axios.get(`${baseUrl}/api/v1/push/${id}`, { + headers: { Cookie: cookies }, + }); + + await axios.post( + `${baseUrl}/api/v1/push/${id}/cancel`, + {}, + { + headers: { Cookie: cookies }, + }, + ); + + console.log(`Cancel: ID: '${id}': OK`); + } catch (error) { + // default error + let errorMessage = `Error: Cancel: '${error.message}'`; + process.exitCode = 2; + + if (error.response) { + switch (error.response.status) { + case 401: + errorMessage = + 'Error: Cancel: Authentication required (401): ' + error?.response?.data?.message; + process.exitCode = 3; + break; + case 404: + errorMessage = `Error: Cancel: ID: '${id}': Not Found`; + process.exitCode = 4; + } + } + console.error(errorMessage); + } +} + +/** + * Log out (and clean up) + */ +async function logout() { + if (fs.existsSync(GIT_PROXY_COOKIE_FILE)) { + try { + const cookies = JSON.parse(fs.readFileSync(GIT_PROXY_COOKIE_FILE, 'utf8')); + 
fs.writeFileSync(GIT_PROXY_COOKIE_FILE, '*** logged out ***', 'utf8'); + fs.unlinkSync(GIT_PROXY_COOKIE_FILE); + + await axios.post( + `${baseUrl}/api/auth/logout`, + {}, + { + headers: { Cookie: cookies }, + }, + ); + } catch (error) { + console.log(`Warning: Logout: '${error.message}'`); + } + } + + console.log('Logout: OK'); +} + +/** + * Reloads the GitProxy configuration without restarting the process + */ +async function reloadConfig() { + if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { + console.error('Error: Reload config: Authentication required'); + process.exitCode = 1; + return; + } + + try { + const cookies = JSON.parse(fs.readFileSync(GIT_PROXY_COOKIE_FILE, 'utf8')); + + await axios.post(`${baseUrl}/api/v1/admin/reload-config`, {}, { headers: { Cookie: cookies } }); + + console.log('Configuration reloaded successfully'); + } catch (error) { + const errorMessage = `Error: Reload config: '${error.message}'`; + process.exitCode = 2; + console.error(errorMessage); + } +} + +/** + * Create a new user + * @param {string} username The username for the new user + * @param {string} password The password for the new user + * @param {string} email The email for the new user + * @param {string} gitAccount The git account for the new user + * @param {boolean} [admin=false] Whether the user should be an admin (optional) + */ +async function createUser(username, password, email, gitAccount, admin = false) { + if (!fs.existsSync(GIT_PROXY_COOKIE_FILE)) { + console.error('Error: Create User: Authentication required'); + process.exitCode = 1; + return; + } + + try { + const cookies = JSON.parse(fs.readFileSync(GIT_PROXY_COOKIE_FILE, 'utf8')); + + await axios.post( + `${baseUrl}/api/auth/create-user`, + { + username, + password, + email, + gitAccount, + admin, + }, + { + headers: { Cookie: cookies }, + }, + ); + + console.log(`User '${username}' created successfully`); + } catch (error) { + let errorMessage = `Error: Create User: '${error.message}'`; + process.exitCode = 
2; + + if (error.response) { + switch (error.response.status) { + case 401: + errorMessage = 'Error: Create User: Authentication required'; + process.exitCode = 3; + break; + case 400: + errorMessage = `Error: Create User: ${error.response.data.message}`; + process.exitCode = 4; + break; + } + } + console.error(errorMessage); + } +} + +// Parsing command line arguments +yargs(hideBin(process.argv)) // eslint-disable-line @typescript-eslint/no-unused-expressions + .command({ + command: 'authorise', + describe: 'Authorise git push by ID', + builder: { + id: { + describe: 'Push ID', + demandOption: true, + type: 'string', + }, + }, + handler(argv) { + authoriseGitPush(argv.id); + }, + }) + .command({ + command: 'cancel', + describe: 'Cancel git push by ID', + builder: { + id: { + describe: 'Push ID', + demandOption: true, + type: 'string', + }, + }, + handler(argv) { + cancelGitPush(argv.id); + }, + }) + .command({ + command: 'config', + describe: 'Print configuration', + handler() { + console.log(`GitProxy URL: ${baseUrl}`); + }, + }) + .command({ + command: 'login', + describe: 'Log in by username/password', + builder: { + username: { + describe: 'Username', + demandOption: true, + type: 'string', + }, + password: { + describe: 'Password', + demandOption: true, + type: 'string', + }, + }, + handler(argv) { + login(argv.username, argv.password); + }, + }) + .command({ + command: 'logout', + describe: 'Log out', + handler() { + logout(); + }, + }) + .command({ + command: 'ls', + describe: 'Get list of git pushes', + builder: { + allowPush: { + describe: `Filter for the "allowPush" flag of the git push on the list`, + demandOption: false, + type: 'boolean', + default: null, + }, + authorised: { + describe: `Filter for the "authorised" flag of the git push on the list`, + demandOption: false, + type: 'boolean', + default: null, + }, + blocked: { + describe: `Filter for the "blocked" flag of the git push on the list`, + demandOption: false, + type: 'boolean', + default: 
null, + }, + canceled: { + describe: `Filter for the "canceled" flag of the git push on the list`, + demandOption: false, + type: 'boolean', + default: null, + }, + error: { + describe: `Filter for the "error" flag of the git push on the list`, + demandOption: false, + type: 'boolean', + default: null, + }, + rejected: { + describe: `Filter for the "rejected" flag of the git push on the list`, + demandOption: false, + type: 'boolean', + default: null, + }, + }, + handler(argv) { + const filters = { + allowPush: argv.allowPush, + authorised: argv.authorised, + blocked: argv.blocked, + canceled: argv.canceled, + error: argv.error, + rejected: argv.rejected, + }; + getGitPushes(filters); + }, + }) + .command({ + command: 'reject', + describe: 'Reject git push by ID', + builder: { + id: { + describe: 'Push ID', + demandOption: true, + type: 'string', + }, + }, + handler(argv) { + rejectGitPush(argv.id); + }, + }) + .command({ + command: 'reload-config', + description: 'Reload GitProxy configuration without restarting', + action: reloadConfig, + }) + .command({ + command: 'create-user', + describe: 'Create a new user', + builder: { + username: { + describe: 'Username for the new user', + demandOption: true, + type: 'string', + }, + password: { + describe: 'Password for the new user', + demandOption: true, + type: 'string', + }, + email: { + describe: 'Email for the new user', + demandOption: true, + type: 'string', + }, + gitAccount: { + describe: 'Git account for the new user', + demandOption: true, + type: 'string', + }, + admin: { + describe: 'Whether the user should be an admin (optional)', + demandOption: false, + type: 'boolean', + default: false, + }, + }, + handler(argv) { + createUser(argv.username, argv.password, argv.email, argv.gitAccount, argv.admin); + }, + }) + .demandCommand(1, 'You need at least one command before moving on') + .strict() + .help().argv; diff --git a/packages/git-proxy-cli/test/testCli.test.js 
b/packages/git-proxy-cli/test/testCli.test.js new file mode 100644 index 000000000..9a2607ab3 --- /dev/null +++ b/packages/git-proxy-cli/test/testCli.test.js @@ -0,0 +1,804 @@ +const helper = require('./testCliUtils'); + +const path = require('path'); + +// set test proxy config file path *before* loading the proxy +require('../../../src/config/file').configFile = path.join( + process.cwd(), + 'test', + 'testCli.proxy.config.json', +); +const service = require('../../../src/service'); + +/* test constants */ +// push ID which does not exist +const GHOST_PUSH_ID = + '0000000000000000000000000000000000000000__79b4d8953cbc324bcc1eb53d6412ff89666c241f'; +// repo for test cases +const TEST_REPO_CONFIG = { + project: 'finos', + name: 'git-proxy-test', + url: 'https://github.com/finos/git-proxy-test.git', +}; +const TEST_REPO = 'finos/git-proxy-test.git'; +// user for test cases +const TEST_USER = 'testuser'; +const TEST_PASSWORD = 'testpassword'; +const TEST_EMAIL = 'jane.doe@email.com'; +const TEST_GIT_ACCOUNT = 'testGitAccount'; + +describe('test git-proxy-cli', function () { + // *** help *** + + describe(`test git-proxy-cli :: help`, function () { + it(`print help if no command or option is given`, async function () { + const cli = `npx -- @finos/git-proxy-cli`; + const expectedExitCode = 1; + const expectedMessages = null; + const expectedErrorMessages = [ + 'Commands:', + 'Options:', + 'You need at least one command before moving on', + ]; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it(`print help if invalid command or option is given`, async function () { + const cli = `npx -- @finos/git-proxy-cli invalid --invalid`; + const expectedExitCode = 1; + const expectedMessages = null; + const expectedErrorMessages = [ + 'Commands:', + 'Options:', + 'Unknown arguments: invalid, invalid', + ]; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it(`print help if "--help" 
option is given`, async function () { + const cli = `npx -- @finos/git-proxy-cli invalid --help`; + const expectedExitCode = 0; + const expectedMessages = ['Commands:', 'Options:']; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + }); + + // *** version *** + + describe(`test git-proxy-cli :: version`, function () { + it(`"--version" option prints version details `, async function () { + const cli = `npx -- @finos/git-proxy-cli --version`; + const expectedExitCode = 0; + const expectedMessages = ['0.1.0']; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + }); + + // *** configuration *** + + describe('test git-proxy-cli :: configuration', function () { + it(`"config" command prints configuration details`, async function () { + const cli = `npx -- @finos/git-proxy-cli config`; + const expectedExitCode = 0; + const expectedMessages = ['GitProxy URL:']; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + }); + + // *** login *** + + describe('test git-proxy-cli :: login', function () { + before(async function () { + await helper.addUserToDb(TEST_USER, TEST_PASSWORD, TEST_EMAIL, TEST_GIT_ACCOUNT); + }); + + after(async function () { + await helper.removeUserFromDb(TEST_USER); + }); + + it('login should fail when server is down', async function () { + const username = 'admin'; + const password = 'admin'; + const cli = `npx -- @finos/git-proxy-cli login --username ${username} --password ${password}`; + const expectedExitCode = 2; + const expectedMessages = null; + const expectedErrorMessages = [`Error: Login '${username}':`]; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('login should fail with invalid credentials', async function () { + const username = 'unkn0wn'; + const
password = 'p4ssw0rd'; + const cli = `npx -- @finos/git-proxy-cli login --username ${username} --password ${password}`; + const expectedExitCode = 1; + const expectedMessages = null; + const expectedErrorMessages = [`Error: Login '${username}': '401'`]; + try { + await helper.startServer(service); + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('login should be successful with valid credentials (admin)', async function () { + const username = 'admin'; + const password = 'admin'; + const cli = `npx -- @finos/git-proxy-cli login --username ${username} --password ${password}`; + const expectedExitCode = 0; + const expectedMessages = [`Login "${username}" (admin): OK`]; + const expectedErrorMessages = null; + try { + await helper.startServer(service); + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('login should be successful with valid credentials (non-admin)', async function () { + const cli = `npx -- @finos/git-proxy-cli login --username ${TEST_USER} --password ${TEST_PASSWORD}`; + const expectedExitCode = 0; + const expectedMessages = [`Login "${TEST_USER}" <${TEST_EMAIL}>: OK`]; + const expectedErrorMessages = null; + try { + await helper.startServer(service); + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + }); + + // *** logout *** + + describe('test git-proxy-cli :: logout', function () { + it('logout should succeed when server is down (and not logged in before)', async function () { + await helper.removeCookiesFile(); + + const cli = `npx -- @finos/git-proxy-cli logout`; + const expectedExitCode = 0; + const expectedMessages = [`Logout: OK`]; + const expectedErrorMessages = null; + await helper.runCli(cli,
expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('logout should succeed when server is down (but logged in before)', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + } finally { + await helper.closeServer(service.httpServer); + } + + const cli = `npx -- @finos/git-proxy-cli logout`; + const expectedExitCode = 0; + const expectedMessages = [`Logout: OK`]; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('logout should succeed when not authenticated (server is up)', async function () { + try { + await helper.createCookiesFileWithExpiredCookie(); + + const cli = `npx -- @finos/git-proxy-cli logout`; + const expectedExitCode = 0; + const expectedMessages = [`Logout: OK`]; + const expectedErrorMessages = null; + await helper.startServer(service); + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('logout should be successful when authenticated (server is up)', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli logout`; + const expectedExitCode = 0; + const expectedMessages = [`Logout: OK`]; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + }); + + // *** authorise *** + + describe('test git-proxy-cli :: authorise', function () { + const pushId = `auth000000000000000000000000000000000000__${Date.now()}`; + + before(async function () { + await helper.addRepoToDb(TEST_REPO_CONFIG); + await helper.addUserToDb(TEST_USER, TEST_PASSWORD,
TEST_EMAIL, TEST_GIT_ACCOUNT); + await helper.addGitPushToDb(pushId, TEST_REPO_CONFIG.url, TEST_USER, TEST_EMAIL); + }); + + after(async function () { + await helper.removeGitPushFromDb(pushId); + await helper.removeUserFromDb(TEST_USER); + await helper.removeRepoFromDb(TEST_REPO_CONFIG.url); + }); + + it('attempt to authorise should fail when server is down', async function () { + try { + // start server -> login -> stop server + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + } finally { + await helper.closeServer(service.httpServer); + } + + const id = GHOST_PUSH_ID; + const cli = `npx -- @finos/git-proxy-cli authorise --id ${id}`; + const expectedExitCode = 2; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Authorise:']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to authorise should fail when not authenticated', async function () { + await helper.removeCookiesFile(); + + const id = GHOST_PUSH_ID; + const cli = `npx -- @finos/git-proxy-cli authorise --id ${id}`; + const expectedExitCode = 1; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Authorise: Authentication required']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to authorise should fail when not authenticated (server restarted)', async function () { + try { + await helper.createCookiesFileWithExpiredCookie(); + await helper.startServer(service); + const id = pushId; + const cli = `npx -- @finos/git-proxy-cli authorise --id ${id}`; + const expectedExitCode = 3; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Authorise: Authentication required']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('attempt to 
authorise should fail when git push ID not found', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const id = GHOST_PUSH_ID; + const cli = `npx -- @finos/git-proxy-cli authorise --id ${id}`; + const expectedExitCode = 4; + const expectedMessages = null; + const expectedErrorMessages = [`Error: Authorise: ID: '${id}': Not Found`]; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + }); + + // *** cancel *** + + describe('test git-proxy-cli :: cancel', function () { + const pushId = `cancel0000000000000000000000000000000000__${Date.now()}`; + + before(async function () { + await helper.addRepoToDb(TEST_REPO_CONFIG); + await helper.addUserToDb(TEST_USER, TEST_PASSWORD, TEST_EMAIL, TEST_GIT_ACCOUNT); + await helper.addGitPushToDb(pushId, TEST_USER, TEST_EMAIL, TEST_REPO); + }); + + after(async function () { + await helper.removeGitPushFromDb(pushId); + await helper.removeUserFromDb(TEST_USER); + await helper.removeRepoFromDb(TEST_REPO_CONFIG.url); + }); + + it('attempt to cancel should fail when server is down', async function () { + try { + // start server -> login -> stop server + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + } finally { + await helper.closeServer(service.httpServer); + } + + const id = GHOST_PUSH_ID; + const cli = `npx -- @finos/git-proxy-cli cancel --id ${id}`; + const expectedExitCode = 2; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Cancel:']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to cancel should fail when not authenticated', async function () { + await helper.removeCookiesFile(); + + const id = GHOST_PUSH_ID; + const cli = `npx -- 
@finos/git-proxy-cli cancel --id ${id}`; + const expectedExitCode = 1; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Cancel: Authentication required']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to cancel should fail when not authenticated (server restarted)', async function () { + try { + await helper.createCookiesFileWithExpiredCookie(); + await helper.startServer(service); + const id = pushId; + const cli = `npx -- @finos/git-proxy-cli cancel --id ${id}`; + const expectedExitCode = 3; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Cancel: Authentication required']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + // }); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('attempt to cancel should fail when git push ID not found', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const id = GHOST_PUSH_ID; + const cli = `npx -- @finos/git-proxy-cli cancel --id ${id}`; + const expectedExitCode = 4; + const expectedMessages = null; + const expectedErrorMessages = [`Error: Cancel: ID: '${id}': Not Found`]; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + }); + + // *** ls *** + + describe('test git-proxy-cli :: ls (list)', function () { + it('attempt to ls should fail when server is down', async function () { + try { + // start server -> login -> stop server + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + } finally { + await helper.closeServer(service.httpServer); + } + + const cli = `npx -- @finos/git-proxy-cli ls`; + const expectedExitCode = 2; + const expectedMessages = null; 
+ const expectedErrorMessages = ['Error: List:']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to ls should fail when not authenticated', async function () { + await helper.removeCookiesFile(); + + const cli = `npx -- @finos/git-proxy-cli ls`; + const expectedExitCode = 1; + const expectedMessages = null; + const expectedErrorMessages = ['Error: List: Authentication required']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to ls should fail when invalid option given', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli ls --invalid`; + const expectedExitCode = 1; + const expectedMessages = null; + const expectedErrorMessages = ['Options:', 'Unknown argument: invalid']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + }); + + // *** reject *** + + describe('test git-proxy-cli :: reject', function () { + const pushId = `reject0000000000000000000000000000000000__${Date.now()}`; + + before(async function () { + await helper.addRepoToDb(TEST_REPO_CONFIG); + await helper.addUserToDb(TEST_USER, TEST_PASSWORD, TEST_EMAIL, TEST_GIT_ACCOUNT); + await helper.addGitPushToDb(pushId, TEST_REPO_CONFIG.url, TEST_USER, TEST_EMAIL); + }); + + after(async function () { + await helper.removeGitPushFromDb(pushId); + await helper.removeUserFromDb(TEST_USER); + await helper.removeRepoFromDb(TEST_REPO_CONFIG.url); + }); + + it('attempt to reject should fail when server is down', async function () { + try { + // start server -> login -> stop server + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + } finally { + await 
helper.closeServer(service.httpServer); + } + + const id = GHOST_PUSH_ID; + const cli = `npx -- @finos/git-proxy-cli reject --id ${id}`; + const expectedExitCode = 2; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Reject:']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to reject should fail when not authenticated', async function () { + await helper.removeCookiesFile(); + + const id = GHOST_PUSH_ID; + const cli = `npx -- @finos/git-proxy-cli reject --id ${id}`; + const expectedExitCode = 1; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Reject: Authentication required']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to reject should fail when not authenticated (server restarted)', async function () { + try { + await helper.createCookiesFileWithExpiredCookie(); + await helper.startServer(service); + const id = pushId; + const cli = `npx -- @finos/git-proxy-cli reject --id ${id}`; + const expectedExitCode = 3; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Reject: Authentication required']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('attempt to reject should fail when git push ID not found', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const id = GHOST_PUSH_ID; + const cli = `npx -- @finos/git-proxy-cli reject --id ${id}`; + const expectedExitCode = 4; + const expectedMessages = null; + const expectedErrorMessages = [`Error: Reject: ID: '${id}': Not Found`]; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + }); + + // 
*** create user *** + + describe('test git-proxy-cli :: create-user', function () { + before(async function () { + await helper.addUserToDb(TEST_USER, TEST_PASSWORD, TEST_EMAIL, TEST_GIT_ACCOUNT); + }); + + after(async function () { + await helper.removeUserFromDb(TEST_USER); + }); + + it('attempt to create user should fail when server is down', async function () { + try { + // start server -> login -> stop server + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + } finally { + await helper.closeServer(service.httpServer); + } + + const cli = `npx -- @finos/git-proxy-cli create-user --username newuser --password newpass --email new@email.com --gitAccount newgit`; + const expectedExitCode = 2; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Create User:']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to create user should fail when not authenticated', async function () { + await helper.removeCookiesFile(); + + const cli = `npx -- @finos/git-proxy-cli create-user --username newuser --password newpass --email new@email.com --gitAccount newgit`; + const expectedExitCode = 1; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Create User: Authentication required']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + }); + + it('attempt to create user should fail when not admin', async function () { + try { + await helper.startServer(service); + await helper.runCli( + `npx -- @finos/git-proxy-cli login --username testuser --password testpassword`, + ); + + const cli = `npx -- @finos/git-proxy-cli create-user --username newuser --password newpass --email new@email.com --gitAccount newgit`; + const expectedExitCode = 3; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Create User: Authentication required']; + await 
helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('attempt to create user should fail with missing required fields', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli create-user --username newuser --password "" --email new@email.com --gitAccount newgit`; + const expectedExitCode = 4; + const expectedMessages = null; + const expectedErrorMessages = ['Error: Create User: Missing required fields']; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('should successfully create a new user', async function () { + const uniqueUsername = `newuser_${Date.now()}`; + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli create-user --username ${uniqueUsername} --password newpass --email new@email.com --gitAccount newgit`; + const expectedExitCode = 0; + const expectedMessages = [`User '${uniqueUsername}' created successfully`]; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + + // Verify we can login with the new user + await helper.runCli( + `npx -- @finos/git-proxy-cli login --username ${uniqueUsername} --password newpass`, + 0, + [`Login "${uniqueUsername}" : OK`], + null, + ); + } finally { + await helper.closeServer(service.httpServer); + // Clean up the created user + try { + await helper.removeUserFromDb(uniqueUsername); + } catch (error) { + // Ignore cleanup errors + } + } + }); + + it('should successfully create a new admin user', async function () { + const uniqueUsername = 
`newadmin_${Date.now()}`; + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli create-user --username ${uniqueUsername} --password newpass --email ${uniqueUsername}@email.com --gitAccount newgit --admin`; + const expectedExitCode = 0; + const expectedMessages = [`User '${uniqueUsername}' created successfully`]; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + + // Verify we can login with the new admin user + await helper.runCli( + `npx -- @finos/git-proxy-cli login --username ${uniqueUsername} --password newpass`, + 0, + [`Login "${uniqueUsername}" <${uniqueUsername}@email.com> (admin): OK`], + null, + ); + } finally { + await helper.closeServer(service.httpServer); + // Clean up the created user + try { + await helper.removeUserFromDb(uniqueUsername); + } catch (error) { + console.error('Error cleaning up user', error); + } + } + }); + }); + + // *** tests require push in db *** + + describe('test git-proxy-cli :: git push administration', function () { + const pushId = `0000000000000000000000000000000000000000__${Date.now()}`; + + before(async function () { + await helper.addRepoToDb(TEST_REPO_CONFIG); + await helper.addUserToDb(TEST_USER, TEST_PASSWORD, TEST_EMAIL, TEST_GIT_ACCOUNT); + await helper.addGitPushToDb(pushId, TEST_REPO_CONFIG.url, TEST_USER, TEST_EMAIL); + }); + + after(async function () { + await helper.removeGitPushFromDb(pushId); + await helper.removeUserFromDb(TEST_USER); + await helper.removeRepoFromDb(TEST_REPO_CONFIG.url); + }); + + it('attempt to ls should list existing push', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli ls --authorised false --blocked true --canceled false 
--rejected false`; + const expectedExitCode = 0; + const expectedMessages = [ + pushId, + TEST_REPO, + 'authorised: false', + 'blocked: true', + 'canceled: false', + 'error: false', + 'rejected: false', + ]; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('attempt to ls should not list existing push when filtered for authorised', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli ls --authorised true`; + const expectedExitCode = 0; + const expectedMessages = ['[]']; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('attempt to ls should not list existing push when filtered for canceled', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli ls --canceled true`; + const expectedExitCode = 0; + const expectedMessages = ['[]']; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('attempt to ls should not list existing push when filtered for rejected', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli ls --rejected true`; + const expectedExitCode = 0; + const expectedMessages = ['[]']; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, 
expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('attempt to ls should not list existing push when filtered for non-blocked', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + const cli = `npx -- @finos/git-proxy-cli ls --blocked false`; + const expectedExitCode = 0; + const expectedMessages = ['[]']; + const expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('authorise push and test if appears on authorised list', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + let cli = `npx -- @finos/git-proxy-cli ls --authorised true --canceled false --rejected false`; + let expectedExitCode = 0; + let expectedMessages = ['[]']; + let expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + + cli = `npx -- @finos/git-proxy-cli authorise --id ${pushId}`; + expectedExitCode = 0; + expectedMessages = [`Authorise: ID: '${pushId}': OK`]; + expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + + cli = `npx -- @finos/git-proxy-cli ls --authorised true --canceled false --rejected false`; + expectedExitCode = 0; + expectedMessages = [pushId, TEST_REPO]; + expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('reject push and test if appears on rejected list', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login 
--username admin --password admin`); + + let cli = `npx -- @finos/git-proxy-cli ls --authorised false --canceled false --rejected true`; + let expectedExitCode = 0; + let expectedMessages = ['[]']; + let expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + + cli = `npx -- @finos/git-proxy-cli reject --id ${pushId}`; + expectedExitCode = 0; + expectedMessages = [`Reject: ID: '${pushId}': OK`]; + expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + + cli = `npx -- @finos/git-proxy-cli ls --authorised false --canceled false --rejected true`; + expectedExitCode = 0; + expectedMessages = [pushId, TEST_REPO]; + expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await helper.closeServer(service.httpServer); + } + }); + + it('cancel push and test if appears on canceled list', async function () { + try { + await helper.startServer(service); + await helper.runCli(`npx -- @finos/git-proxy-cli login --username admin --password admin`); + + let cli = `npx -- @finos/git-proxy-cli ls --authorised false --canceled true --rejected false`; + let expectedExitCode = 0; + let expectedMessages = ['[]']; + let expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + + cli = `npx -- @finos/git-proxy-cli cancel --id ${pushId}`; + expectedExitCode = 0; + expectedMessages = [`Cancel: ID: '${pushId}': OK`]; + expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + + cli = `npx -- @finos/git-proxy-cli ls --authorised false --canceled true --rejected false`; + expectedExitCode = 0; + expectedMessages = [pushId, TEST_REPO]; + expectedErrorMessages = null; + await helper.runCli(cli, expectedExitCode, expectedMessages, expectedErrorMessages); + } finally { + await 
helper.closeServer(service.httpServer); + await helper.removeCookiesFile(); + } + }); + }); +}); diff --git a/packages/git-proxy-cli/test/testCliUtils.js b/packages/git-proxy-cli/test/testCliUtils.js new file mode 100644 index 000000000..232ec0234 --- /dev/null +++ b/packages/git-proxy-cli/test/testCliUtils.js @@ -0,0 +1,263 @@ +const fs = require('fs'); +const util = require('util'); +const { exec } = require('child_process'); +const execAsync = util.promisify(exec); +const { expect } = require('chai'); + +const actions = require('../../../src/proxy/actions/Action'); +const steps = require('../../../src/proxy/actions/Step'); +const processor = require('../../../src/proxy/processors/push-action/audit'); +const db = require('../../../src/db'); + +// cookie file name +const GIT_PROXY_COOKIE_FILE = 'git-proxy-cookie'; + +/** + * @async + * @param {string} cli - The CLI command to be executed. + * @param {number} expectedExitCode - The expected exit code after the command + * execution. Typically, `0` for successful execution. + * @param {string} expectedMessages - The array of expected messages included + * in the output after the command execution. + * @param {string} expectedErrorMessages - The array of expected messages + * included in the error output after the command execution. + * @param {boolean} debug - Flag to enable detailed logging for debugging. + * @throws {AssertionError} Throws an error if the actual exit code does not + * match the `expectedExitCode`. 
+ */ +async function runCli( + cli, + expectedExitCode = 0, + expectedMessages = null, + expectedErrorMessages = null, + debug = true, +) { + try { + console.log(`cli: '${cli}'`); + const { stdout, stderr } = await execAsync(cli); + if (debug) { + console.log(`stdout: ${stdout}`); + console.log(`stderr: ${stderr}`); + } + expect(0).to.equal(expectedExitCode); + if (expectedMessages) { + expectedMessages.forEach((expectedMessage) => { + expect(stdout).to.include(expectedMessage); + }); + } + if (expectedErrorMessages) { + expectedErrorMessages.forEach((expectedErrorMessage) => { + expect(stderr).to.include(expectedErrorMessage); + }); + } + } catch (error) { + const exitCode = error.code; + if (!exitCode) { + // an AssertionError is thrown from failing some of the expectations + // in the 'try' block: forward it to Mocha to process + throw error; + } + if (debug) { + console.log(`error.stdout: ${error.stdout}`); + console.log(`error.stderr: ${error.stderr}`); + } + expect(exitCode).to.equal(expectedExitCode); + if (expectedMessages) { + expectedMessages.forEach((expectedMessage) => { + expect(error.stdout).to.include(expectedMessage); + }); + } + if (expectedErrorMessages) { + expectedErrorMessages.forEach((expectedErrorMessage) => { + expect(error.stderr).to.include(expectedErrorMessage); + }); + } + } finally { + if (debug) { + console.log(`cli: '${cli}': done`); + } + } +} + +/** + * Starts the server. + * @param {Object} service - The GitProxy API service to be started. + * @return {Promise} A promise that resolves when the service has + * successfully started. Does not return any value upon resolution. + */ +async function startServer(service) { + await service.start(); +} + +/** + * Closes the specified HTTP server gracefully. This function wraps the + * `close` method of the `http.Server` instance in a promise to facilitate + * async/await usage. It ensures the server stops accepting new connections + * and terminates existing ones before shutting down. 
+ * + * @param {http.Server} server - The `http.Server` instance to close. + * @param {number} waitTime - The wait time after close. + * @return {Promise} A promise that resolves when the server has been + * successfully closed, or rejects if an error occurs during closure. The + * promise does not return any value upon resolution. + * + * @throws {Error} If the server cannot be closed properly or if an error + * occurs during the close operation. + */ +async function closeServer(server, waitTime = 0) { + return new Promise((resolve, reject) => { + server.closeAllConnections(); + server.close((err) => { + if (err) { + console.error('Failed to close the server:', err); + reject(err); // Reject the promise if there's an error + } else { + setTimeout(() => { + console.log(`Server closed successfully (wait time ${waitTime}).`); + resolve(); // Resolve the promise when the server is closed + }, waitTime); + } + }); + }); +} + +/** + * Create local cookies file with an expired connect cookie. + */ +async function createCookiesFileWithExpiredCookie() { + await removeCookiesFile(); + const cookies = [ + 'connect.sid=s%3AuWjJK_VGFbX9-03UfvoSt_HFU3a0vFOd.jd986YQ17Bw4j1xGJn2l9yiF3QPYhayaYcDqGsNgQY4; Path=/; HttpOnly', + ]; + fs.writeFileSync(GIT_PROXY_COOKIE_FILE, JSON.stringify(cookies), 'utf8'); +} + +/** + * Remove local cookies file. + */ +async function removeCookiesFile() { + if (fs.existsSync(GIT_PROXY_COOKIE_FILE)) { + fs.unlinkSync(GIT_PROXY_COOKIE_FILE); + } +} + +/** + * Add a new repo to the database. + * @param {object} newRepo The new repo attributes. + * @param {boolean} debug Print debug messages to console if true. 
+ */ +async function addRepoToDb(newRepo, debug = false) { + const repos = await db.getRepos(); + const found = repos.find((y) => y.project === newRepo.project && newRepo.name === y.name); + if (!found) { + await db.createRepo(newRepo); + const repo = await db.getRepoByUrl(newRepo.url); + await db.addUserCanPush(repo._id, 'admin'); + await db.addUserCanAuthorise(repo._id, 'admin'); + if (debug) { + console.log(`New repo added to database: ${newRepo}`); + } + } else { + if (debug) { + console.log(`New repo already found in database: ${newRepo}`); + } + } +} + +/** + * Removes a repo from the DB. + * @param {string} repoUrl The url of the repo to remove. + */ +async function removeRepoFromDb(repoUrl) { + const repo = await db.getRepoByUrl(repoUrl); + await db.deleteRepo(repo._id); +} + +/** + * Add a new git push record to the database. + * @param {string} id The ID of the git push. + * @param {string} repoUrl The repository URL of the git push. + * @param {string} user The user who pushed the git push. + * @param {string} userEmail The email of the user who pushed the git push. + * @param {boolean} debug Flag to enable logging for debugging. 
+ */ +async function addGitPushToDb(id, repoUrl, user = null, userEmail = null, debug = false) { + const action = new actions.Action( + id, + 'push', // type + 'get', // method + Date.now(), // timestamp + repoUrl, + ); + action.user = user; + action.userEmail = userEmail; + const step = new steps.Step( + 'authBlock', // stepName + false, // error + null, // errorMessage + true, // blocked + `\n\n\nGitProxy has received your push:\n\nhttp://localhost:8080/requests/${id}\n\n\n`, // blockedMessage + null, // content + ); + const commitData = []; + commitData.push({ + tree: 'tree test', + parent: 'parent', + author: 'author', + committer: 'committer', + commitTs: 'commitTs', + message: 'message', + }); + action.commitData = commitData; + action.addStep(step); + const result = await processor.exec(null, action); + if (debug) { + console.log(`New git push added to DB: ${util.inspect(result)}`); + } +} + +/** + * Removes a push from the DB + * @param {string} id + */ +async function removeGitPushFromDb(id) { + await db.deletePush(id); +} + +/** + * Add new user record to the database. + * @param {string} username The user name. + * @param {string} password The user password. + * @param {string} email The user email. + * @param {string} gitAccount The user git account. + * @param {boolean} admin Flag to make the user administrator. + * @param {boolean} debug Flag to enable logging for debugging. + */ +async function addUserToDb(username, password, email, gitAccount, admin = false, debug = false) { + const result = await db.createUser(username, password, email, gitAccount, admin); + if (debug) { + console.log(`New user added to DB: ${util.inspect(result)}`); + } +} + +/** + * Remove a user record from the database if present. + * @param {string} username The user name. 
+ */ +async function removeUserFromDb(username) { + await db.deleteUser(username); +} + +module.exports = { + runCli: runCli, + startServer: startServer, + closeServer: closeServer, + addRepoToDb: addRepoToDb, + removeRepoFromDb: removeRepoFromDb, + addGitPushToDb: addGitPushToDb, + removeGitPushFromDb: removeGitPushFromDb, + addUserToDb: addUserToDb, + removeUserFromDb: removeUserFromDb, + createCookiesFileWithExpiredCookie: createCookiesFileWithExpiredCookie, + removeCookiesFile: removeCookiesFile, +}; diff --git a/scripts/build-for-publish.sh b/scripts/build-for-publish.sh new file mode 100755 index 000000000..1c9ac4130 --- /dev/null +++ b/scripts/build-for-publish.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +set -euo pipefail + +# This script allows for emitting js and definitions from the typescript into +# the same import locations as the original files. +# When we adjust how we import the library we can move to a "dist" folder and +# explicit "exports". + +if [ "${IS_PUBLISHING:-}" != "YES" ]; then + echo "This script is intended to prepare the directory for publishing" + echo "and replaces files. If you only want to build the UI run \`npm run build-ui\`." + echo "Otherwise set IS_PUBLISHING to \"YES\"" + exit 1 +fi + +set -x + +REPO_ROOT="$(git rev-parse --show-toplevel)" +cd "$REPO_ROOT" + +rm -rf dist || true +tsc --project tsconfig.publish.json +# replace tsx with node for the new index.js +sed -ie '1s/tsx/node/' dist/index.js +# ensure it's executable +chmod +x dist/index.js +# move the ts source +mv src src-old +# move the built source +mv dist/src dist/index.js dist/index.d.ts . +# copy back unchanged ui code +# could probably drop this as the ui code shouldn't really be imported from +# the main package but keep for compat until split out. 
+mv src-old/ui src/ui +rm -rf src-old index.ts dist diff --git a/src/context.ts b/src/context.ts new file mode 100644 index 000000000..d8302c7cb --- /dev/null +++ b/src/context.ts @@ -0,0 +1,8 @@ +import { createContext } from 'react'; +import { UserContextType } from './ui/views/RepoDetails/RepoDetails'; + +export const UserContext = createContext({ + user: { + admin: false, + }, +}); diff --git a/src/db/mongo/pushes.ts b/src/db/mongo/pushes.ts index 4ecb1659e..596a56e29 100644 --- a/src/db/mongo/pushes.ts +++ b/src/db/mongo/pushes.ts @@ -35,9 +35,12 @@ export const getPushes = async ( rejected: 1, repo: 1, repoName: 1, + tag: 1, + tagData: 1, timepstamp: 1, type: 1, url: 1, + user: 1, }, }); }; diff --git a/src/db/types.ts b/src/db/types.ts index e43aff295..ed2294676 100644 --- a/src/db/types.ts +++ b/src/db/types.ts @@ -81,6 +81,31 @@ export class User { } } +export type Push = { + id: string; + allowPush: boolean; + authorised: boolean; + blocked: boolean; + blockedMessage: string; + branch: string; + canceled: boolean; + commitData: object; + commitFrom: string; + commitTo: string; + error: boolean; + method: string; + project: string; + rejected: boolean; + repo: string; + repoName: string; + tag?: string; + tagData?: object; + timepstamp: string; + type: string; + url: string; + user?: string; +}; + export interface PublicUser { username: string; displayName: string; diff --git a/src/proxy/actions/Action.ts b/src/proxy/actions/Action.ts index 94d3af4f2..d32511673 100644 --- a/src/proxy/actions/Action.ts +++ b/src/proxy/actions/Action.ts @@ -1,13 +1,29 @@ import { processGitURLForNameAndOrg, processUrlPath } from '../routes/helper'; import { Step } from './Step'; import { Attestation, CommitData, Rejection } from '../processors/types'; +import { TagData } from '../../types/models'; + +export enum RequestType { + PUSH = 'push', + + PULL = 'pull', +} + +export enum ActionType { + COMMIT = 'commit', + + TAG = 'tag', + + BRANCH = 'branch', +} /** * Class 
representing a Push. */ class Action { id: string; - type: string; + type: RequestType; + actionType?: ActionType; method: string; timestamp: number; project: string; @@ -37,6 +53,8 @@ class Action { rejection?: Rejection; lastStep?: Step; proxyGitPath?: string; + tag?: string; + tagData?: TagData[]; newIdxFiles?: string[]; /** @@ -47,7 +65,7 @@ class Action { * @param {number} timestamp The timestamp of the action * @param {string} url The URL to the repo that should be proxied (with protocol, origin, repo path, but not the path for the git operation). */ - constructor(id: string, type: string, method: string, timestamp: number, url: string) { + constructor(id: string, type: RequestType, method: string, timestamp: number, url: string) { this.id = id; this.type = type; this.method = method; diff --git a/src/proxy/actions/index.ts b/src/proxy/actions/index.ts index 13f35276c..0e1aa7e18 100644 --- a/src/proxy/actions/index.ts +++ b/src/proxy/actions/index.ts @@ -1,4 +1,4 @@ -import { Action } from './Action'; +import { Action, RequestType, ActionType } from './Action'; import { Step } from './Step'; -export { Action, Step }; +export { Action, Step, RequestType, ActionType }; diff --git a/src/proxy/chain.ts b/src/proxy/chain.ts index b6c1b7609..1c28d075a 100644 --- a/src/proxy/chain.ts +++ b/src/proxy/chain.ts @@ -1,10 +1,9 @@ import { PluginLoader } from '../plugin'; -import { Action } from './actions'; +import { Action, RequestType, ActionType } from './actions'; import * as proc from './processors'; import { attemptAutoApproval, attemptAutoRejection } from './actions/autoActions'; -const pushActionChain: ((req: any, action: Action) => Promise)[] = [ - proc.push.parsePush, +const branchPushChain: ((req: any, action: Action) => Promise)[] = [ proc.push.checkEmptyBranch, proc.push.checkRepoInAuthorisedList, proc.push.checkCommitMessages, @@ -21,6 +20,17 @@ const pushActionChain: ((req: any, action: Action) => Promise)[] = [ proc.push.blockForAuth, ]; +const 
tagPushChain: ((req: any, action: Action) => Promise)[] = [ + proc.push.checkRepoInAuthorisedList, + proc.push.checkUserPushPermission, + proc.push.checkIfWaitingAuth, + proc.push.pullRemote, + proc.push.writePack, + proc.push.preReceive, + // TODO: implement tag message validation? + proc.push.blockForAuth, +]; + const pullActionChain: ((req: any, action: Action) => Promise)[] = [ proc.push.checkRepoInAuthorisedList, ]; @@ -36,9 +46,16 @@ export const executeChain = async (req: any, res: any): Promise => { let checkoutCleanUpRequired = false; try { + // 1) Initialize basic action fields action = await proc.pre.parseAction(req); + // 2) Parse the push payload first to detect tags/branches + if (action.type === RequestType.PUSH) { + action = await proc.push.parsePush(req, action); + } + // 3) Select the correct chain now that action.actionType is set const actionFns = await getChain(action); + // 4) Execute each step in the selected chain for (const fn of actionFns) { action = await fn(req, action); if (!action.continue() || action.allowPush) { @@ -77,6 +94,22 @@ export const executeChain = async (req: any, res: any): Promise => { */ let chainPluginLoader: PluginLoader; +/** + * Selects the appropriate push chain based on action type + * @param {Action} action The action to select a chain for + * @return {Array} The appropriate push chain + */ +const getPushChain = (action: Action): ((req: any, action: Action) => Promise)[] => { + switch (action.actionType) { + case ActionType.TAG: + return tagPushChain; + case ActionType.BRANCH: + case ActionType.COMMIT: + default: + return branchPushChain; + } +}; + export const getChain = async ( action: Action, ): Promise<((req: any, action: Action) => Promise)[]> => { @@ -86,6 +119,7 @@ export const getChain = async ( ); pluginsInserted = true; } + if (!pluginsInserted) { console.log( `Inserting loaded plugins (${chainPluginLoader.pushPlugins.length} push, ${chainPluginLoader.pullPlugins.length} pull) into proxy chains`, @@ 
-93,7 +127,8 @@ export const getChain = async ( for (const pluginObj of chainPluginLoader.pushPlugins) { console.log(`Inserting push plugin ${pluginObj.constructor.name} into chain`); // insert custom functions after parsePush but before other actions - pushActionChain.splice(1, 0, pluginObj.exec); + branchPushChain.splice(1, 0, pluginObj.exec); + tagPushChain.splice(1, 0, pluginObj.exec); } for (const pluginObj of chainPluginLoader.pullPlugins) { console.log(`Inserting pull plugin ${pluginObj.constructor.name} into chain`); @@ -103,12 +138,14 @@ export const getChain = async ( // This is set to true so that we don't re-insert the plugins into the chain pluginsInserted = true; } - if (action.type === 'pull') { - return pullActionChain; - } else if (action.type === 'push') { - return pushActionChain; - } else { - return defaultActionChain; + + switch (action.type) { + case RequestType.PULL: + return pullActionChain; + case RequestType.PUSH: + return getPushChain(action); + default: + return defaultActionChain; } }; @@ -122,8 +159,11 @@ export default { get pluginsInserted() { return pluginsInserted; }, - get pushActionChain() { - return pushActionChain; + get branchPushChain() { + return branchPushChain; + }, + get tagPushChain() { + return tagPushChain; }, get pullActionChain() { return pullActionChain; diff --git a/src/proxy/processors/constants.ts b/src/proxy/processors/constants.ts index 3ad5784b4..7204bc036 100644 --- a/src/proxy/processors/constants.ts +++ b/src/proxy/processors/constants.ts @@ -1,6 +1,8 @@ export const BRANCH_PREFIX = 'refs/heads/'; +export const TAG_PREFIX = 'refs/tags/'; export const EMPTY_COMMIT_HASH = '0000000000000000000000000000000000000000'; export const FLUSH_PACKET = '0000'; export const PACK_SIGNATURE = 'PACK'; export const PACKET_SIZE = 4; export const GIT_OBJECT_TYPE_COMMIT = 1; +export const GIT_OBJECT_TYPE_TAG = 4; diff --git a/src/proxy/processors/pre-processor/parseAction.ts b/src/proxy/processors/pre-processor/parseAction.ts 
index 619deea93..16f6fb4a8 100644 --- a/src/proxy/processors/pre-processor/parseAction.ts +++ b/src/proxy/processors/pre-processor/parseAction.ts @@ -1,4 +1,4 @@ -import { Action } from '../../actions'; +import { Action, RequestType } from '../../actions'; import { processUrlPath } from '../../routes/helper'; import * as db from '../../../db'; @@ -9,14 +9,14 @@ const exec = async (req: { }) => { const id = Date.now(); const timestamp = id; - let type = 'default'; + let type: RequestType | string = 'default'; //inspect content-type headers to classify requests as push or pull operations // see git http protocol docs for more details: https://github.com/git/git/blob/master/Documentation/gitprotocol-http.adoc if (req.headers['content-type'] === 'application/x-git-upload-pack-request') { - type = 'pull'; + type = RequestType.PULL; } else if (req.headers['content-type'] === 'application/x-git-receive-pack-request') { - type = 'push'; + type = RequestType.PUSH; } // Proxy URLs take the form https://:// @@ -38,7 +38,7 @@ const exec = async (req: { ); } - return new Action(id.toString(), type, req.method, timestamp, url); + return new Action(id.toString(), type as RequestType, req.method, timestamp, url); }; exec.displayName = 'parseAction.exec'; diff --git a/src/proxy/processors/push-action/audit.ts b/src/proxy/processors/push-action/audit.ts new file mode 100644 index 000000000..9633a6988 --- /dev/null +++ b/src/proxy/processors/push-action/audit.ts @@ -0,0 +1,14 @@ +import { writeAudit } from '../../../db'; +import { Action, RequestType } from '../../actions'; + +const exec = async (req: any, action: Action) => { + if (action.type !== RequestType.PULL) { + await writeAudit(action); + } + + return action; +}; + +exec.displayName = 'audit.exec'; + +export { exec }; diff --git a/src/proxy/processors/push-action/clearBareClone.ts b/src/proxy/processors/push-action/clearBareClone.ts new file mode 100644 index 000000000..91f7f5b22 --- /dev/null +++ 
b/src/proxy/processors/push-action/clearBareClone.ts @@ -0,0 +1,21 @@ +import { Action, Step } from '../../actions'; +import fs from 'node:fs'; + +const exec = async (req: any, action: Action): Promise => { + const step = new Step('clearBareClone'); + + // Recursively remove the contents of ./.remote and ignore exceptions + fs.rm('./.remote', { recursive: true, force: true }, (err) => { + if (err) { + throw err; + } + console.log(`.remote is deleted!`); + }); + + action.addStep(step); + return action; +}; + +exec.displayName = 'clearBareClone.exec'; + +export { exec }; diff --git a/src/proxy/processors/push-action/parsePush.ts b/src/proxy/processors/push-action/parsePush.ts index 307fe6286..642daad8f 100644 --- a/src/proxy/processors/push-action/parsePush.ts +++ b/src/proxy/processors/push-action/parsePush.ts @@ -1,14 +1,17 @@ -import { Action, Step } from '../../actions'; +import { Action, Step, ActionType } from '../../actions'; import fs from 'fs'; import lod from 'lodash'; import { createInflate } from 'zlib'; import { CommitContent, CommitData, CommitHeader, PackMeta, PersonLine } from '../types'; +import { TagData } from '../../../types/models'; import { BRANCH_PREFIX, + TAG_PREFIX, EMPTY_COMMIT_HASH, PACK_SIGNATURE, PACKET_SIZE, GIT_OBJECT_TYPE_COMMIT, + GIT_OBJECT_TYPE_TAG, } from '../constants'; const dir = './.tmp/'; @@ -38,13 +41,13 @@ async function exec(req: any, action: Action): Promise { throw new Error('No body found in request'); } const [packetLines, packDataOffset] = parsePacketLines(req.body); - const refUpdates = packetLines.filter((line) => line.includes(BRANCH_PREFIX)); + const refUpdates = packetLines.filter((line) => line.includes('refs/')); if (refUpdates.length !== 1) { - step.log('Invalid number of branch updates.'); + step.log('Invalid number of ref updates.'); step.log(`Expected 1, but got ${refUpdates.length}`); throw new Error( - 'Your push has been blocked. 
Please make sure you are pushing to a single branch.', + 'Your push has been blocked. Multi-ref pushes (multiple tags and/or branches) are not supported yet. Please push one ref at a time.', ); } else { console.log(`refUpdates: ${JSON.stringify(refUpdates, null, 2)}`); @@ -62,7 +65,21 @@ async function exec(req: any, action: Action): Promise { // Strip everything after NUL, which is cap-list from // https://git-scm.com/docs/http-protocol#_smart_server_response - action.branch = ref.replace(/\0.*/, '').trim(); + const refName = ref.replace(/\0.*/, '').trim(); + const isTag = refName.startsWith(TAG_PREFIX); + const isBranch = refName.startsWith(BRANCH_PREFIX); + + action.branch = isBranch ? refName : undefined; + action.tag = isTag ? refName : undefined; + + // Set actionType based on what type of push this is + if (isTag) { + action.actionType = ActionType.TAG; + } else if (isBranch) { + action.actionType = ActionType.BRANCH; + } else { + action.actionType = ActionType.COMMIT; + } // Note this will change the action.id to be based on the commits action.setCommit(oldCommit, newCommit); @@ -83,19 +100,32 @@ async function exec(req: any, action: Action): Promise { const [meta, contentBuff] = getPackMeta(buf); const contents = await getContents(contentBuff, meta.entries); - action.commitData = getCommitData(contents as any); + const ParsedObjects = { + commits: [] as CommitData[], + tags: [] as TagData[], + }; - if (action.commitData.length === 0) { - step.log('No commit data found when parsing push.'); - } else { + for (const obj of contents) { + if (obj.type === GIT_OBJECT_TYPE_COMMIT) ParsedObjects.commits.push(...getCommitData([obj])); + else if (obj.type === GIT_OBJECT_TYPE_TAG) ParsedObjects.tags.push(parseTag(obj)); + } + + action.commitData = ParsedObjects.commits; + action.tagData = ParsedObjects.tags; + + if (action.commitData.length) { if (action.commitFrom === EMPTY_COMMIT_HASH) { action.commitFrom = action.commitData[action.commitData.length - 1].parent; } 
- const { committer, committerEmail } = action.commitData[action.commitData.length - 1]; console.log(`Push Request received from user ${committer} with email ${committerEmail}`); action.user = committer; action.userEmail = committerEmail; + } else if (action.tagData?.length) { + action.user = action.tagData.at(-1)!.tagger; + action.userEmail = action.tagData.at(-1)!.taggerEmail; + } else { + step.log('No commit data found when parsing push.'); } step.content = { @@ -103,7 +133,7 @@ async function exec(req: any, action: Action): Promise { }; } catch (e: any) { step.setError( - `Unable to parse push. Please contact an administrator for support: ${e.toString('utf-8')}`, + `Unable to parse push. Please contact an administrator for support: ${e.message || e.toString()}`, ); } finally { action.addStep(step); @@ -111,6 +141,44 @@ async function exec(req: any, action: Action): Promise { return action; } +function parseTag(x: CommitContent): TagData { + const lines = x.content.split('\n'); + const object = lines + .find((l) => l.startsWith('object ')) + ?.slice(7) + .trim(); + const typeLine = lines + .find((l) => l.startsWith('type ')) + ?.slice(5) + .trim(); // commit | tree | blob + const tagName = lines + .find((l) => l.startsWith('tag ')) + ?.slice(4) + .trim(); + const rawTagger = lines + .find((l) => l.startsWith('tagger ')) + ?.slice(7) + .trim(); + if (!rawTagger) throw new Error('Invalid tag object: no tagger line'); + + const taggerInfo = parsePersonLine(rawTagger); + + const messageIndex = lines.indexOf(''); + const message = lines.slice(messageIndex + 1).join('\n'); + + if (!object || !typeLine || !tagName || !taggerInfo.name) throw new Error('Invalid tag object'); + + return { + object, + type: typeLine, + tagName, + tagger: taggerInfo.name, + taggerEmail: taggerInfo.email, + timestamp: taggerInfo.timestamp, + message, + }; +} + /** * Parses the name, email, and timestamp from an author or committer line. 
* @@ -571,4 +639,4 @@ const parsePacketLines = (buffer: Buffer): [string[], number] => { exec.displayName = 'parsePush.exec'; -export { exec, getCommitData, getContents, getPackMeta, parsePacketLines }; +export { exec, getCommitData, getContents, getPackMeta, parsePacketLines, parseTag }; diff --git a/src/service/emailSender.js b/src/service/emailSender.js new file mode 100644 index 000000000..aa1ddeee1 --- /dev/null +++ b/src/service/emailSender.js @@ -0,0 +1,20 @@ +const nodemailer = require('nodemailer'); +const config = require('../config'); + +exports.sendEmail = async (from, to, subject, body) => { + const smtpHost = config.getSmtpHost(); + const smtpPort = config.getSmtpPort(); + const transporter = nodemailer.createTransport({ + host: smtpHost, + port: smtpPort, + }); + + const email = `${body}`; + const info = await transporter.sendMail({ + from, + to, + subject, + html: email, + }); + console.log('Message sent %s', info.messageId); +}; diff --git a/src/service/index.js b/src/service/index.js new file mode 100644 index 000000000..f03d75b68 --- /dev/null +++ b/src/service/index.js @@ -0,0 +1,142 @@ +const express = require('express'); +const session = require('express-session'); +const http = require('http'); +const cors = require('cors'); +const app = express(); +const path = require('path'); +const config = require('../config'); +const db = require('../db'); +const rateLimit = require('express-rate-limit'); +const lusca = require('lusca'); +const configLoader = require('../config/ConfigLoader'); + +const limiter = rateLimit(config.getRateLimit()); + +const { GIT_PROXY_UI_PORT: uiPort } = require('../config/env').serverConfig; + +const _httpServer = http.createServer(app); + +const corsOptions = { + credentials: true, + origin: true, +}; + +/** + * Internal function used to bootstrap the Git Proxy API's express application. + * @param {proxy} proxy A reference to the proxy express application, used to restart it when necessary. 
+ * @return {Promise} + */ +async function createApp(proxy) { + // configuration of passport is async + // Before we can bind the routes - we need the passport strategy + const passport = await require('./passport').configure(); + const routes = require('./routes'); + const absBuildPath = path.join(__dirname, '../../build'); + app.use(cors(corsOptions)); + app.set('trust proxy', 1); + app.use(limiter); + + // Add new admin-only endpoint to reload config + app.post('/api/v1/admin/reload-config', async (req, res) => { + if (!req.isAuthenticated() || !req.user.admin) { + return res.status(403).json({ error: 'Unauthorized' }); + } + + try { + // 1. Reload configuration + await configLoader.loadConfiguration(); + + // 2. Stop existing services + await proxy.stop(); + + // 3. Apply new configuration + config.validate(); + + // 4. Restart services with new config + await proxy.start(); + + console.log('Configuration reloaded and services restarted successfully'); + res.json({ status: 'success', message: 'Configuration reloaded and services restarted' }); + } catch (error) { + console.error('Failed to reload configuration and restart services:', error); + + // Attempt to restart with existing config if reload fails + try { + await proxy.start(); + } catch (startError) { + console.error('Failed to restart services:', startError); + } + + res.status(500).json({ error: 'Failed to reload configuration' }); + } + }); + + app.use( + session({ + store: config.getDatabase().type === 'mongo' ? 
db.getSessionStore(session) : null, + secret: config.getCookieSecret(), + resave: false, + saveUninitialized: false, + cookie: { + secure: 'auto', + httpOnly: true, + maxAge: config.getSessionMaxAgeHours() * 60 * 60 * 1000, + }, + }), + ); + if (config.getCSRFProtection() && process.env.NODE_ENV !== 'test') { + app.use( + lusca({ + csrf: { + cookie: { name: 'csrf' }, + }, + hsts: { maxAge: 31536000, includeSubDomains: true, preload: true }, + nosniff: true, + referrerPolicy: 'same-origin', + xframe: 'SAMEORIGIN', + xssProtection: true, + }), + ); + } + app.use(passport.initialize()); + app.use(passport.session()); + app.use(express.json()); + app.use(express.urlencoded({ extended: true })); + app.use('/', routes(proxy)); + app.use('/', express.static(absBuildPath)); + app.get('/*', (req, res) => { + res.sendFile(path.join(`${absBuildPath}/index.html`)); + }); + + return app; +} + +/** + * Starts the proxy service. + * @param {proxy?} proxy A reference to the proxy express application, used to restart it when necessary. + * @return {Promise} the express application (used for testing). + */ +async function start(proxy) { + if (!proxy) { + console.warn("WARNING: proxy is null and can't be controlled by the API service"); + } + + const app = await createApp(proxy); + + _httpServer.listen(uiPort); + + console.log(`Service Listening on ${uiPort}`); + app.emit('ready'); + + return app; +} + +/** + * Stops the proxy service. 
+ */ +async function stop() { + console.log(`Stopping Service Listening on ${uiPort}`); + _httpServer.close(); +} + +module.exports = { start, stop, httpServer: _httpServer }; diff --git a/src/service/passport/activeDirectory.js b/src/service/passport/activeDirectory.js new file mode 100644 index 000000000..28b8f0e54 --- /dev/null +++ b/src/service/passport/activeDirectory.js @@ -0,0 +1,95 @@ +const ActiveDirectoryStrategy = require('passport-activedirectory'); +const ldaphelper = require('./ldaphelper'); + +const type = 'activedirectory'; + +const configure = (passport) => { + const db = require('../../db'); + + // We can refactor this by normalizing auth strategy config and pass it directly into the configure() function, + // ideally when we convert this to TS. + const authMethods = require('../../config').getAuthMethods(); + const config = authMethods.find((method) => method.type.toLowerCase() === type); + const adConfig = config.adConfig; + + const { userGroup, adminGroup, domain } = config; + + console.log(`AD User Group: ${userGroup}, AD Admin Group: ${adminGroup}`); + + passport.use( + type, + new ActiveDirectoryStrategy( + { + passReqToCallback: true, + integrated: false, + ldap: adConfig, + }, + async function (req, profile, ad, done) { + try { + profile.username = profile._json.sAMAccountName?.toLowerCase(); + profile.email = profile._json.mail; + profile.id = profile.username; + req.user = profile; + + console.log( + `passport.activeDirectory: resolved login ${ + profile._json.userPrincipalName + }, profile=${JSON.stringify(profile)}`, + ); + // First check to see if the user is in the AD user group + try { + const isUser = await ldaphelper.isUserInAdGroup(req, profile, ad, domain, userGroup); + if (!isUser) { + const message = `User is not a member of ${userGroup}`; + return done(message, null); + } + } catch (err) { + console.log('ad test (isUser): e', err); + const message = `An error occurred while checking if the user is a member of the user group: 
${err.message}`; + return done(message, null); + } + + // Now check if the user is an admin + let isAdmin = false; + try { + isAdmin = await ldaphelper.isUserInAdGroup(req, profile, ad, domain, adminGroup); + } catch (err) { + const message = `An error occurred while checking if the user is a member of the admin group: ${err.message}`; + console.error(message, err); // don't return an error for this case as you may still be a user + } + + profile.admin = isAdmin; + console.log(`passport.activeDirectory: ${profile.username} admin=${isAdmin}`); + + const user = { + username: profile.username, + admin: isAdmin, + email: profile._json.mail, + displayName: profile.displayName, + title: profile._json.title, + }; + + await db.updateUser(user); + + return done(null, user); + } catch (err) { + console.log(`Error authenticating AD user: ${err.message}`); + return done(err, null); + } + }, + ), + ); + + passport.serializeUser(function (user, done) { + done(null, user); + }); + + passport.deserializeUser(function (user, done) { + done(null, user); + }); + passport.type = 'ActiveDirectory'; + + return passport; +}; + +module.exports = { configure, type }; diff --git a/src/service/passport/index.js b/src/service/passport/index.js new file mode 100644 index 000000000..e1cc9e0b5 --- /dev/null +++ b/src/service/passport/index.js @@ -0,0 +1,36 @@ +const passport = require('passport'); +const local = require('./local'); +const activeDirectory = require('./activeDirectory'); +const oidc = require('./oidc'); +const config = require('../../config'); + +// Allows obtaining strategy config function and type +// Keep in mind to add AuthStrategy enum when refactoring this to TS +const authStrategies = { + local: local, + activedirectory: activeDirectory, + openidconnect: oidc, +}; + +const configure = async () => { + passport.initialize(); + + const authMethods = config.getAuthMethods(); + + for (const auth of authMethods) { + const strategy = authStrategies[auth.type.toLowerCase()]; + if 
(strategy && typeof strategy.configure === 'function') { + await strategy.configure(passport); + } + } + + if (authMethods.some((auth) => auth.type.toLowerCase() === 'local')) { + await local.createDefaultAdmin(); + } + + return passport; +}; + +const getPassport = () => passport; + +module.exports = { authStrategies, configure, getPassport }; diff --git a/src/service/passport/jwtAuthHandler.js b/src/service/passport/jwtAuthHandler.js new file mode 100644 index 000000000..6ecc9250f --- /dev/null +++ b/src/service/passport/jwtAuthHandler.js @@ -0,0 +1,57 @@ +const { assignRoles, validateJwt } = require('./jwtUtils'); + +/** + * Middleware function to handle JWT authentication. + * @param {*} overrideConfig optional configuration to override the default JWT configuration (e.g. for testing) + * @return {Function} the middleware function + */ +const jwtAuthHandler = (overrideConfig = null) => { + return async (req, res, next) => { + const apiAuthMethods = overrideConfig + ? [{ type: 'jwt', jwtConfig: overrideConfig }] + : require('../../config').getAPIAuthMethods(); + + const jwtAuthMethod = apiAuthMethods.find((method) => method.type.toLowerCase() === 'jwt'); + if (!overrideConfig && (!jwtAuthMethod || !jwtAuthMethod.enabled)) { + return next(); + } + + const token = req.header('Authorization'); + if (!token) { + return res.status(401).send('No token provided\n'); + } + + const { clientID, authorityURL, expectedAudience, roleMapping } = jwtAuthMethod.jwtConfig; + const audience = expectedAudience || clientID; + + if (!authorityURL) { + return res.status(500).send({ + message: 'JWT handler: authority URL is not configured\n', + }); + } + + if (!clientID) { + return res.status(500).send({ + message: 'JWT handler: client ID is not configured\n', + }); + } + + const tokenParts = token.split(' '); + const { verifiedPayload, error } = await validateJwt( + tokenParts[1], + authorityURL, + audience, + clientID, + ); + if (error) { + return res.status(401).send(error); + } + + 
req.user = verifiedPayload; + assignRoles(roleMapping, verifiedPayload, req.user); + + return next(); + }; +}; + +module.exports = jwtAuthHandler; diff --git a/src/service/passport/jwtUtils.js b/src/service/passport/jwtUtils.js new file mode 100644 index 000000000..a1f8576fc --- /dev/null +++ b/src/service/passport/jwtUtils.js @@ -0,0 +1,99 @@ +const axios = require('axios'); +const jwt = require('jsonwebtoken'); +const jwkToPem = require('jwk-to-pem'); + +/** + * Obtain the JSON Web Key Set (JWKS) from the OIDC authority. + * @param {string} authorityUrl the OIDC authority URL. e.g. https://login.microsoftonline.com/{tenantId} + * @return {Promise} the JWKS keys + */ +async function getJwks(authorityUrl) { + try { + const { data } = await axios.get(`${authorityUrl}/.well-known/openid-configuration`); + const jwksUri = data.jwks_uri; + + const { data: jwks } = await axios.get(jwksUri); + return jwks.keys; + } catch (error) { + console.error('Error fetching JWKS:', error); + throw new Error('Failed to fetch JWKS'); + } +} + +/** + * Validate a JWT token using the OIDC configuration. + * @param {*} token the JWT token + * @param {*} authorityUrl the OIDC authority URL + * @param {*} clientID the OIDC client ID + * @param {*} expectedAudience the expected audience for the token + * @param {*} getJwksInject the getJwks function to use (for dependency injection). Defaults to the built-in getJwks function. 
+ * @return {Promise} the verified payload or an error + */ +async function validateJwt( + token, + authorityUrl, + clientID, + expectedAudience, + getJwksInject = getJwks, +) { + try { + const jwks = await getJwksInject(authorityUrl); + + const decodedHeader = await jwt.decode(token, { complete: true }); + if (!decodedHeader || !decodedHeader.header || !decodedHeader.header.kid) { + throw new Error('Invalid JWT: Missing key ID (kid)'); + } + + const { kid } = decodedHeader.header; + const jwk = jwks.find((key) => key.kid === kid); + if (!jwk) { + throw new Error('No matching key found in JWKS'); + } + + const pubKey = jwkToPem(jwk); + + const verifiedPayload = jwt.verify(token, pubKey, { + algorithms: ['RS256'], + issuer: authorityUrl, + audience: expectedAudience, + }); + + if (verifiedPayload.azp !== clientID) { + throw new Error('JWT client ID does not match'); + } + + return { verifiedPayload }; + } catch (error) { + const errorMessage = `JWT validation failed: ${error.message}\n`; + console.error(errorMessage); + return { error: errorMessage }; + } +} + +/** + * Assign roles to the user based on the role mappings provided in the jwtConfig. + * + * If no role mapping is provided, the user will not have any roles assigned (i.e. user.admin = false). 
+ * @param {*} roleMapping the role mapping configuration + * @param {*} payload the JWT payload + * @param {*} user the req.user object to assign roles to + */ +function assignRoles(roleMapping, payload, user) { + if (roleMapping) { + for (const role of Object.keys(roleMapping)) { + const claimValuePair = roleMapping[role]; + const claim = Object.keys(claimValuePair)[0]; + const value = claimValuePair[claim]; + + if (payload[claim] && payload[claim] === value) { + user[role] = true; + } + } + } +} + +module.exports = { + getJwks, + validateJwt, + assignRoles, +}; diff --git a/src/service/passport/ldaphelper.js b/src/service/passport/ldaphelper.js new file mode 100644 index 000000000..00ba01f00 --- /dev/null +++ b/src/service/passport/ldaphelper.js @@ -0,0 +1,51 @@ +const thirdpartyApiConfig = require('../../config').getAPIs(); +const axios = require('axios'); + +const isUserInAdGroup = (req, profile, ad, domain, name) => { + // determine, via config, if we're using HTTP or AD directly + if (thirdpartyApiConfig?.ls?.userInADGroup) { + return isUserInAdGroupViaHttp(profile.username, domain, name); + } else { + return isUserInAdGroupViaAD(req, profile, ad, domain, name); + } +}; + +const isUserInAdGroupViaAD = (req, profile, ad, domain, name) => { + return new Promise((resolve, reject) => { + ad.isUserMemberOf(profile.username, name, function (err, isMember) { + if (err) { + const msg = 'ERROR isUserMemberOf: ' + JSON.stringify(err); + reject(msg); + } else { + console.log(profile.username + ' isMemberOf ' + name + ': ' + isMember); + resolve(isMember); + } + }); + }); +}; + +const isUserInAdGroupViaHttp = (id, domain, name) => { + const url = String(thirdpartyApiConfig.ls.userInADGroup) + .replace('', domain) + .replace('', name) + .replace('', id); + + const client = axios.create({ + responseType: 'json', + headers: { + 'content-type': 'application/json', + }, + }); + + console.log(`checking if user is in group ${url}`); + return client + .get(url) + .then((res) => 
res.data) + .catch(() => { + return false; + }); +}; + +module.exports = { + isUserInAdGroup, +}; diff --git a/src/service/passport/local.js b/src/service/passport/local.js new file mode 100644 index 000000000..588278bca --- /dev/null +++ b/src/service/passport/local.js @@ -0,0 +1,59 @@ +const bcrypt = require('bcryptjs'); +const LocalStrategy = require('passport-local').Strategy; +const db = require('../../db'); + +const type = 'local'; + +const configure = async (passport) => { + passport.use( + new LocalStrategy(async (username, password, done) => { + try { + const user = await db.findUser(username); + if (!user) { + return done(null, false, { message: 'Incorrect username.' }); + } + + const passwordCorrect = await bcrypt.compare(password, user.password); + if (!passwordCorrect) { + return done(null, false, { message: 'Incorrect password.' }); + } + + return done(null, user); + } catch (err) { + return done(err); + } + }), + ); + + passport.serializeUser((user, done) => { + done(null, user.username); + }); + + passport.deserializeUser(async (username, done) => { + try { + const user = await db.findUser(username); + done(null, user); + } catch (err) { + done(err, null); + } + }); + + return passport; +}; + +/** + * Create the default admin and regular test users. 
+ */ +const createDefaultAdmin = async () => { + const createIfNotExists = async (username, password, email, type, isAdmin) => { + const user = await db.findUser(username); + if (!user) { + await db.createUser(username, password, email, type, isAdmin); + } + }; + + await createIfNotExists('admin', 'admin', 'admin@place.com', 'none', true); + await createIfNotExists('user', 'user', 'user@place.com', 'none', false); +}; + +module.exports = { configure, createDefaultAdmin, type }; diff --git a/src/service/passport/oidc.js b/src/service/passport/oidc.js new file mode 100644 index 000000000..7e2aa5ee0 --- /dev/null +++ b/src/service/passport/oidc.js @@ -0,0 +1,125 @@ +const db = require('../../db'); + +const type = 'openidconnect'; + +const configure = async (passport) => { + // Temp fix for ERR_REQUIRE_ESM, will be changed when we refactor to ESM + const { discovery, fetchUserInfo } = await import('openid-client'); + const { Strategy } = await import('openid-client/passport'); + const authMethods = require('../../config').getAuthMethods(); + const oidcConfig = authMethods.find( + (method) => method.type.toLowerCase() === 'openidconnect', + )?.oidcConfig; + const { issuer, clientID, clientSecret, callbackURL, scope } = oidcConfig; + + if (!oidcConfig || !oidcConfig.issuer) { + throw new Error('Missing OIDC issuer in configuration'); + } + + const server = new URL(issuer); + let config; + + try { + config = await discovery(server, clientID, clientSecret); + } catch (error) { + console.error('Error during OIDC discovery:', error); + throw new Error('OIDC setup error (discovery): ' + error.message); + } + + try { + const strategy = new Strategy({ callbackURL, config, scope }, async (tokenSet, done) => { + // Validate token sub for added security + const idTokenClaims = tokenSet.claims(); + const expectedSub = idTokenClaims.sub; + const userInfo = await fetchUserInfo(config, tokenSet.access_token, expectedSub); + handleUserAuthentication(userInfo, done); + }); + + // 
currentUrl must be overridden to match the callback URL + strategy.currentUrl = function (request) { + const callbackUrl = new URL(callbackURL); + const currentUrl = Strategy.prototype.currentUrl.call(this, request); + currentUrl.host = callbackUrl.host; + currentUrl.protocol = callbackUrl.protocol; + return currentUrl; + }; + + // Prevent default strategy name from being overridden with the server host + passport.use(type, strategy); + + passport.serializeUser((user, done) => { + done(null, user.oidcId || user.username); + }); + + passport.deserializeUser(async (id, done) => { + try { + const user = await db.findUserByOIDC(id); + done(null, user); + } catch (err) { + done(err); + } + }); + + return passport; + } catch (error) { + console.error('Error during OIDC passport setup:', error); + throw new Error('OIDC setup error (strategy): ' + error.message); + } +}; + +/** + * Handles user authentication with OIDC. + * @param {Object} userInfo the OIDC user info object + * @param {Function} done the callback function + * @return {Promise} a promise with the authenticated user or an error + */ +const handleUserAuthentication = async (userInfo, done) => { + console.log('handleUserAuthentication called'); + try { + const user = await db.findUserByOIDC(userInfo.sub); + + if (!user) { + const email = safelyExtractEmail(userInfo); + if (!email) return done(new Error('No email found in OIDC profile')); + + const newUser = { + username: getUsername(email), + email, + oidcId: userInfo.sub, + }; + + await db.createUser(newUser.username, null, newUser.email, 'Edit me', false, newUser.oidcId); + return done(null, newUser); + } + + return done(null, user); + } catch (err) { + return done(err); + } +}; + +/** + * Extracts email from OIDC profile. + * This function is necessary because OIDC providers have different ways of storing emails. 
+ * @param {object} profile the profile object from OIDC provider + * @return {string | null} the email address + */ +const safelyExtractEmail = (profile) => { + return ( + profile.email || (profile.emails && profile.emails.length > 0 ? profile.emails[0].value : null) + ); +}; + +/** + * Generates a username from email address. + * This helps differentiate users within the specific OIDC provider. + * Note: This is incompatible with multiple providers. Ideally, users are identified by + * OIDC ID (requires refactoring the database). + * @param {string} email the email address + * @return {string} the username + */ +const getUsername = (email) => { + return email ? email.split('@')[0] : ''; +}; + +module.exports = { configure, type }; diff --git a/src/service/routes/auth.js b/src/service/routes/auth.js new file mode 100644 index 000000000..e6163c774 --- /dev/null +++ b/src/service/routes/auth.js @@ -0,0 +1,205 @@ +const express = require('express'); +const router = new express.Router(); +const passport = require('../passport').getPassport(); +const { getAuthMethods } = require('../../config'); +const passportLocal = require('../passport/local'); +const passportAD = require('../passport/activeDirectory'); +const authStrategies = require('../passport').authStrategies; +const db = require('../../db'); +const { toPublicUser } = require('./publicApi'); +const { GIT_PROXY_UI_HOST: uiHost = 'http://localhost', GIT_PROXY_UI_PORT: uiPort = 3000 } = + process.env; + +router.get('/', (req, res) => { + res.status(200).json({ + login: { + action: 'post', + uri: '/api/auth/login', + }, + profile: { + action: 'get', + uri: '/api/auth/profile', + }, + logout: { + action: 'post', + uri: '/api/auth/logout', + }, + }); +}); + +// login strategies that will work with /login e.g. 
take username and password
+const appropriateLoginStrategies = [passportLocal.type, passportAD.type];
+// getLoginStrategy fetches the enabled auth methods and identifies if there's an appropriate
+// auth method for username and password login. If there isn't it returns null, if there is it
+// returns the first.
+const getLoginStrategy = () => {
+  // returns only enabled auth methods
+  // returns at least one enabled auth method
+  const enabledAppropriateLoginStrategies = getAuthMethods().filter((am) =>
+    appropriateLoginStrategies.includes(am.type.toLowerCase()),
+  );
+  // for where no login strategies which work for /login are enabled
+  // just return null
+  if (enabledAppropriateLoginStrategies.length === 0) {
+    return null;
+  }
+  // return the first enabled auth method
+  return enabledAppropriateLoginStrategies[0].type.toLowerCase();
+};
+
+// Factory returning the post-authentication handler: strips the password from the
+// session user and replies with the public (sanitized) user view.
+const loginSuccessHandler = () => async (req, res) => {
+  try {
+    const currentUser = { ...req.user };
+    delete currentUser.password;
+    console.log(
+      `service.routes.auth.login: user logged in, username=${
+        currentUser.username
+      } profile=${JSON.stringify(currentUser)}`,
+    );
+    res.send({
+      message: 'success',
+      user: toPublicUser(currentUser),
+    });
+  } catch (e) {
+    console.log(`service.routes.auth.login: Error logging user in ${JSON.stringify(e)}`);
+    res.status(500).send('Failed to login').end();
+  }
+};
+
+// TODO: provide separate auth endpoints for each auth strategy or chain compatible auth strategies
+// TODO: if providing separate auth methods, inform the frontend so it has relevant UI elements and appropriate client-side behavior
+router.post(
+  '/login',
+  (req, res, next) => {
+    const authType = getLoginStrategy();
+    if (authType === null) {
+      res.status(403).send('Username and Password based Login is not enabled at this time').end();
+      return;
+    }
+    console.log('going to auth with', authType);
+    return passport.authenticate(authType)(req, res, next);
+  },
+  loginSuccessHandler(),
+);
+
+router.get('/oidc', passport.authenticate(authStrategies['openidconnect'].type));
+
+router.get('/oidc/callback', (req, res, next) => {
+  passport.authenticate(authStrategies['openidconnect'].type, (err, user, info) => {
+    if (err) {
+      console.error('Authentication error:', err);
+      return res.status(401).end();
+    }
+    if (!user) {
+      console.error('No user found:', info);
+      return res.status(401).end();
+    }
+    req.logIn(user, (err) => {
+      if (err) {
+        console.error('Login error:', err);
+        return res.status(401).end();
+      }
+      console.log('Logged in successfully. User:', user);
+      return res.redirect(`${uiHost}:${uiPort}/dashboard/profile`);
+    });
+  })(req, res, next);
+});
+
+router.post('/logout', (req, res, next) => {
+  // req.logout is asynchronous (passport 0.6 regenerates the session); only reply
+  // once it has completed. Responding outside the callback raced the logout and
+  // could double-respond when next(err) also produced a response.
+  req.logout(req.user, (err) => {
+    if (err) return next(err);
+    res.clearCookie('connect.sid');
+    res.send({ isAuth: req.isAuthenticated(), user: req.user });
+  });
+});
+
+router.get('/profile', async (req, res) => {
+  if (req.user) {
+    const userVal = await db.findUser(req.user.username);
+    res.send(toPublicUser(userVal));
+  } else {
+    res.status(401).end();
+  }
+});
+
+router.post('/gitAccount', async (req, res) => {
+  if (req.user) {
+    try {
+      let username =
+        req.body.username == null || req.body.username == 'undefined'
+          ? req.body.id
+          : req.body.username;
+      username = username?.split('@')[0];
+
+      if (!username) {
+        res.status(400).send('Error: Missing username. 
Git account not updated').end();
+        return;
+      }
+
+      const reqUser = await db.findUser(req.user.username);
+      if (username !== reqUser.username && !reqUser.admin) {
+        res.status(403).send('Error: You must be an admin to update a different account').end();
+        return;
+      }
+      const user = await db.findUser(username);
+      console.log('Adding gitAccount ' + req.body.gitAccount);
+      user.gitAccount = req.body.gitAccount;
+      // await so persistence errors reach the catch below instead of escaping
+      // after the 200 has already been sent
+      await db.updateUser(user);
+      res.status(200).end();
+    } catch (e) {
+      res
+        .status(500)
+        .send({
+          message: `Error updating git account: ${e.message}`,
+        })
+        .end();
+    }
+  } else {
+    res.status(401).end();
+  }
+});
+
+router.get('/me', async (req, res) => {
+  if (req.user) {
+    const userVal = await db.findUser(req.user.username);
+    res.send(toPublicUser(userVal));
+  } else {
+    res.status(401).end();
+  }
+});
+
+router.post('/create-user', async (req, res) => {
+  if (!req.user || !req.user.admin) {
+    return res.status(401).send({
+      message: 'You are not authorized to perform this action...',
+    });
+  }
+
+  try {
+    const { username, password, email, gitAccount, admin: isAdmin = false } = req.body;
+
+    if (!username || !password || !email || !gitAccount) {
+      return res.status(400).send({
+        message: 'Missing required fields: username, password, email, and gitAccount are required',
+      });
+    }
+
+    await db.createUser(username, password, email, gitAccount, isAdmin);
+    res.status(201).send({
+      message: 'User created successfully',
+      username,
+    });
+  } catch (error) {
+    console.error('Error creating user:', error);
+    res.status(400).send({
+      message: error.message || 'Failed to create user',
+    });
+  }
+});
+
+// Export the router plus loginSuccessHandler (for tests). The earlier duplicate
+// `module.exports = router;` assignment was dead code — immediately overwritten
+// by this object export — and has been removed.
+module.exports = {
+  router,
+  loginSuccessHandler,
+};
diff --git a/src/service/routes/config.js b/src/service/routes/config.js
new file mode 100644
index 000000000..e80d70b5b
--- /dev/null
+++ b/src/service/routes/config.js
@@ -0,0 +1,22 @@
+const express = require('express');
+const router = new express.Router();
+
+const config = 
require('../../config'); + +router.get('/attestation', function ({ res }) { + res.send(config.getAttestationConfig()); +}); + +router.get('/urlShortener', function ({ res }) { + res.send(config.getURLShortener()); +}); + +router.get('/contactEmail', function ({ res }) { + res.send(config.getContactEmail()); +}); + +router.get('/uiRouteAuth', function ({ res }) { + res.send(config.getUIRouteAuth()); +}); + +module.exports = router; diff --git a/src/service/routes/healthcheck.js b/src/service/routes/healthcheck.js new file mode 100644 index 000000000..4745a8275 --- /dev/null +++ b/src/service/routes/healthcheck.js @@ -0,0 +1,10 @@ +const express = require('express'); +const router = new express.Router(); + +router.get('/', function (req, res) { + res.send({ + message: 'ok', + }); +}); + +module.exports = router; diff --git a/src/service/routes/home.js b/src/service/routes/home.js new file mode 100644 index 000000000..ce11503f6 --- /dev/null +++ b/src/service/routes/home.js @@ -0,0 +1,14 @@ +const express = require('express'); +const router = new express.Router(); + +const resource = { + healthcheck: '/api/v1/healthcheck', + push: '/api/v1/push', + auth: '/api/auth', +}; + +router.get('/', function (req, res) { + res.send(resource); +}); + +module.exports = router; diff --git a/src/service/routes/index.js b/src/service/routes/index.js new file mode 100644 index 000000000..e2e0cf1a8 --- /dev/null +++ b/src/service/routes/index.js @@ -0,0 +1,23 @@ +const express = require('express'); +const auth = require('./auth'); +const push = require('./push'); +const home = require('./home'); +const repo = require('./repo'); +const users = require('./users'); +const healthcheck = require('./healthcheck'); +const config = require('./config'); +const jwtAuthHandler = require('../passport/jwtAuthHandler'); + +const routes = (proxy) => { + const router = new express.Router(); + router.use('/api', home); + router.use('/api/auth', auth.router); + router.use('/api/v1/healthcheck', 
healthcheck); + router.use('/api/v1/push', jwtAuthHandler(), push); + router.use('/api/v1/repo', jwtAuthHandler(), repo(proxy)); + router.use('/api/v1/user', jwtAuthHandler(), users); + router.use('/api/v1/config', config); + return router; +}; + +module.exports = routes; diff --git a/src/service/routes/publicApi.js b/src/service/routes/publicApi.js new file mode 100644 index 000000000..c9b1b0566 --- /dev/null +++ b/src/service/routes/publicApi.js @@ -0,0 +1,10 @@ +export const toPublicUser = (user) => { + return { + username: user.username || '', + displayName: user.displayName || '', + email: user.email || '', + title: user.title || '', + gitAccount: user.gitAccount || '', + admin: user.admin || false, + }; +}; diff --git a/src/service/routes/push.js b/src/service/routes/push.js new file mode 100644 index 000000000..dd746a11f --- /dev/null +++ b/src/service/routes/push.js @@ -0,0 +1,183 @@ +const express = require('express'); +const router = new express.Router(); +const db = require('../../db'); + +router.get('/', async (req, res) => { + const query = { + type: 'push', + }; + + for (const k in req.query) { + if (!k) continue; + + if (k === 'limit') continue; + if (k === 'skip') continue; + let v = req.query[k]; + if (v === 'false') v = false; + if (v === 'true') v = true; + query[k] = v; + } + + res.send(await db.getPushes(query)); +}); + +router.get('/:id', async (req, res) => { + const id = req.params.id; + const push = await db.getPush(id); + if (push) { + res.send(push); + } else { + res.status(404).send({ + message: 'not found', + }); + } +}); + +router.post('/:id/reject', async (req, res) => { + if (req.user) { + const id = req.params.id; + + // Get the push request + const push = await db.getPush(id); + + // Get the committer of the push via their email + const committerEmail = push.userEmail; + const list = await db.getUsers({ email: committerEmail }); + + if (list.length === 0) { + res.status(401).send({ + message: `There was no registered user with the 
committer's email address: ${committerEmail}`, + }); + return; + } + + if (list[0].username.toLowerCase() === req.user.username.toLowerCase() && !list[0].admin) { + res.status(401).send({ + message: `Cannot reject your own changes`, + }); + return; + } + + const isAllowed = await db.canUserApproveRejectPush(id, req.user.username); + console.log({ isAllowed }); + + if (isAllowed) { + const result = await db.reject(id); + console.log(`user ${req.user.username} rejected push request for ${id}`); + res.send(result); + } else { + res.status(401).send({ + message: 'User is not authorised to reject changes', + }); + } + } else { + res.status(401).send({ + message: 'not logged in', + }); + } +}); + +router.post('/:id/authorise', async (req, res) => { + console.log({ req }); + + const questions = req.body.params?.attestation; + console.log({ questions }); + + // TODO: compare attestation to configuration and ensure all questions are answered + // - we shouldn't go on the definition in the request! 
+ const attestationComplete = questions?.every((question) => !!question.checked); + console.log({ attestationComplete }); + + if (req.user && attestationComplete) { + const id = req.params.id; + console.log({ id }); + + // Get the push request + const push = await db.getPush(id); + console.log({ push }); + + // Get the committer of the push via their email address + const committerEmail = push.userEmail; + const list = await db.getUsers({ email: committerEmail }); + console.log({ list }); + + if (list.length === 0) { + res.status(401).send({ + message: `There was no registered user with the committer's email address: ${committerEmail}`, + }); + return; + } + + if (list[0].username.toLowerCase() === req.user.username.toLowerCase() && !list[0].admin) { + res.status(401).send({ + message: `Cannot approve your own changes`, + }); + return; + } + + // If we are not the author, now check that we are allowed to authorise on this + // repo + const isAllowed = await db.canUserApproveRejectPush(id, req.user.username); + if (isAllowed) { + console.log(`user ${req.user.username} approved push request for ${id}`); + + const reviewerList = await db.getUsers({ username: req.user.username }); + console.log({ reviewerList }); + + const reviewerGitAccount = reviewerList[0].gitAccount; + console.log({ reviewerGitAccount }); + + if (!reviewerGitAccount) { + res.status(401).send({ + message: 'You must associate a GitHub account with your user before approving...', + }); + return; + } + + const attestation = { + questions, + timestamp: new Date(), + reviewer: { + username: req.user.username, + gitAccount: reviewerGitAccount, + }, + }; + const result = await db.authorise(id, attestation); + res.send(result); + } else { + res.status(401).send({ + message: `user ${req.user.username} not authorised to approve push's on this project`, + }); + } + } else { + res.status(401).send({ + message: 'You are unauthorized to perform this action...', + }); + } +}); + +router.post('/:id/cancel', async 
(req, res) => {
+  if (req.user) {
+    const id = req.params.id;
+
+    const isAllowed = await db.canUserCancelPush(id, req.user.username);
+
+    if (isAllowed) {
+      const result = await db.cancel(id);
+      console.log(`user ${req.user.username} canceled push request for ${id}`);
+      res.send(result);
+    } else {
+      console.log(`user ${req.user.username} not authorised to cancel push request for ${id}`);
+      res.status(401).send({
+        // was a single-quoted string with a stray ')' — '${req.user.username)}' was
+        // sent literally to the client; a backtick template interpolates correctly
+        message: `User ${req.user.username} not authorised to cancel push requests on this project.`,
+      });
+    }
+  } else {
+    res.status(401).send({
+      message: 'not logged in',
+    });
+  }
+});
+
+module.exports = router;
diff --git a/src/service/routes/repo.js b/src/service/routes/repo.js
new file mode 100644
index 000000000..7ebbb62e3
--- /dev/null
+++ b/src/service/routes/repo.js
@@ -0,0 +1,203 @@
+const express = require('express');
+const db = require('../../db');
+const { getProxyURL } = require('../urls');
+const { getAllProxiedHosts } = require('../../proxy/routes/helper');
+
+// create a reference to the proxy service as arrow functions will lose track of the `proxy` parameter
+// used to restart the proxy when a new host is added
+let theProxy = null;
+const repo = (proxy) => {
+  theProxy = proxy;
+  const router = new express.Router();
+
+  router.get('/', async (req, res) => {
+    const proxyURL = getProxyURL(req);
+    const query = {};
+
+    for (const k in req.query) {
+      if (!k) continue;
+
+      if (k === 'limit') continue;
+      if (k === 'skip') continue;
+      let v = req.query[k];
+      if (v === 'false') v = false;
+      if (v === 'true') v = true;
+      query[k] = v;
+    }
+
+    const qd = await db.getRepos(query);
+    res.send(qd.map((d) => ({ ...d, proxyURL })));
+  });
+
+  router.get('/:id', async (req, res) => {
+    const proxyURL = getProxyURL(req);
+    const _id = req.params.id;
+    const qd = await db.getRepoById(_id);
+    res.send({ ...qd, proxyURL });
+  });
+
+  router.patch('/:id/user/push', async (req, res) => {
+    if (req.user && req.user.admin) {
+      const _id = req.params.id; 
+ const username = req.body.username.toLowerCase(); + const user = await db.findUser(username); + + if (!user) { + res.status(400).send({ error: 'User does not exist' }); + return; + } + + await db.addUserCanPush(_id, username); + res.send({ message: 'created' }); + } else { + res.status(401).send({ + message: 'You are not authorised to perform this action...', + }); + } + }); + + router.patch('/:id/user/authorise', async (req, res) => { + if (req.user && req.user.admin) { + const _id = req.params.id; + const username = req.body.username; + const user = await db.findUser(username); + + if (!user) { + res.status(400).send({ error: 'User does not exist' }); + return; + } + + await db.addUserCanAuthorise(_id, username); + res.send({ message: 'created' }); + } else { + res.status(401).send({ + message: 'You are not authorised to perform this action...', + }); + } + }); + + router.delete('/:id/user/authorise/:username', async (req, res) => { + if (req.user && req.user.admin) { + const _id = req.params.id; + const username = req.params.username; + const user = await db.findUser(username); + + if (!user) { + res.status(400).send({ error: 'User does not exist' }); + return; + } + + await db.removeUserCanAuthorise(_id, username); + res.send({ message: 'created' }); + } else { + res.status(401).send({ + message: 'You are not authorised to perform this action...', + }); + } + }); + + router.delete('/:id/user/push/:username', async (req, res) => { + if (req.user && req.user.admin) { + const _id = req.params.id; + const username = req.params.username; + const user = await db.findUser(username); + + if (!user) { + res.status(400).send({ error: 'User does not exist' }); + return; + } + + await db.removeUserCanPush(_id, username); + res.send({ message: 'created' }); + } else { + res.status(401).send({ + message: 'You are not authorised to perform this action...', + }); + } + }); + + router.delete('/:id/delete', async (req, res) => { + if (req.user && req.user.admin) { + const _id 
= req.params.id; + + // determine if we need to restart the proxy + const previousHosts = await getAllProxiedHosts(); + await db.deleteRepo(_id); + const currentHosts = await getAllProxiedHosts(); + + if (currentHosts.length < previousHosts.length) { + // restart the proxy + console.log('Restarting the proxy to remove a host'); + await theProxy.stop(); + await theProxy.start(); + } + + res.send({ message: 'deleted' }); + } else { + res.status(401).send({ + message: 'You are not authorised to perform this action...', + }); + } + }); + + router.post('/', async (req, res) => { + if (req.user && req.user.admin) { + if (!req.body.url) { + res.status(400).send({ + message: 'Repository url is required', + }); + return; + } + + const repo = await db.getRepoByUrl(req.body.url); + if (repo) { + res.status(409).send({ + message: `Repository ${req.body.url} already exists!`, + }); + } else { + try { + // figure out if this represent a new domain to proxy + let newOrigin = true; + + const existingHosts = await getAllProxiedHosts(); + existingHosts.forEach((h) => { + // assume SSL is in use and that our origins are missing the protocol + if (req.body.url.startsWith(`https://${h}`)) { + newOrigin = false; + } + }); + + console.log( + `API request to proxy repository ${req.body.url} is for a new origin: ${newOrigin},\n\texisting origin list was: ${JSON.stringify(existingHosts)}`, + ); + + // create the repository + const repoDetails = await db.createRepo(req.body); + const proxyURL = getProxyURL(req); + + // return data on the new repoistory (including it's _id and the proxyUrl) + res.send({ ...repoDetails, proxyURL, message: 'created' }); + + // restart the proxy if we're proxying a new domain + if (newOrigin) { + console.log('Restarting the proxy to handle an additional host'); + await theProxy.stop(); + await theProxy.start(); + } + } catch (e) { + console.error('Repository creation failed due to error: ', e.message ? 
e.message : e); + console.error(e.stack); + res.status(500).send({ message: 'Failed to create repository due to error' }); + } + } + } else { + res.status(401).send({ + message: 'You are not authorised to perform this action...', + }); + } + }); + + return router; +}; + +module.exports = repo; diff --git a/src/service/routes/users.js b/src/service/routes/users.js new file mode 100644 index 000000000..18c20801e --- /dev/null +++ b/src/service/routes/users.js @@ -0,0 +1,32 @@ +const express = require('express'); +const router = new express.Router(); +const db = require('../../db'); +const { toPublicUser } = require('./publicApi'); + +router.get('/', async (req, res) => { + const query = {}; + + console.log(`fetching users = query path =${JSON.stringify(req.query)}`); + for (const k in req.query) { + if (!k) continue; + + if (k === 'limit') continue; + if (k === 'skip') continue; + let v = req.query[k]; + if (v === 'false') v = false; + if (v === 'true') v = true; + query[k] = v; + } + + const users = await db.getUsers(query); + res.send(users.map(toPublicUser)); +}); + +router.get('/:id', async (req, res) => { + const username = req.params.id.toLowerCase(); + console.log(`Retrieving details for user: ${username}`); + const user = await db.findUser(username); + res.send(toPublicUser(user)); +}); + +module.exports = router; diff --git a/src/service/urls.js b/src/service/urls.js new file mode 100644 index 000000000..2d1a60de9 --- /dev/null +++ b/src/service/urls.js @@ -0,0 +1,20 @@ +const { GIT_PROXY_SERVER_PORT: PROXY_HTTP_PORT, GIT_PROXY_UI_PORT: UI_PORT } = + require('../config/env').serverConfig; +const config = require('../config'); + +module.exports = { + getProxyURL: (req) => { + const defaultURL = `${req.protocol}://${req.headers.host}`.replace( + `:${UI_PORT}`, + `:${PROXY_HTTP_PORT}`, + ); + return config.getDomains().proxy ?? 
defaultURL; + }, + getServiceUIURL: (req) => { + const defaultURL = `${req.protocol}://${req.headers.host}`.replace( + `:${PROXY_HTTP_PORT}`, + `:${UI_PORT}`, + ); + return config.getDomains().service ?? defaultURL; + }, +}; diff --git a/src/types/models.ts b/src/types/models.ts new file mode 100644 index 000000000..53f5fecc2 --- /dev/null +++ b/src/types/models.ts @@ -0,0 +1,125 @@ +import { AttestationData } from '../ui/views/PushDetails/attestation.types'; + +export interface UserData { + id: string; + name: string; + username: string; + email?: string; + displayName?: string; + title?: string; + gitAccount?: string; + admin?: boolean; +} + +export interface CommitData { + commitTs?: number; + message: string; + committer: string; + committerEmail: string; + tree?: string; + parent?: string; + author: string; + authorEmail: string; + commitTimestamp?: number; +} + +export interface TagData { + object?: string; + type: string; // commit | tree | blob | tag or 'lightweight' | 'annotated' for legacy + tagName: string; + tagger: string; + taggerEmail?: string; + timestamp?: string; + message: string; +} + +export interface PushData { + id: string; + url: string; + repo: string; + branch: string; + commitFrom: string; + commitTo: string; + commitData: CommitData[]; + diff: { + content: string; + }; + canceled?: boolean; + rejected?: boolean; + authorised?: boolean; + attestation?: AttestationData; + autoApproved?: boolean; + timestamp: string | Date; + // Tag-specific fields + tag?: string; + tagData?: TagData[]; + user?: string; // Used for tag pushes as the tagger +} + +export interface Route { + path: string; + layout: string; + name: string; + rtlName?: string; + component: React.ComponentType; + icon?: string | React.ComponentType; + visible?: boolean; +} + +export interface GitHubRepositoryMetadata { + description?: string; + language?: string; + license?: { + spdx_id: string; + }; + html_url: string; + parent?: { + full_name: string; + html_url: string; + }; + 
created_at?: string; + updated_at?: string; + pushed_at?: string; + owner?: { + avatar_url: string; + html_url: string; + }; +} + +export interface GitLabRepositoryMetadata { + description?: string; + primary_language?: string; + license?: { + nickname: string; + }; + web_url: string; + forked_from_project?: { + full_name: string; + web_url: string; + }; + last_activity_at?: string; + avatar_url?: string; + namespace?: { + name: string; + path: string; + full_path: string; + avatar_url?: string; + web_url: string; + }; +} + +export interface SCMRepositoryMetadata { + description?: string; + language?: string; + license?: string; + htmlUrl?: string; + parentName?: string; + parentUrl?: string; + lastUpdated?: string; + created_at?: string; + updated_at?: string; + pushed_at?: string; + + profileUrl?: string; + avatarUrl?: string; +} diff --git a/src/ui/components/CustomInput/CustomInput.jsx b/src/ui/components/CustomInput/CustomInput.jsx new file mode 100644 index 000000000..831f8c804 --- /dev/null +++ b/src/ui/components/CustomInput/CustomInput.jsx @@ -0,0 +1,73 @@ +import React from 'react'; +import clsx from 'clsx'; +import PropTypes from 'prop-types'; +import { makeStyles } from '@material-ui/core/styles'; +import FormControl from '@material-ui/core/FormControl'; +import InputLabel from '@material-ui/core/InputLabel'; +import Input from '@material-ui/core/Input'; +import Clear from '@material-ui/icons/Clear'; +import Check from '@material-ui/icons/Check'; +import styles from '../../assets/jss/material-dashboard-react/components/customInputStyle'; + +const useStyles = makeStyles(styles); + +export default function CustomInput(props) { + const classes = useStyles(); + const { formControlProps, labelText, id, labelProps, inputProps, error, success } = props; + + const labelClasses = clsx({ + [classes.labelRootError]: error, + [classes.labelRootSuccess]: success && !error, + }); + const underlineClasses = clsx({ + [classes.underlineError]: error, + 
[classes.underlineSuccess]: success && !error, + [classes.underline]: true, + }); + const marginTop = clsx({ + [classes.marginTop]: labelText === undefined, + }); + + const generateIcon = () => { + if (error) { + return ; + } + if (success) { + return ; + } + return null; + }; + + return ( + + {labelText !== undefined ? ( + + {labelText} + + ) : null} + + {generateIcon()} + + ); +} + +CustomInput.propTypes = { + labelText: PropTypes.node, + labelProps: PropTypes.object, + id: PropTypes.string, + inputProps: PropTypes.object, + formControlProps: PropTypes.object, + error: PropTypes.bool, + success: PropTypes.bool, +}; diff --git a/src/ui/components/Table/Table.jsx b/src/ui/components/Table/Table.jsx new file mode 100644 index 000000000..c2cebfecf --- /dev/null +++ b/src/ui/components/Table/Table.jsx @@ -0,0 +1,70 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import clsx from 'clsx'; +import { makeStyles } from '@material-ui/core/styles'; + +import Table from '@material-ui/core/Table'; +import TableHead from '@material-ui/core/TableHead'; +import TableRow from '@material-ui/core/TableRow'; +import TableBody from '@material-ui/core/TableBody'; +import TableCell from '@material-ui/core/TableCell'; +import styles from '../../assets/jss/material-dashboard-react/components/tableStyle'; + +const useStyles = makeStyles(styles); + +export default function CustomTable(props) { + const classes = useStyles(); + const { tableHead, tableData, tableHeaderColor } = props; + return ( +
+ + {tableHead !== undefined ? ( + + + {tableHead.map((prop, key) => { + return ( + + {prop} + + ); + })} + + + ) : null} + + {tableData.map((prop, key) => { + return ( + + {prop.map((p, k) => { + return ( + + {p} + + ); + })} + + ); + })} + +
+
+ ); +} + +CustomTable.defaultProps = { + tableHeaderColor: 'gray', +}; + +CustomTable.propTypes = { + tableHeaderColor: PropTypes.oneOf([ + 'warning', + 'primary', + 'danger', + 'success', + 'info', + 'rose', + 'gray', + ]), + tableHead: PropTypes.arrayOf(PropTypes.string), + tableData: PropTypes.arrayOf(PropTypes.arrayOf(PropTypes.string)), +}; diff --git a/src/ui/components/Tasks/Tasks.jsx b/src/ui/components/Tasks/Tasks.jsx new file mode 100644 index 000000000..44fe0c5c4 --- /dev/null +++ b/src/ui/components/Tasks/Tasks.jsx @@ -0,0 +1,89 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import clsx from 'clsx'; +import { makeStyles } from '@material-ui/core/styles'; +import Checkbox from '@material-ui/core/Checkbox'; +import Tooltip from '@material-ui/core/Tooltip'; +import IconButton from '@material-ui/core/IconButton'; +import Table from '@material-ui/core/Table'; +import TableRow from '@material-ui/core/TableRow'; +import TableBody from '@material-ui/core/TableBody'; +import TableCell from '@material-ui/core/TableCell'; +import Edit from '@material-ui/icons/Edit'; +import Close from '@material-ui/icons/Close'; +import Check from '@material-ui/icons/Check'; +import styles from '../../assets/jss/material-dashboard-react/components/tasksStyle'; + +const useStyles = makeStyles(styles); + +export default function Tasks(props) { + const classes = useStyles(); + const [checked, setChecked] = React.useState([...props.checkedIndexes]); + const handleToggle = (value) => { + const currentIndex = checked.indexOf(value); + const newChecked = [...checked]; + if (currentIndex === -1) { + newChecked.push(value); + } else { + newChecked.splice(currentIndex, 1); + } + setChecked(newChecked); + }; + const { tasksIndexes, tasks, rtlActive } = props; + const tableCellClasses = clsx(classes.tableCell, { + [classes.tableCellRTL]: rtlActive, + }); + return ( + + + {tasksIndexes.map((value) => ( + + + handleToggle(value)} + checkedIcon={} + icon={} + classes={{ + 
checked: classes.checked, + root: classes.root, + }} + /> + + {tasks[value]} + + + + + + + + + + + + + + ))} + +
+ ); +} + +Tasks.propTypes = { + tasksIndexes: PropTypes.arrayOf(PropTypes.number), + tasks: PropTypes.arrayOf(PropTypes.node), + rtlActive: PropTypes.bool, + checkedIndexes: PropTypes.array, +}; diff --git a/src/ui/components/Typography/Danger.jsx b/src/ui/components/Typography/Danger.jsx new file mode 100644 index 000000000..ee6e94b59 --- /dev/null +++ b/src/ui/components/Typography/Danger.jsx @@ -0,0 +1,17 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import clsx from 'clsx'; +import { makeStyles } from '@material-ui/core/styles'; +import styles from '../../assets/jss/material-dashboard-react/components/typographyStyle'; + +const useStyles = makeStyles(styles); + +export default function Danger(props) { + const classes = useStyles(); + const { children } = props; + return
{children}
; +} + +Danger.propTypes = { + children: PropTypes.node, +}; diff --git a/src/ui/components/Typography/Info.jsx b/src/ui/components/Typography/Info.jsx new file mode 100644 index 000000000..17c3a9ddc --- /dev/null +++ b/src/ui/components/Typography/Info.jsx @@ -0,0 +1,17 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import clsx from 'clsx'; +import { makeStyles } from '@material-ui/core/styles'; +import styles from '../../assets/jss/material-dashboard-react/components/typographyStyle'; + +const useStyles = makeStyles(styles); + +export default function Info(props) { + const classes = useStyles(); + const { children } = props; + return
{children}
; +} + +Info.propTypes = { + children: PropTypes.node, +}; diff --git a/src/ui/components/Typography/Muted.jsx b/src/ui/components/Typography/Muted.jsx new file mode 100644 index 000000000..9b625c5f2 --- /dev/null +++ b/src/ui/components/Typography/Muted.jsx @@ -0,0 +1,19 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import clsx from 'clsx'; +// @material-ui/core components +import { makeStyles } from '@material-ui/core/styles'; +// core components +import styles from '../../assets/jss/material-dashboard-react/components/typographyStyle'; + +const useStyles = makeStyles(styles); + +export default function Muted(props) { + const classes = useStyles(); + const { children } = props; + return
{children}
; +} + +Muted.propTypes = { + children: PropTypes.node, +}; diff --git a/src/ui/components/Typography/Primary.jsx b/src/ui/components/Typography/Primary.jsx new file mode 100644 index 000000000..b58206c4f --- /dev/null +++ b/src/ui/components/Typography/Primary.jsx @@ -0,0 +1,19 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import clsx from 'clsx'; +// @material-ui/core components +import { makeStyles } from '@material-ui/core/styles'; +// core components +import styles from '../../assets/jss/material-dashboard-react/components/typographyStyle'; + +const useStyles = makeStyles(styles); + +export default function Primary(props) { + const classes = useStyles(); + const { children } = props; + return
{children}
; +} + +Primary.propTypes = { + children: PropTypes.node, +}; diff --git a/src/ui/components/Typography/Quote.jsx b/src/ui/components/Typography/Quote.jsx new file mode 100644 index 000000000..3dedbc7bb --- /dev/null +++ b/src/ui/components/Typography/Quote.jsx @@ -0,0 +1,25 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import clsx from 'clsx'; +// @material-ui/core components +import { makeStyles } from '@material-ui/core/styles'; +// core components +import styles from '../../assets/jss/material-dashboard-react/components/typographyStyle'; + +const useStyles = makeStyles(styles); + +export default function Quote(props) { + const classes = useStyles(); + const { text, author } = props; + return ( +
+

{text}

+ {author} +
+ ); +} + +Quote.propTypes = { + text: PropTypes.node, + author: PropTypes.node, +}; diff --git a/src/ui/components/Typography/Success.jsx b/src/ui/components/Typography/Success.jsx new file mode 100644 index 000000000..a40affc47 --- /dev/null +++ b/src/ui/components/Typography/Success.jsx @@ -0,0 +1,19 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import clsx from 'clsx'; +// @material-ui/core components +import { makeStyles } from '@material-ui/core/styles'; +// core components +import styles from '../../assets/jss/material-dashboard-react/components/typographyStyle'; + +const useStyles = makeStyles(styles); + +export default function Success(props) { + const classes = useStyles(); + const { children } = props; + return
{children}
; +} + +Success.propTypes = { + children: PropTypes.node, +}; diff --git a/src/ui/components/Typography/Warning.jsx b/src/ui/components/Typography/Warning.jsx new file mode 100644 index 000000000..70db1ea6d --- /dev/null +++ b/src/ui/components/Typography/Warning.jsx @@ -0,0 +1,19 @@ +import React from 'react'; +import PropTypes from 'prop-types'; +import clsx from 'clsx'; +// @material-ui/core components +import { makeStyles } from '@material-ui/core/styles'; +// core components +import styles from 'ui/assets/jss/material-dashboard-react/components/typographyStyle'; + +const useStyles = makeStyles(styles); + +export default function Warning(props) { + const classes = useStyles(); + const { children } = props; + return
{children}
; +} + +Warning.propTypes = { + children: PropTypes.node, +}; diff --git a/src/ui/services/auth.js b/src/ui/services/auth.js new file mode 100644 index 000000000..8dc39a2a7 --- /dev/null +++ b/src/ui/services/auth.js @@ -0,0 +1,55 @@ +import { getCookie } from '../utils'; + +const baseUrl = import.meta.env.VITE_API_URI + ? `${import.meta.env.VITE_API_URI}` + : `${location.origin}`; + +/** + * Gets the current user's information + * @return {Promise} The user's information + */ +export const getUserInfo = async () => { + try { + const response = await fetch(`${baseUrl}/api/auth/me`, { + credentials: 'include', // Sends cookies + }); + + if (!response.ok) throw new Error(`Failed to fetch user info: ${response.statusText}`); + + return await response.json(); + } catch (error) { + console.error('Error fetching user info:', error); + return null; + } +}; + +/** + * Gets the Axios config for the UI + * @return {Object} The Axios config + */ +export const getAxiosConfig = () => { + const jwtToken = localStorage.getItem('ui_jwt_token'); + return { + withCredentials: true, + headers: { + 'X-CSRF-TOKEN': getCookie('csrf'), + Authorization: jwtToken ? `Bearer ${jwtToken}` : undefined, + }, + }; +}; + +/** + * Processes authentication errors and returns a user-friendly error message + * @param {Object} error - The error object + * @return {string} The error message + */ +export const processAuthError = (error) => { + let errorMessage = `Failed to authorize user: ${error.response.data.trim()}. 
`; + if (!localStorage.getItem('ui_jwt_token')) { + errorMessage += + 'Set your JWT token in the settings page or disable JWT auth in your app configuration.'; + } else { + errorMessage += 'Check your JWT token or disable JWT auth in your app configuration.'; + } + return errorMessage; +}; diff --git a/src/ui/services/config.js b/src/ui/services/config.js new file mode 100644 index 000000000..286aab9a0 --- /dev/null +++ b/src/ui/services/config.js @@ -0,0 +1,35 @@ +import axios from 'axios'; + +const baseUrl = import.meta.env.VITE_API_URI + ? `${import.meta.env.VITE_API_URI}/api/v1` + : `${location.origin}/api/v1`; + +const getAttestationConfig = async (setData) => { + const url = new URL(`${baseUrl}/config/attestation`); + await axios(url.toString()).then((response) => { + setData(response.data.questions); + }); +}; + +const getURLShortener = async (setData) => { + const url = new URL(`${baseUrl}/config/urlShortener`); + await axios(url.toString()).then((response) => { + setData(response.data); + }); +}; + +const getEmailContact = async (setData) => { + const url = new URL(`${baseUrl}/config/contactEmail`); + await axios(url.toString()).then((response) => { + setData(response.data); + }); +}; + +const getUIRouteAuth = async (setData) => { + const url = new URL(`${baseUrl}/config/uiRouteAuth`); + await axios(url.toString()).then((response) => { + setData(response.data); + }); +}; + +export { getAttestationConfig, getURLShortener, getEmailContact, getUIRouteAuth }; diff --git a/src/ui/services/git-push.js b/src/ui/services/git-push.js new file mode 100644 index 000000000..d72746efd --- /dev/null +++ b/src/ui/services/git-push.js @@ -0,0 +1,110 @@ +import axios from 'axios'; +import { getAxiosConfig, processAuthError } from './auth.js'; + +const baseUrl = import.meta.env.VITE_API_URI + ? 
`${import.meta.env.VITE_API_URI}/api/v1` + : `${location.origin}/api/v1`; + +const getPush = async (id, setIsLoading, setData, setAuth, setIsError) => { + const url = `${baseUrl}/push/${id}`; + setIsLoading(true); + + try { + const response = await axios(url, getAxiosConfig()); + const data = response.data; + data.diff = data.steps.find((x) => x.stepName === 'diff'); + setData(data); + } catch (error) { + if (error.response?.status === 401) setAuth(false); + else setIsError(true); + } finally { + setIsLoading(false); + } +}; + +const getPushes = async ( + setIsLoading, + setData, + setAuth, + setIsError, + setErrorMessage, + query = { + blocked: true, + canceled: false, + authorised: false, + rejected: false, + }, +) => { + const url = new URL(`${baseUrl}/push`); + url.search = new URLSearchParams(query); + + setIsLoading(true); + + try { + const response = await axios(url.toString(), getAxiosConfig()); + setData(response.data); + } catch (error) { + setIsError(true); + + if (error.response?.status === 401) { + setAuth(false); + setErrorMessage(processAuthError(error)); + } else { + const message = error.response?.data?.message || error.message; + setErrorMessage(`Error fetching pushes: ${message}`); + } + } finally { + setIsLoading(false); + } +}; + +const authorisePush = async (id, setMessage, setUserAllowedToApprove, attestation) => { + const url = `${baseUrl}/push/${id}/authorise`; + let errorMsg = ''; + let isUserAllowedToApprove = true; + await axios + .post( + url, + { + params: { + attestation, + }, + }, + getAxiosConfig(), + ) + .catch((error) => { + if (error.response && error.response.status === 401) { + errorMsg = 'You are not authorised to approve...'; + isUserAllowedToApprove = false; + } + }); + await setMessage(errorMsg); + await setUserAllowedToApprove(isUserAllowedToApprove); +}; + +const rejectPush = async (id, setMessage, setUserAllowedToReject) => { + const url = `${baseUrl}/push/${id}/reject`; + let errorMsg = ''; + let isUserAllowedToReject = 
true; + await axios.post(url, {}, getAxiosConfig()).catch((error) => { + if (error.response && error.response.status === 401) { + errorMsg = 'You are not authorised to reject...'; + isUserAllowedToReject = false; + } + }); + await setMessage(errorMsg); + await setUserAllowedToReject(isUserAllowedToReject); +}; + +const cancelPush = async (id, setAuth, setIsError) => { + const url = `${baseUrl}/push/${id}/cancel`; + await axios.post(url, {}, getAxiosConfig()).catch((error) => { + if (error.response && error.response.status === 401) { + setAuth(false); + } else { + setIsError(true); + } + }); +}; + +export { getPush, getPushes, authorisePush, rejectPush, cancelPush }; diff --git a/src/ui/services/repo.js b/src/ui/services/repo.js new file mode 100644 index 000000000..4327f0520 --- /dev/null +++ b/src/ui/services/repo.js @@ -0,0 +1,132 @@ +import axios from 'axios'; +import { getAxiosConfig, processAuthError } from './auth.js'; + +const baseUrl = import.meta.env.VITE_API_URI + ? `${import.meta.env.VITE_API_URI}/api/v1` + : `${location.origin}/api/v1`; + +const canAddUser = (repoId, user, action) => { + const url = new URL(`${baseUrl}/repo/${repoId}`); + return axios + .get(url.toString(), getAxiosConfig()) + .then((response) => { + const data = response.data; + if (action === 'authorise') { + return !data.users.canAuthorise.includes(user); + } else { + return !data.users.canPush.includes(user); + } + }) + .catch((error) => { + throw error; + }); +}; + +class DupUserValidationError extends Error { + constructor(message) { + super(message); + this.name = 'The user already has this role...'; + } +} + +const getRepos = async ( + setIsLoading, + setData, + setAuth, + setIsError, + setErrorMessage, + query = {}, +) => { + const url = new URL(`${baseUrl}/repo`); + url.search = new URLSearchParams(query); + setIsLoading(true); + await axios(url.toString(), getAxiosConfig()) + .then((response) => { + const sortedRepos = response.data.sort((a, b) => 
a.name.localeCompare(b.name)); + setData(sortedRepos); + }) + .catch((error) => { + setIsError(true); + if (error.response && error.response.status === 401) { + setAuth(false); + setErrorMessage(processAuthError(error)); + } else { + setErrorMessage(`Error fetching repos: ${error.response.data.message}`); + } + }) + .finally(() => { + setIsLoading(false); + }); +}; + +const getRepo = async (setIsLoading, setData, setAuth, setIsError, id) => { + const url = new URL(`${baseUrl}/repo/${id}`); + setIsLoading(true); + await axios(url.toString(), getAxiosConfig()) + .then((response) => { + const data = response.data; + setData(data); + }) + .catch((error) => { + if (error.response && error.response.status === 401) { + setAuth(false); + } else { + setIsError(true); + } + }) + .finally(() => { + setIsLoading(false); + }); +}; + +const addRepo = async (data) => { + const url = new URL(`${baseUrl}/repo`); + + try { + const response = await axios.post(url, data, getAxiosConfig()); + return { + success: true, + repo: response.data, + }; + } catch (error) { + return { + success: false, + message: error.response?.data?.message || error.message, + }; + } +}; + +const addUser = async (repoId, user, action) => { + const canAdd = await canAddUser(repoId, user, action); + if (canAdd) { + const url = new URL(`${baseUrl}/repo/${repoId}/user/${action}`); + const data = { username: user }; + await axios.patch(url, data, getAxiosConfig()).catch((error) => { + console.log(error.response.data.message); + throw error; + }); + } else { + console.log('Duplicate user can not be added'); + throw new DupUserValidationError(); + } +}; + +const deleteUser = async (user, repoId, action) => { + const url = new URL(`${baseUrl}/repo/${repoId}/user/${action}/${user}`); + + await axios.delete(url, getAxiosConfig()).catch((error) => { + console.log(error.response.data.message); + throw error; + }); +}; + +const deleteRepo = async (repoId) => { + const url = new URL(`${baseUrl}/repo/${repoId}/delete`); + + 
await axios.delete(url, getAxiosConfig()).catch((error) => { + console.log(error.response.data.message); + throw error; + }); +}; + +export { addUser, deleteUser, getRepos, getRepo, addRepo, deleteRepo }; diff --git a/src/ui/utils.tsx b/src/ui/utils.tsx index 6a8abfc17..46045588e 100644 --- a/src/ui/utils.tsx +++ b/src/ui/utils.tsx @@ -1,5 +1,5 @@ -import axios from 'axios'; import React from 'react'; +import axios from 'axios'; import { GitHubRepositoryMetadata, GitLabRepositoryMetadata, SCMRepositoryMetadata } from './types'; import { CommitData } from '../proxy/processors/types'; import moment from 'moment'; @@ -103,20 +103,22 @@ export const getUserProfileUrl = (username: string, provider: string, hostname: * @param {string} username The username. * @param {string} provider The name of the SCM provider. * @param {string} hostname The hostname of the SCM provider. - * @return {string} A string containing an HTML A tag pointing to the user's profile, if possible, degrading to just the username or 'N/A' when not (e.g. because the SCM provider is unknown). + * @return {JSX.Element} A JSX element containing a link to the user's profile, if possible, degrading to just the username or 'N/A' when not (e.g. because the SCM provider is unknown). 
*/ export const getUserProfileLink = (username: string, provider: string, hostname: string) => { if (username) { - let profileData = ''; const profileUrl = getUserProfileUrl(username, provider, hostname); if (profileUrl) { - profileData = `${username}`; + return ( + + {username} + + ); } else { - profileData = `${username}`; + return {username}; } - return profileData; } else { - return 'N/A'; + return N/A; } }; diff --git a/src/ui/utils/pushUtils.ts b/src/ui/utils/pushUtils.ts new file mode 100644 index 000000000..27c8040de --- /dev/null +++ b/src/ui/utils/pushUtils.ts @@ -0,0 +1,244 @@ +import moment from 'moment'; +import { CommitData, PushData, TagData } from '../../types/models'; +import { trimPrefixRefsHeads, trimTrailingDotGit } from '../../db/helper'; + +/** + * Determines if a push is a tag push + * @param {PushData} pushData - The push data to check + * @return {boolean} True if this is a tag push, false otherwise + */ +export const isTagPush = (pushData: PushData): boolean => { + return Boolean(pushData?.tag && pushData?.tagData && pushData.tagData.length > 0); +}; + +/** + * Gets the display timestamp for a push (handles both commits and tags) + * @param {boolean} isTag - Whether this is a tag push + * @param {CommitData | null} commitData - The commit data + * @param {TagData} [tagData] - The tag data (optional) + * @return {string} Formatted timestamp string or 'N/A' + */ +export const getDisplayTimestamp = ( + isTag: boolean, + commitData: CommitData | null, + tagData?: TagData, +): string => { + // For tag pushes, try to use tag timestamp if available + if (isTag && tagData?.timestamp) { + return moment.unix(parseInt(tagData.timestamp)).toString(); + } + + // Fallback to commit timestamp for both commits and tags without timestamp + if (commitData) { + const timestamp = commitData.commitTimestamp || commitData.commitTs; + return timestamp ? 
moment.unix(timestamp).toString() : 'N/A'; + } + + return 'N/A'; +}; + +/** + * Safely extracts tag name from git reference + * @param {string} [tagRef] - The git tag reference (e.g., 'refs/tags/v1.0.0') + * @return {string} The tag name without the 'refs/tags/' prefix + */ +export const getTagName = (tagRef?: string): string => { + if (!tagRef || typeof tagRef !== 'string') return ''; + try { + return tagRef.replace('refs/tags/', ''); + } catch (error) { + console.warn('Error parsing tag reference:', tagRef, error); + return ''; + } +}; + +/** + * Gets the appropriate reference to show (tag name or branch name) + * @param {PushData} pushData - The push data + * @return {string} The reference name to display + */ +export const getRefToShow = (pushData: PushData): string => { + if (isTagPush(pushData)) { + return getTagName(pushData.tag); + } + return trimPrefixRefsHeads(pushData.branch); +}; + +/** + * Gets the SHA or tag identifier for display + * @param {PushData} pushData - The push data + * @return {string} The SHA (shortened) or tag name + */ +export const getShaOrTag = (pushData: PushData): string => { + if (isTagPush(pushData)) { + return getTagName(pushData.tag); + } + + if (!pushData.commitTo || typeof pushData.commitTo !== 'string') { + console.warn('Invalid commitTo value:', pushData.commitTo); + return 'N/A'; + } + + return pushData.commitTo.substring(0, 8); +}; + +/** + * Gets the committer or tagger based on push type + * @param {PushData} pushData - The push data + * @return {string} The committer username for commits or tagger for tags + */ +export const getCommitterOrTagger = (pushData: PushData): string => { + if (isTagPush(pushData) && pushData.user) { + return pushData.user; + } + + if ( + !pushData.commitData || + !Array.isArray(pushData.commitData) || + pushData.commitData.length === 0 + ) { + console.warn('Invalid or empty commitData:', pushData.commitData); + return 'N/A'; + } + + return pushData.commitData[0]?.committer || 'N/A'; +}; + +/** 
+ * Gets the author (tagger for tag pushes) + * @param {PushData} pushData - The push data + * @return {string} The author username for commits or tagger for tags + */ +export const getAuthor = (pushData: PushData): string => { + if (isTagPush(pushData)) { + return pushData.tagData?.[0]?.tagger || 'N/A'; + } + return pushData.commitData[0]?.author || 'N/A'; +}; + +/** + * Gets the author email (tagger email for tag pushes) + * @param {PushData} pushData - The push data + * @return {string} The author email for commits or tagger email for tags + */ +export const getAuthorEmail = (pushData: PushData): string => { + if (isTagPush(pushData)) { + return pushData.tagData?.[0]?.taggerEmail || 'N/A'; + } + return pushData.commitData[0]?.authorEmail || 'N/A'; +}; + +/** + * Gets the message (tag message or commit message) + * @param {PushData} pushData - The push data + * @return {string} The appropriate message for the push type + */ +export const getMessage = (pushData: PushData): string => { + if (isTagPush(pushData)) { + // For tags, try tag message first, then fallback to commit message + return pushData.tagData?.[0]?.message || pushData.commitData[0]?.message || ''; + } + return pushData.commitData[0]?.message || 'N/A'; +}; + +/** + * Gets the commit count + * @param {PushData} pushData - The push data + * @return {number} The number of commits in the push + */ +export const getCommitCount = (pushData: PushData): number => { + return pushData.commitData?.length || 0; +}; + +/** + * Gets the cleaned repository name + * @param {string} repo - The repository name (may include .git suffix) + * @return {string} The cleaned repository name without .git suffix + */ +export const getRepoFullName = (repo: string): string => { + return trimTrailingDotGit(repo); +}; + +/** + * Generates GitHub URLs for different reference types (legacy - use getGitUrl instead) + */ +export const getGitHubUrl = { + repo: (repoName: string) => `https://github.com/${repoName}`, + commit: (repoName: 
string, sha: string) => `https://github.com/${repoName}/commit/${sha}`, + branch: (repoName: string, branch: string) => `https://github.com/${repoName}/tree/${branch}`, + tag: (repoName: string, tagName: string) => + `https://github.com/${repoName}/releases/tag/${tagName}`, + user: (username: string) => `https://github.com/${username}`, +}; + +/** + * Generates URLs for different Git providers and reference types + * @param {string} repoWebUrl - The base repository web URL + * @param {string} gitProvider - The Git provider (github, gitlab, etc.) + * @return {object} Object with URL generation functions + */ +export const getGitUrl = (repoWebUrl: string, gitProvider: string) => ({ + repo: () => repoWebUrl, + commit: (sha: string) => `${repoWebUrl}/commit/${sha}`, + branch: (branch: string) => { + switch (gitProvider) { + case 'gitlab': + return `${repoWebUrl}/-/tree/${branch}`; + default: + return `${repoWebUrl}/tree/${branch}`; + } + }, + tag: (tagName: string) => { + switch (gitProvider) { + case 'gitlab': + return `${repoWebUrl}/-/tags/${tagName}`; + default: + return `${repoWebUrl}/releases/tag/${tagName}`; + } + }, +}); + +/** + * Gets the appropriate URL for a branch or tag reference + * @param {string} repoWebUrl - The base repository web URL + * @param {string} gitProvider - The Git provider + * @param {boolean} isTag - Whether this is a tag reference + * @param {string} refName - The reference name (branch or tag) + * @return {string} The appropriate URL + */ +export const getRefUrl = ( + repoWebUrl: string, + gitProvider: string, + isTag: boolean, + refName: string, +): string => { + const gitUrl = getGitUrl(repoWebUrl, gitProvider); + return isTag ? 
gitUrl.tag(refName) : gitUrl.branch(refName); +}; + +/** + * Gets the appropriate URL for a commit or tag SHA + * @param {string} repoWebUrl - The base repository web URL + * @param {string} gitProvider - The Git provider + * @param {boolean} isTag - Whether this is a tag reference + * @param {string} sha - The SHA or tag name + * @return {string} The appropriate URL + */ +export const getShaUrl = ( + repoWebUrl: string, + gitProvider: string, + isTag: boolean, + sha: string, +): string => { + const gitUrl = getGitUrl(repoWebUrl, gitProvider); + return isTag ? gitUrl.tag(sha) : gitUrl.commit(sha); +}; + +/** + * Checks if a value is not "N/A" and not empty + * @param {string | undefined} value - The value to check + * @return {boolean} True if the value is valid (not N/A and not empty) + */ +export const isValidValue = (value: string | undefined): value is string => { + return Boolean(value && value !== 'N/A'); +}; diff --git a/src/ui/views/Extras/NotAuthorized.jsx b/src/ui/views/Extras/NotAuthorized.jsx new file mode 100644 index 000000000..f08c478b1 --- /dev/null +++ b/src/ui/views/Extras/NotAuthorized.jsx @@ -0,0 +1,39 @@ +import React from 'react'; +import { useNavigate } from 'react-router-dom'; +import Card from '../../components/Card/Card'; +import CardBody from '../../components/Card/CardBody'; +import GridContainer from '../../components/Grid/GridContainer'; +import GridItem from '../../components/Grid/GridItem'; +import { Button } from '@material-ui/core'; +import LockIcon from '@material-ui/icons/Lock'; + +const NotAuthorized = () => { + const navigate = useNavigate(); + + return ( + + + + + +

403 - Not Authorized

+

+ You do not have permission to access this page. Contact your administrator for more + information, or try logging in with a different account. +

+ +
+
+
+
+ ); +}; + +export default NotAuthorized; diff --git a/src/ui/views/Extras/NotFound.jsx b/src/ui/views/Extras/NotFound.jsx new file mode 100644 index 000000000..d548200de --- /dev/null +++ b/src/ui/views/Extras/NotFound.jsx @@ -0,0 +1,36 @@ +import React from 'react'; +import { useNavigate } from 'react-router-dom'; +import Card from '../../components/Card/Card'; +import CardBody from '../../components/Card/CardBody'; +import GridContainer from '../../components/Grid/GridContainer'; +import GridItem from '../../components/Grid/GridItem'; +import { Button } from '@material-ui/core'; +import ErrorOutlineIcon from '@material-ui/icons/ErrorOutline'; + +const NotFound = () => { + const navigate = useNavigate(); + + return ( + + + + + +

404 - Page Not Found

+

The page you are looking for does not exist. It may have been moved or deleted.

+ +
+
+
+
+ ); +}; + +export default NotFound; diff --git a/src/ui/views/OpenPushRequests/OpenPushRequests.tsx b/src/ui/views/OpenPushRequests/OpenPushRequests.tsx new file mode 100644 index 000000000..a778e08ab --- /dev/null +++ b/src/ui/views/OpenPushRequests/OpenPushRequests.tsx @@ -0,0 +1,76 @@ +import React, { useState } from 'react'; +import GridItem from '../../components/Grid/GridItem'; +import GridContainer from '../../components/Grid/GridContainer'; +import PushesTable from './components/PushesTable'; +import CustomTabs from '../../components/CustomTabs/CustomTabs'; +import Danger from '../../components/Typography/Danger'; +import { Visibility, CheckCircle, Cancel, Block } from '@material-ui/icons'; +import { SvgIconProps } from '@material-ui/core'; + +interface TabConfig { + tabName: string; + tabIcon: React.ComponentType; + tabContent: React.ReactNode; +} + +const Dashboard: React.FC = () => { + const [errorMessage, setErrorMessage] = useState(null); + + const handlePushTableError = (errorMessage: string) => { + setErrorMessage(errorMessage); + }; + + const tabs: TabConfig[] = [ + { + tabName: 'Pending', + tabIcon: Visibility, + tabContent: ( + + ), + }, + { + tabName: 'Approved', + tabIcon: CheckCircle, + tabContent: , + }, + { + tabName: 'Canceled', + tabIcon: Cancel, + tabContent: ( + + ), + }, + { + tabName: 'Rejected', + tabIcon: Block, + tabContent: ( + + ), + }, + ]; + + return ( +
+ {errorMessage && {errorMessage}} + {!errorMessage && ( + + + + + + )} +
+ ); +}; + +export default Dashboard; diff --git a/src/ui/views/OpenPushRequests/components/PushesTable.tsx b/src/ui/views/OpenPushRequests/components/PushesTable.tsx new file mode 100644 index 000000000..1975da214 --- /dev/null +++ b/src/ui/views/OpenPushRequests/components/PushesTable.tsx @@ -0,0 +1,199 @@ +import React, { useState, useEffect } from 'react'; +import { makeStyles } from '@material-ui/core/styles'; +import { useNavigate } from 'react-router-dom'; +import Button from '@material-ui/core/Button'; +import Table from '@material-ui/core/Table'; +import TableBody from '@material-ui/core/TableBody'; +import TableCell from '@material-ui/core/TableCell'; +import TableContainer from '@material-ui/core/TableContainer'; +import TableHead from '@material-ui/core/TableHead'; +import TableRow from '@material-ui/core/TableRow'; +import Paper from '@material-ui/core/Paper'; +import styles from '../../../assets/jss/material-dashboard-react/views/dashboardStyle'; +import { getPushes } from '../../../services/git-push'; +import { KeyboardArrowRight } from '@material-ui/icons'; +import Search from '../../../components/Search/Search'; +import Pagination from '../../../components/Pagination/Pagination'; +import ErrorBoundary from '../../../components/ErrorBoundary/ErrorBoundary'; +import { PushData } from '../../../../types/models'; +import { + isTagPush, + getDisplayTimestamp, + getTagName, + getRefToShow, + getShaOrTag, + getCommitterOrTagger, + getAuthorEmail, + getMessage, + getCommitCount, + getRepoFullName, + isValidValue, + getRefUrl, + getShaUrl, +} from '../../../utils/pushUtils'; +import { trimTrailingDotGit } from '../../../../db/helper'; +import { getGitProvider, generateAuthorLinks, generateEmailLink } from '../../../utils'; + +interface PushesTableProps { + [key: string]: any; +} + +const useStyles = makeStyles(styles as any); + +const PushesTable: React.FC = (props) => { + const classes = useStyles(); + const [data, setData] = useState([]); + const 
[filteredData, setFilteredData] = useState([]); + const [isLoading, setIsLoading] = useState(false); + const [, setIsError] = useState(false); + const navigate = useNavigate(); + const [, setAuth] = useState(true); + const [currentPage, setCurrentPage] = useState(1); + const itemsPerPage = 5; + const [searchTerm, setSearchTerm] = useState(''); + + const openPush = (pushId: string) => navigate(`/dashboard/push/${pushId}`, { replace: true }); + + useEffect(() => { + const query = { + blocked: props.blocked ?? false, + canceled: props.canceled ?? false, + authorised: props.authorised ?? false, + rejected: props.rejected ?? false, + }; + getPushes(setIsLoading, setData, setAuth, setIsError, props.handleError, query); + }, [props]); + + useEffect(() => { + setFilteredData(data); + }, [data]); + + // Include "tag" in the searchable fields when tag exists + useEffect(() => { + const lowerCaseTerm = searchTerm.toLowerCase(); + const filtered = searchTerm + ? data.filter((item) => { + const repoName = getRepoFullName(item.repo).toLowerCase(); + const message = getMessage(item).toLowerCase(); + const commitToSha = item.commitTo.toLowerCase(); + const tagName = getTagName(item.tag).toLowerCase(); + + return ( + repoName.includes(lowerCaseTerm) || + commitToSha.includes(lowerCaseTerm) || + message.includes(lowerCaseTerm) || + tagName.includes(lowerCaseTerm) + ); + }) + : data; + setFilteredData(filtered); + setCurrentPage(1); + }, [searchTerm, data]); + + const handleSearch = (term: string) => setSearchTerm(term.trim()); + + const handlePageChange = (page: number) => { + setCurrentPage(page); + }; + + const indexOfLastItem = currentPage * itemsPerPage; + const indexOfFirstItem = indexOfLastItem - itemsPerPage; + const currentItems = filteredData.slice(indexOfFirstItem, indexOfLastItem); + + if (isLoading) return
Loading...
; + + return ( + +
+ + + + + + Timestamp + Repository + Branch/Tag + Commit SHA/Tag + Committer/Tagger + Authors + Message + No. of Commits + + + + + {[...currentItems].reverse().map((row) => { + const isTag = isTagPush(row); + const repoFullName = getRepoFullName(row.repo); + const displayTime = getDisplayTimestamp(isTag, row.commitData[0], row.tagData?.[0]); + const refToShow = getRefToShow(row); + const shaOrTag = getShaOrTag(row); + const repoUrl = row.url; + const repoWebUrl = trimTrailingDotGit(repoUrl); + const gitProvider = getGitProvider(repoUrl); + const committerOrTagger = getCommitterOrTagger(row); + const message = getMessage(row); + const commitCount = getCommitCount(row); + + return ( + + {displayTime} + + + {repoFullName} + + + + + {refToShow} + + + + + {shaOrTag} + + + + {isValidValue(committerOrTagger) + ? generateEmailLink(committerOrTagger, getAuthorEmail(row)) + : 'N/A'} + + {generateAuthorLinks(row.commitData)} + {message} + {commitCount} + + + + + ); + })} + +
+
+ +
+
+ ); +}; + +export default PushesTable; diff --git a/src/ui/views/PushDetails/PushDetails.tsx b/src/ui/views/PushDetails/PushDetails.tsx index 05392958d..3a3b02a63 100644 --- a/src/ui/views/PushDetails/PushDetails.tsx +++ b/src/ui/views/PushDetails/PushDetails.tsx @@ -8,7 +8,6 @@ import Card from '../../components/Card/Card'; import CardIcon from '../../components/Card/CardIcon'; import CardBody from '../../components/Card/CardBody'; import CardHeader, { CardHeaderColor } from '../../components/Card/CardHeader'; -import CardFooter from '../../components/Card/CardFooter'; import Button from '../../components/CustomButtons/Button'; import Diff from './components/Diff'; import Attestation from './components/Attestation'; @@ -25,7 +24,14 @@ import type { ServiceResult } from '../../services/errors'; import { CheckCircle, Visibility, Cancel, Block } from '@material-ui/icons'; import Snackbar from '@material-ui/core/Snackbar'; import { PushActionView } from '../../types'; -import { trimPrefixRefsHeads, trimTrailingDotGit } from '../../../db/helper'; +import { + isTagPush, + getTagName, + getRepoFullName, + getRefToShow, + getGitUrl, +} from '../../utils/pushUtils'; +import { trimTrailingDotGit } from '../../../db/helper'; import { generateEmailLink, getGitProvider } from '../../utils'; const Dashboard: React.FC = () => { @@ -89,7 +95,7 @@ const Dashboard: React.FC = () => { if (!id) return; const result = await cancelPush(id); if (result.success) { - navigate(`/dashboard/push/`); + navigate('/dashboard/push/'); return; } handleActionFailure(result); @@ -125,12 +131,14 @@ const Dashboard: React.FC = () => { }; } - const repoFullName = trimTrailingDotGit(push.repo); - const repoBranch = trimPrefixRefsHeads(push.branch ?? 
''); + const isTag = isTagPush(push as any); + const repoFullName = getRepoFullName(push.repo); + const refToShow = getRefToShow(push as any); const repoUrl = push.url; const repoWebUrl = trimTrailingDotGit(repoUrl); const gitProvider = getGitProvider(repoUrl); const isGitHub = gitProvider == 'github'; + const gitUrl = getGitUrl(repoWebUrl, gitProvider); const generateIcon = (title: string) => { switch (title) { @@ -191,87 +199,121 @@ const Dashboard: React.FC = () => {

{moment(push.timestamp).toString()}

-

Remote Head

+

Repository

- - {push.commitFrom} + + {repoFullName}

-

Commit SHA

-

- - {push.commitTo} - -

+ {isTag ? ( + <> +

Tag

+

{getTagName((push as any).tag)}

+ + ) : ( + <> +

Branch

+

{refToShow}

+ + )}
-

Repository

+

From

- - {repoFullName} + + {push.commitFrom}

-

Branch

+

To

- - {repoBranch} + + {push.commitTo}

- - -

{headerData.title}

-
- - - - - Timestamp - Committer - Author - Message - - - - {push.commitData?.map((c) => ( - - - {moment.unix(Number(c.commitTimestamp || 0)).toString()} - - {generateEmailLink(c.committer, c.committerEmail)} - {generateEmailLink(c.author, c.authorEmail)} - {c.message} - - ))} - -
-
-
- - - - - - - - - + + {/* Branch push: show commits and diff */} + {!isTag && ( + <> + + + +

{headerData.title}

+
+ + + + + Timestamp + Committer + Author + Message + + + + {push.commitData?.map((c) => ( + + + {moment.unix(Number(c.commitTimestamp || 0)).toString()} + + {generateEmailLink(c.committer, c.committerEmail)} + {generateEmailLink(c.author, c.authorEmail)} + {c.message} + + ))} + +
+
+
+
+ + + + + + + + + )} + + {/* Tag push: show tagData */} + {isTag && ( + + + +

Tag Details

+
+ + + + + Tag Name + Tagger + Message + + + + {(push as any).tagData?.map((t: any) => ( + + {t.tagName} + {generateEmailLink(t.tagger, t.taggerEmail)} + {t.message} + + ))} + +
+
+
+
+ )} ); diff --git a/src/ui/views/PushDetails/attestation.types.ts b/src/ui/views/PushDetails/attestation.types.ts new file mode 100644 index 000000000..47efe9de6 --- /dev/null +++ b/src/ui/views/PushDetails/attestation.types.ts @@ -0,0 +1,21 @@ +interface Question { + label: string; + checked: boolean; +} + +interface Reviewer { + username: string; + gitAccount: string; +} + +export interface AttestationData { + reviewer: Reviewer; + timestamp: string | Date; + questions: Question[]; +} + +export interface AttestationViewProps { + attestation: boolean; + setAttestation: (value: boolean) => void; + data: AttestationData; +} diff --git a/src/ui/views/RepoList/repositories.types.ts b/src/ui/views/RepoList/repositories.types.ts new file mode 100644 index 000000000..2e7660147 --- /dev/null +++ b/src/ui/views/RepoList/repositories.types.ts @@ -0,0 +1,15 @@ +export interface RepositoriesProps { + data: { + _id: string; + project: string; + name: string; + url: string; + proxyURL: string; + users?: { + canPush?: string[]; + canAuthorise?: string[]; + }; + }; + + [key: string]: unknown; +} diff --git a/src/ui/views/Settings/Settings.jsx b/src/ui/views/Settings/Settings.jsx new file mode 100644 index 000000000..7accfce22 --- /dev/null +++ b/src/ui/views/Settings/Settings.jsx @@ -0,0 +1,122 @@ +import React, { useState, useEffect } from 'react'; +import { + TextField, + IconButton, + InputAdornment, + FormLabel, + Snackbar, + Typography, +} from '@material-ui/core'; +import { Visibility, VisibilityOff, Save, Clear } from '@material-ui/icons'; +import { makeStyles } from '@material-ui/core/styles'; + +import GridContainer from '../../components/Grid/GridContainer'; +import GridItem from '../../components/Grid/GridItem'; +import Card from '../../components/Card/Card'; +import CardBody from '../../components/Card/CardBody'; +import Button from '../../components/CustomButtons/Button'; + +const useStyles = makeStyles((theme) => ({ + root: { + '& .MuiTextField-root': { + margin: 
theme.spacing(1), + width: '100%', + }, + }, + buttonRow: { + display: 'flex', + justifyContent: 'flex-end', + marginTop: theme.spacing(2), + gap: theme.spacing(1), + }, +})); + +export default function SettingsView() { + const classes = useStyles(); + + const [jwtToken, setJwtToken] = useState(''); + const [showToken, setShowToken] = useState(false); + const [snackbarMessage, setSnackbarMessage] = useState(''); + const [snackbarOpen, setSnackbarOpen] = useState(false); + + useEffect(() => { + const savedToken = localStorage.getItem('ui_jwt_token'); + if (savedToken) setJwtToken(savedToken); + }, []); + + const handleSave = () => { + localStorage.setItem('ui_jwt_token', jwtToken); + setSnackbarMessage('JWT token saved'); + setSnackbarOpen(true); + }; + + const handleClear = () => { + setJwtToken(''); + localStorage.removeItem('ui_jwt_token'); + setSnackbarMessage('JWT token cleared'); + setSnackbarOpen(true); + }; + + const toggleShowToken = () => { + setShowToken(!showToken); + }; + + return ( +
+ + + + + {/* Title */} + + JWT Token for UI Authentication + + + Authenticates UI requests to the server when "apiAuthentication" is + enabled in the config. + + setJwtToken(e.target.value)} + InputProps={{ + endAdornment: ( + + + {showToken ? : } + + + ), + style: { + marginTop: '10px', + marginLeft: '-8px', + marginRight: '8px', + }, + }} + /> +
+ + +
+
+
+
+
+ setSnackbarOpen(false)} + message={snackbarMessage} + /> + + ); +} diff --git a/src/ui/views/User/User.tsx b/src/ui/views/User/User.tsx new file mode 100644 index 000000000..517e09b4e --- /dev/null +++ b/src/ui/views/User/User.tsx @@ -0,0 +1,201 @@ +import React, { useState, useEffect } from 'react'; +import { Navigate, useNavigate, useParams } from 'react-router-dom'; +import GridItem from '../../components/Grid/GridItem'; +import GridContainer from '../../components/Grid/GridContainer'; +import Card from '../../components/Card/Card'; +import CardBody from '../../components/Card/CardBody'; +import Button from '../../components/CustomButtons/Button'; +import FormLabel from '@material-ui/core/FormLabel'; +import { getUser, updateUser, getUserLoggedIn } from '../../services/user'; + +import { UserData } from '../../../types/models'; +import { makeStyles } from '@material-ui/core/styles'; + +import { LogoGithubIcon } from '@primer/octicons-react'; +import CloseRounded from '@material-ui/icons/CloseRounded'; +import { Check, Save } from '@material-ui/icons'; +import { TextField, Theme } from '@material-ui/core'; + +const useStyles = makeStyles((theme: Theme) => ({ + root: { + '& .MuiTextField-root': { + margin: theme.spacing(1), + width: '100%', + }, + }, +})); + +export default function UserProfile(): React.ReactElement { + const classes = useStyles(); + const [data, setData] = useState(null); + const [auth, setAuth] = useState(true); + const [isLoading, setIsLoading] = useState(true); + const [isError, setIsError] = useState(false); + const [isProfile, setIsProfile] = useState(false); + const [isAdmin, setIsAdmin] = useState(false); + const [gitAccount, setGitAccount] = useState(''); + const navigate = useNavigate(); + const { id } = useParams<{ id?: string }>(); + + useEffect(() => { + if (id == null) { + setIsProfile(true); + } + + if (id) { + getUser( + setIsLoading, + (userData: UserData) => { + setData(userData); + setGitAccount(userData.gitAccount || ''); + 
}, + setAuth, + setIsError, + id, + ); + getUserLoggedIn(setIsLoading, setIsAdmin, setIsError, setAuth); + } else { + console.log('getting user data'); + setIsProfile(true); + getUser( + setIsLoading, + (userData: UserData) => { + setData(userData); + setGitAccount(userData.gitAccount || ''); + }, + setAuth, + setIsError, + ); + } + }, [id]); + + if (isLoading) return
Loading...
; + if (isError) return
Something went wrong ...
; + + if (!auth && window.location.pathname === '/dashboard/profile') { + return ; + } + if (!data) return
No user data available
; + + const updateProfile = async (): Promise => { + try { + const updatedData = { + ...data, + gitAccount: escapeHTML(gitAccount), + }; + await updateUser(updatedData); + setData(updatedData); + navigate(`/dashboard/profile`); + } catch { + setIsError(true); + } + }; + + const UpdateButton = (): React.ReactElement => ( + + ); + + const escapeHTML = (str: string): string => { + return str + .replace(/&/g, '&') + .replace(//g, '>') + .replace(/"/g, '"') + .replace(/'/g, ''') + .replace(/\\/g, ''') + .replace(/\//g, '/'); + }; + + return ( +
+ + + + + + {data.gitAccount && ( + + {`${data.displayName}'s + + )} + + Name + {data.displayName} + + + Role + {data.title} + + + E-mail + {data.email} + + {data.gitAccount && ( + + GitHub Username + + {data.gitAccount} + + + )} + + Administrator + {data.admin ? ( + + + + ) : ( + + )} + + + {isProfile || isAdmin ? ( +
+
+
+ + What is your username? + +
+ ) => + setGitAccount(e.target.value) + } + /> + +
+
+
+ ) : null} +
+
+
+
+
+ ); +} diff --git a/test/1.test.js b/test/1.test.js new file mode 100644 index 000000000..edb6a01fb --- /dev/null +++ b/test/1.test.js @@ -0,0 +1,98 @@ +/* + Template test file. Demonstrates how to: + - Use chai-http to test the server + - Initialize the server + - Stub dependencies with sinon sandbox + - Reset stubs after each test + - Use proxyquire to replace modules + - Clear module cache after a test +*/ + +const chai = require('chai'); +const chaiHttp = require('chai-http'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); + +const service = require('../src/service'); +const db = require('../src/db'); + +const expect = chai.expect; + +chai.use(chaiHttp); + +const TEST_REPO = { + project: 'finos', + name: 'db-test-repo', + url: 'https://github.com/finos/db-test-repo.git', +}; + +describe('init', () => { + let app; + let sandbox; + + // Runs before all tests + before(async function () { + // Start the service (can also pass config if testing proxy routes) + app = await service.start(); + }); + + // Runs before each test + beforeEach(function () { + // Create a sandbox for stubbing + sandbox = sinon.createSandbox(); + + // Example: stub a DB method + sandbox.stub(db, 'getRepo').resolves(TEST_REPO); + }); + + // Example test: check server is running + it('should return 401 if not logged in', async function () { + const res = await chai.request(app).get('/api/auth/profile'); + expect(res).to.have.status(401); + }); + + // Example test: check db stub is working + it('should get the repo from stubbed db', async function () { + const repo = await db.getRepo('finos/db-test-repo'); + expect(repo).to.deep.equal(TEST_REPO); + }); + + // Example test: use proxyquire to override the config module + it('should return an array of enabled auth methods when overridden', async function () { + const fsStub = { + readFileSync: sandbox.stub().returns( + JSON.stringify({ + authentication: [ + { type: 'local', enabled: true }, + { type: 'ActiveDirectory', 
enabled: true }, + { type: 'openidconnect', enabled: true }, + ], + }), + ), + }; + + const config = proxyquire('../src/config', { + fs: fsStub, + }); + config.initUserConfig(); + const authMethods = config.getAuthMethods(); + expect(authMethods).to.have.lengthOf(3); + expect(authMethods[0].type).to.equal('local'); + expect(authMethods[1].type).to.equal('ActiveDirectory'); + expect(authMethods[2].type).to.equal('openidconnect'); + + // Clear config module cache so other tests don't use the stubbed config + delete require.cache[require.resolve('../src/config')]; + }); + + // Runs after each test + afterEach(function () { + // Restore all stubs in this sandbox + sandbox.restore(); + }); + + // Runs after all tests + after(async function () { + await service.httpServer.close(); + }); +}); diff --git a/test/ConfigLoader.test.js b/test/ConfigLoader.test.js new file mode 100644 index 000000000..76c659855 --- /dev/null +++ b/test/ConfigLoader.test.js @@ -0,0 +1,758 @@ +import fs from 'fs'; +import path from 'path'; +import { configFile } from '../src/config/file'; +import { expect } from 'chai'; +import { ConfigLoader } from '../src/config/ConfigLoader'; +import { isValidGitUrl, isValidPath, isValidBranchName } from '../src/config/ConfigLoader'; +import sinon from 'sinon'; +import axios from 'axios'; + +describe('ConfigLoader', () => { + let configLoader; + let tempDir; + let tempConfigFile; + + beforeEach(() => { + // Create temp directory for test files + tempDir = fs.mkdtempSync('gitproxy-configloader-test-'); + tempConfigFile = path.join(tempDir, 'test-config.json'); + }); + + afterEach(() => { + // Clean up temp files + if (fs.existsSync(tempDir)) { + fs.rmSync(tempDir, { recursive: true }); + } + sinon.restore(); + configLoader?.stop(); + }); + + after(async () => { + // reset config to default after all tests have run + console.log(`Restoring config to defaults from file ${configFile}`); + configLoader = new ConfigLoader({}); + await configLoader.loadFromFile({ + 
type: 'file', + enabled: true, + path: configFile, + }); + }); + + after(() => { + // restore default config + }); + + describe('loadFromFile', () => { + it('should load configuration from file', async () => { + const testConfig = { + proxyUrl: 'https://test.com', + cookieSecret: 'test-secret', + }; + fs.writeFileSync(tempConfigFile, JSON.stringify(testConfig)); + + configLoader = new ConfigLoader({}); + const result = await configLoader.loadFromFile({ + type: 'file', + enabled: true, + path: tempConfigFile, + }); + + expect(result).to.be.an('object'); + expect(result.proxyUrl).to.equal('https://test.com'); + expect(result.cookieSecret).to.equal('test-secret'); + }); + }); + + describe('loadFromHttp', () => { + it('should load configuration from HTTP endpoint', async () => { + const testConfig = { + proxyUrl: 'https://test.com', + cookieSecret: 'test-secret', + }; + + sinon.stub(axios, 'get').resolves({ data: testConfig }); + + configLoader = new ConfigLoader({}); + const result = await configLoader.loadFromHttp({ + type: 'http', + enabled: true, + url: 'http://config-service/config', + headers: {}, + }); + + expect(result).to.be.an('object'); + expect(result.proxyUrl).to.equal('https://test.com'); + expect(result.cookieSecret).to.equal('test-secret'); + }); + + it('should include bearer token if provided', async () => { + const axiosStub = sinon.stub(axios, 'get').resolves({ data: {} }); + + configLoader = new ConfigLoader({}); + await configLoader.loadFromHttp({ + type: 'http', + enabled: true, + url: 'http://config-service/config', + auth: { + type: 'bearer', + token: 'test-token', + }, + }); + + expect( + axiosStub.calledWith('http://config-service/config', { + headers: { Authorization: 'Bearer test-token' }, + }), + ).to.be.true; + }); + }); + + describe('reloadConfiguration', () => { + it('should emit configurationChanged event when config changes', async () => { + const initialConfig = { + configurationSources: { + enabled: true, + sources: [ + { + type: 
'file', + enabled: true, + path: tempConfigFile, + }, + ], + reloadIntervalSeconds: 0, + }, + }; + + const newConfig = { + proxyUrl: 'https://new-test.com', + }; + + fs.writeFileSync(tempConfigFile, JSON.stringify(newConfig)); + + configLoader = new ConfigLoader(initialConfig); + const spy = sinon.spy(); + configLoader.on('configurationChanged', spy); + + await configLoader.reloadConfiguration(); + + expect(spy.calledOnce).to.be.true; + expect(spy.firstCall.args[0]).to.deep.include(newConfig); + }); + + it('should not emit event if config has not changed', async () => { + const testConfig = { + proxyUrl: 'https://test.com', + }; + + const config = { + configurationSources: { + enabled: true, + sources: [ + { + type: 'file', + enabled: true, + path: tempConfigFile, + }, + ], + reloadIntervalSeconds: 0, + }, + }; + + fs.writeFileSync(tempConfigFile, JSON.stringify(testConfig)); + + configLoader = new ConfigLoader(config); + const spy = sinon.spy(); + configLoader.on('configurationChanged', spy); + + await configLoader.reloadConfiguration(); // First reload should emit + await configLoader.reloadConfiguration(); // Second reload should not emit since config hasn't changed + + expect(spy.calledOnce).to.be.true; // Should only emit once + }); + + it('should not emit event if configurationSources is disabled', async () => { + const config = { + configurationSources: { + enabled: false, + }, + }; + + configLoader = new ConfigLoader(config); + const spy = sinon.spy(); + configLoader.on('configurationChanged', spy); + + await configLoader.reloadConfiguration(); + + expect(spy.called).to.be.false; + }); + }); + + describe('initialize', () => { + it('should initialize cache directory using env-paths', async () => { + configLoader = new ConfigLoader({}); + await configLoader.initialize(); + + // Check that cacheDir is set and is a string + expect(configLoader.cacheDir).to.be.a('string'); + + // Check that it contains 'git-proxy' in the path + 
expect(configLoader.cacheDir).to.include('git-proxy'); + + // On macOS, it should be in the Library/Caches directory + // On Linux, it should be in the ~/.cache directory + // On Windows, it should be in the AppData/Local directory + if (process.platform === 'darwin') { + expect(configLoader.cacheDir).to.include('Library/Caches'); + } else if (process.platform === 'linux') { + expect(configLoader.cacheDir).to.include('.cache'); + } else if (process.platform === 'win32') { + expect(configLoader.cacheDir).to.include('AppData/Local'); + } + }); + + it('should return cacheDirPath via getter', async () => { + configLoader = new ConfigLoader({}); + await configLoader.initialize(); + + const cacheDirPath = configLoader.cacheDirPath; + expect(cacheDirPath).to.equal(configLoader.cacheDir); + expect(cacheDirPath).to.be.a('string'); + }); + + it('should create cache directory if it does not exist', async () => { + configLoader = new ConfigLoader({}); + await configLoader.initialize(); + + // Check if directory exists + expect(fs.existsSync(configLoader.cacheDir)).to.be.true; + }); + }); + + describe('start', () => { + it('should perform initial load on start if configurationSources is enabled', async () => { + const mockConfig = { + configurationSources: { + enabled: true, + sources: [ + { + type: 'file', + enabled: true, + path: tempConfigFile, + }, + ], + reloadIntervalSeconds: 30, + }, + }; + + configLoader = new ConfigLoader(mockConfig); + const spy = sinon.spy(configLoader, 'reloadConfiguration'); + await configLoader.start(); + + expect(spy.calledOnce).to.be.true; + }); + + it('should clear an existing reload interval if it exists', async () => { + const mockConfig = { + configurationSources: { + enabled: true, + sources: [ + { + type: 'file', + enabled: true, + path: tempConfigFile, + }, + ], + }, + }; + + configLoader = new ConfigLoader(mockConfig); + configLoader.reloadTimer = setInterval(() => {}, 1000); + await configLoader.start(); + 
expect(configLoader.reloadTimer).to.be.null; + }); + + it('should run reloadConfiguration multiple times on short reload interval', async () => { + const mockConfig = { + configurationSources: { + enabled: true, + sources: [ + { + type: 'file', + enabled: true, + path: tempConfigFile, + }, + ], + reloadIntervalSeconds: 0.01, + }, + }; + + configLoader = new ConfigLoader(mockConfig); + const spy = sinon.spy(configLoader, 'reloadConfiguration'); + await configLoader.start(); + + // Make sure the reload interval is triggered + await new Promise((resolve) => setTimeout(resolve, 50)); + + expect(spy.callCount).to.greaterThan(1); + }); + + it('should clear the interval when stop is called', async () => { + const mockConfig = { + configurationSources: { + enabled: true, + sources: [ + { + type: 'file', + enabled: true, + path: tempConfigFile, + }, + ], + }, + }; + + configLoader = new ConfigLoader(mockConfig); + configLoader.reloadTimer = setInterval(() => {}, 1000); + expect(configLoader.reloadTimer).to.not.be.null; + await configLoader.stop(); + expect(configLoader.reloadTimer).to.be.null; + }); + }); + + describe('loadRemoteConfig', () => { + beforeEach(async () => { + const configFilePath = path.join(__dirname, '..', 'proxy.config.json'); + const config = JSON.parse(fs.readFileSync(configFilePath, 'utf-8')); + + config.configurationSources.enabled = true; + configLoader = new ConfigLoader(config); + await configLoader.initialize(); + }); + + it('should load configuration from git repository', async function () { + this.timeout(10000); + + const source = { + type: 'git', + repository: 'https://github.com/finos/git-proxy.git', + path: 'proxy.config.json', + branch: 'main', + enabled: true, + }; + + const config = await configLoader.loadFromSource(source); + + // Verify the loaded config has expected structure + expect(config).to.be.an('object'); + expect(config).to.have.property('cookieSecret'); + }); + + it('should throw error for invalid configuration file path 
(git)', async function () { + const source = { + type: 'git', + repository: 'https://github.com/finos/git-proxy.git', + path: '\0', // Invalid path + branch: 'main', + enabled: true, + }; + + try { + await configLoader.loadFromSource(source); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.equal('Invalid configuration file path in repository'); + } + }); + + it('should throw error for invalid configuration file path (file)', async function () { + const source = { + type: 'file', + path: '\0', // Invalid path + enabled: true, + }; + + try { + await configLoader.loadFromSource(source); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.equal('Invalid configuration file path'); + } + }); + + it('should load configuration from http', async function () { + this.timeout(10000); + + const source = { + type: 'http', + url: 'https://raw.githubusercontent.com/finos/git-proxy/refs/heads/main/proxy.config.json', + enabled: true, + }; + + const config = await configLoader.loadFromSource(source); + + // Verify the loaded config has expected structure + expect(config).to.be.an('object'); + expect(config).to.have.property('cookieSecret'); + }); + + it('should throw error if repository is invalid', async function () { + const source = { + type: 'git', + repository: 'invalid-repository', + path: 'proxy.config.json', + branch: 'main', + enabled: true, + }; + + try { + await configLoader.loadFromSource(source); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.equal('Invalid repository URL format'); + } + }); + + it('should throw error if branch name is invalid', async function () { + const source = { + type: 'git', + repository: 'https://github.com/finos/git-proxy.git', + path: 'proxy.config.json', + branch: '..', // invalid branch pattern + enabled: true, + }; + + try { + await configLoader.loadFromSource(source); + throw new 
Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.equal('Invalid branch name format'); + } + }); + + it('should throw error if configuration source is invalid', async function () { + const source = { + type: 'invalid', + repository: 'https://github.com/finos/git-proxy.git', + path: 'proxy.config.json', + branch: 'main', + enabled: true, + }; + + try { + await configLoader.loadFromSource(source); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.contain('Unsupported configuration source type'); + } + }); + + it('should throw error if repository is a valid URL but not a git repository', async function () { + const source = { + type: 'git', + repository: 'https://github.com/finos/made-up-test-repo.git', + path: 'proxy.config.json', + branch: 'main', + enabled: true, + }; + + try { + await configLoader.loadFromSource(source); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.contain('Failed to clone repository'); + } + }); + + it('should throw error if repository is a valid git repo but the branch does not exist', async function () { + const source = { + type: 'git', + repository: 'https://github.com/finos/git-proxy.git', + path: 'proxy.config.json', + branch: 'branch-does-not-exist', + enabled: true, + }; + + try { + await configLoader.loadFromSource(source); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.contain('Failed to checkout branch'); + } + }); + + it('should throw error if config path was not found', async function () { + const source = { + type: 'git', + repository: 'https://github.com/finos/git-proxy.git', + path: 'path-not-found.json', + branch: 'main', + enabled: true, + }; + + try { + await configLoader.loadFromSource(source); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.contain('Configuration file not found at'); + } 
+ }); + + it('should throw error if config file is not valid JSON', async function () { + const source = { + type: 'git', + repository: 'https://github.com/finos/git-proxy.git', + path: 'test/fixtures/baz.js', + branch: 'main', + enabled: true, + }; + + try { + await configLoader.loadFromSource(source); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.contain('Failed to read or parse configuration file'); + } + }); + }); + + describe('deepMerge', () => { + let configLoader; + + beforeEach(() => { + configLoader = new ConfigLoader({}); + }); + + it('should merge simple objects', () => { + const target = { a: 1, b: 2 }; + const source = { b: 3, c: 4 }; + + const result = configLoader.deepMerge(target, source); + + expect(result).to.deep.equal({ a: 1, b: 3, c: 4 }); + }); + + it('should merge nested objects', () => { + const target = { + a: 1, + b: { x: 1, y: 2 }, + c: { z: 3 }, + }; + const source = { + b: { y: 4, w: 5 }, + c: { z: 6 }, + }; + + const result = configLoader.deepMerge(target, source); + + expect(result).to.deep.equal({ + a: 1, + b: { x: 1, y: 4, w: 5 }, + c: { z: 6 }, + }); + }); + + it('should handle arrays by replacing them', () => { + const target = { + a: [1, 2, 3], + b: { items: [4, 5] }, + }; + const source = { + a: [7, 8], + b: { items: [9] }, + }; + + const result = configLoader.deepMerge(target, source); + + expect(result).to.deep.equal({ + a: [7, 8], + b: { items: [9] }, + }); + }); + + it('should handle null and undefined values', () => { + const target = { + a: 1, + b: null, + c: undefined, + }; + const source = { + a: null, + b: 2, + c: 3, + }; + + const result = configLoader.deepMerge(target, source); + + expect(result).to.deep.equal({ + a: null, + b: 2, + c: 3, + }); + }); + + it('should handle empty objects', () => { + const target = {}; + const source = { a: 1, b: { c: 2 } }; + + const result = configLoader.deepMerge(target, source); + + expect(result).to.deep.equal({ a: 1, b: { c: 2 } 
}); + }); + + it('should not modify the original objects', () => { + const target = { a: 1, b: { c: 2 } }; + const source = { b: { c: 3 } }; + const originalTarget = { ...target }; + const originalSource = { ...source }; + + configLoader.deepMerge(target, source); + + expect(target).to.deep.equal(originalTarget); + expect(source).to.deep.equal(originalSource); + }); + }); +}); + +describe('Validation Helpers', () => { + describe('isValidGitUrl', () => { + it('should validate git URLs correctly', () => { + // Valid URLs + expect(isValidGitUrl('git://github.com/user/repo.git')).to.be.true; + expect(isValidGitUrl('https://github.com/user/repo.git')).to.be.true; + expect(isValidGitUrl('ssh://git@github.com/user/repo.git')).to.be.true; + expect(isValidGitUrl('user@github.com:user/repo.git')).to.be.true; + + // Invalid URLs + expect(isValidGitUrl('not-a-git-url')).to.be.false; + expect(isValidGitUrl('http://github.com/user/repo')).to.be.false; + expect(isValidGitUrl('')).to.be.false; + expect(isValidGitUrl(null)).to.be.false; + expect(isValidGitUrl(undefined)).to.be.false; + expect(isValidGitUrl(123)).to.be.false; + }); + }); + + describe('isValidPath', () => { + it('should validate file paths correctly', () => { + const cwd = process.cwd(); + + // Valid paths + expect(isValidPath(path.join(cwd, 'config.json'))).to.be.true; + expect(isValidPath(path.join(cwd, 'subfolder/config.json'))).to.be.true; + expect(isValidPath('/etc/passwd')).to.be.true; + expect(isValidPath('../config.json')).to.be.true; + + // Invalid paths + expect(isValidPath('')).to.be.false; + expect(isValidPath(null)).to.be.false; + expect(isValidPath(undefined)).to.be.false; + + // Additional edge cases + expect(isValidPath({})).to.be.false; + expect(isValidPath([])).to.be.false; + expect(isValidPath(123)).to.be.false; + expect(isValidPath(true)).to.be.false; + expect(isValidPath('\0invalid')).to.be.false; + expect(isValidPath('\u0000')).to.be.false; + }); + + it('should handle path resolution errors', () 
=> { + // Mock path.resolve to throw an error + const originalResolve = path.resolve; + path.resolve = () => { + throw new Error('Mock path resolution error'); + }; + + expect(isValidPath('some/path')).to.be.false; + + // Restore original path.resolve + path.resolve = originalResolve; + }); + }); + + describe('isValidBranchName', () => { + it('should validate git branch names correctly', () => { + // Valid branch names + expect(isValidBranchName('main')).to.be.true; + expect(isValidBranchName('feature/new-feature')).to.be.true; + expect(isValidBranchName('release-1.0')).to.be.true; + expect(isValidBranchName('fix_123')).to.be.true; + expect(isValidBranchName('user/feature/branch')).to.be.true; + + // Invalid branch names + expect(isValidBranchName('.invalid')).to.be.false; + expect(isValidBranchName('-invalid')).to.be.false; + expect(isValidBranchName('branch with spaces')).to.be.false; + expect(isValidBranchName('')).to.be.false; + expect(isValidBranchName(null)).to.be.false; + expect(isValidBranchName(undefined)).to.be.false; + expect(isValidBranchName('branch..name')).to.be.false; + }); + }); +}); + +describe('ConfigLoader Error Handling', () => { + let configLoader; + let tempDir; + let tempConfigFile; + + beforeEach(() => { + tempDir = fs.mkdtempSync('gitproxy-configloader-test-'); + tempConfigFile = path.join(tempDir, 'test-config.json'); + }); + + afterEach(() => { + if (fs.existsSync(tempDir)) { + fs.rmSync(tempDir, { recursive: true }); + } + sinon.restore(); + configLoader?.stop(); + }); + + it('should handle invalid JSON in file source', async () => { + fs.writeFileSync(tempConfigFile, 'invalid json content'); + + configLoader = new ConfigLoader({}); + try { + await configLoader.loadFromFile({ + type: 'file', + enabled: true, + path: tempConfigFile, + }); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.contain('Invalid configuration file format'); + } + }); + + it('should handle HTTP request errors', 
async () => { + sinon.stub(axios, 'get').rejects(new Error('Network error')); + + configLoader = new ConfigLoader({}); + try { + await configLoader.loadFromHttp({ + type: 'http', + enabled: true, + url: 'http://config-service/config', + }); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.equal('Network error'); + } + }); + + it('should handle invalid JSON from HTTP response', async () => { + sinon.stub(axios, 'get').resolves({ data: 'invalid json response' }); + + configLoader = new ConfigLoader({}); + try { + await configLoader.loadFromHttp({ + type: 'http', + enabled: true, + url: 'http://config-service/config', + }); + throw new Error('Expected error was not thrown'); + } catch (error) { + expect(error.message).to.contain('Invalid configuration format from HTTP source'); + } + }); +}); diff --git a/test/chain.test.js b/test/chain.test.js new file mode 100644 index 000000000..8e088b7a0 --- /dev/null +++ b/test/chain.test.js @@ -0,0 +1,522 @@ +const { Action } = require('../src/proxy/actions/Action'); +const chai = require('chai'); +const sinon = require('sinon'); +const { PluginLoader } = require('../src/plugin'); +const db = require('../src/db'); + +chai.should(); +const expect = chai.expect; + +const mockLoader = { + pushPlugins: [ + { exec: Object.assign(async () => console.log('foo'), { displayName: 'foo.exec' }) }, + ], + pullPlugins: [ + { exec: Object.assign(async () => console.log('foo'), { displayName: 'bar.exec' }) }, + ], +}; + +const initMockPushProcessors = (sinon) => { + const mockPushProcessors = { + parsePush: sinon.stub(), + checkEmptyBranch: sinon.stub(), + audit: sinon.stub(), + checkRepoInAuthorisedList: sinon.stub(), + checkCommitMessages: sinon.stub(), + checkAuthorEmails: sinon.stub(), + checkUserPushPermission: sinon.stub(), + checkIfWaitingAuth: sinon.stub(), + checkHiddenCommits: sinon.stub(), + pullRemote: sinon.stub(), + writePack: sinon.stub(), + preReceive: sinon.stub(), + getDiff: 
sinon.stub(), + gitleaks: sinon.stub(), + clearBareClone: sinon.stub(), + scanDiff: sinon.stub(), + blockForAuth: sinon.stub(), + }; + mockPushProcessors.parsePush.displayName = 'parsePush'; + mockPushProcessors.checkEmptyBranch.displayName = 'checkEmptyBranch'; + mockPushProcessors.audit.displayName = 'audit'; + mockPushProcessors.checkRepoInAuthorisedList.displayName = 'checkRepoInAuthorisedList'; + mockPushProcessors.checkCommitMessages.displayName = 'checkCommitMessages'; + mockPushProcessors.checkAuthorEmails.displayName = 'checkAuthorEmails'; + mockPushProcessors.checkUserPushPermission.displayName = 'checkUserPushPermission'; + mockPushProcessors.checkIfWaitingAuth.displayName = 'checkIfWaitingAuth'; + mockPushProcessors.checkHiddenCommits.displayName = 'checkHiddenCommits'; + mockPushProcessors.pullRemote.displayName = 'pullRemote'; + mockPushProcessors.writePack.displayName = 'writePack'; + mockPushProcessors.preReceive.displayName = 'preReceive'; + mockPushProcessors.getDiff.displayName = 'getDiff'; + mockPushProcessors.gitleaks.displayName = 'gitleaks'; + mockPushProcessors.clearBareClone.displayName = 'clearBareClone'; + mockPushProcessors.scanDiff.displayName = 'scanDiff'; + mockPushProcessors.blockForAuth.displayName = 'blockForAuth'; + return mockPushProcessors; +}; +const mockPreProcessors = { + parseAction: sinon.stub(), +}; + +let mockPushProcessors; + +const clearCache = (sandbox) => { + delete require.cache[require.resolve('../src/proxy/processors')]; + delete require.cache[require.resolve('../src/proxy/chain')]; + sandbox.restore(); +}; + +describe('proxy chain', function () { + let processors; + let chain; + let mockPushProcessors; + let sandboxSinon; + + beforeEach(async () => { + // Create a new sandbox for each test + sandboxSinon = sinon.createSandbox(); + // Initialize the mock push processors + mockPushProcessors = initMockPushProcessors(sandboxSinon); + + // Re-import the processors module after clearing the cache + processors = await 
import('../src/proxy/processors'); + + // Mock the processors module + sandboxSinon.stub(processors, 'pre').value(mockPreProcessors); + + sandboxSinon.stub(processors, 'push').value(mockPushProcessors); + + // Re-import the chain module after stubbing processors + chain = require('../src/proxy/chain').default; + + chain.chainPluginLoader = new PluginLoader([]); + }); + + afterEach(() => { + // Clear the module from the cache after each test + clearCache(sandboxSinon); + }); + + it('getChain should set pluginLoaded if loader is undefined', async function () { + chain.chainPluginLoader = undefined; + const actual = await chain.getChain({ type: 'push' }); + expect(actual).to.deep.equal(chain.branchPushChain); + expect(chain.chainPluginLoader).to.be.undefined; + expect(chain.pluginsInserted).to.be.true; + }); + + it('getChain should load plugins from an initialized PluginLoader', async function () { + chain.chainPluginLoader = mockLoader; + const initialChain = [...chain.branchPushChain]; + const actual = await chain.getChain({ type: 'push' }); + expect(actual.length).to.be.greaterThan(initialChain.length); + expect(chain.pluginsInserted).to.be.true; + }); + + it('getChain should load pull plugins from an initialized PluginLoader', async function () { + chain.chainPluginLoader = mockLoader; + const initialChain = [...chain.pullActionChain]; + const actual = await chain.getChain({ type: 'pull' }); + expect(actual.length).to.be.greaterThan(initialChain.length); + expect(chain.pluginsInserted).to.be.true; + }); + + it('executeChain should stop executing if action has continue returns false', async function () { + const req = {}; + const continuingAction = { type: 'push', continue: () => true, allowPush: false }; + mockPreProcessors.parseAction.resolves({ type: 'push' }); + mockPushProcessors.parsePush.resolves(continuingAction); + mockPushProcessors.checkEmptyBranch.resolves(continuingAction); + mockPushProcessors.checkRepoInAuthorisedList.resolves(continuingAction); + 
mockPushProcessors.checkCommitMessages.resolves(continuingAction); + mockPushProcessors.checkAuthorEmails.resolves(continuingAction); + mockPushProcessors.checkUserPushPermission.resolves(continuingAction); + mockPushProcessors.checkHiddenCommits.resolves(continuingAction); + mockPushProcessors.pullRemote.resolves(continuingAction); + mockPushProcessors.writePack.resolves(continuingAction); + // this stops the chain from further execution + mockPushProcessors.checkIfWaitingAuth.resolves({ + type: 'push', + continue: () => false, + allowPush: false, + }); + const result = await chain.executeChain(req); + + expect(mockPreProcessors.parseAction.called).to.be.true; + expect(mockPushProcessors.parsePush.called).to.be.true; + expect(mockPushProcessors.checkRepoInAuthorisedList.called).to.be.true; + expect(mockPushProcessors.checkCommitMessages.called).to.be.true; + expect(mockPushProcessors.checkAuthorEmails.called).to.be.true; + expect(mockPushProcessors.checkUserPushPermission.called).to.be.true; + expect(mockPushProcessors.checkIfWaitingAuth.called).to.be.true; + expect(mockPushProcessors.pullRemote.called).to.be.true; + expect(mockPushProcessors.checkHiddenCommits.called).to.be.true; + expect(mockPushProcessors.writePack.called).to.be.true; + expect(mockPushProcessors.checkEmptyBranch.called).to.be.true; + expect(mockPushProcessors.audit.called).to.be.true; + + expect(result.type).to.equal('push'); + expect(result.allowPush).to.be.false; + expect(result.continue).to.be.a('function'); + }); + + it('executeChain should stop executing if action has allowPush is set to true', async function () { + const req = {}; + const continuingAction = { type: 'push', continue: () => true, allowPush: false }; + mockPreProcessors.parseAction.resolves({ type: 'push' }); + mockPushProcessors.parsePush.resolves(continuingAction); + mockPushProcessors.checkEmptyBranch.resolves(continuingAction); + mockPushProcessors.checkRepoInAuthorisedList.resolves(continuingAction); + 
mockPushProcessors.checkCommitMessages.resolves(continuingAction); + mockPushProcessors.checkAuthorEmails.resolves(continuingAction); + mockPushProcessors.checkUserPushPermission.resolves(continuingAction); + mockPushProcessors.checkHiddenCommits.resolves(continuingAction); + mockPushProcessors.pullRemote.resolves(continuingAction); + mockPushProcessors.writePack.resolves(continuingAction); + // this stops the chain from further execution + mockPushProcessors.checkIfWaitingAuth.resolves({ + type: 'push', + continue: () => true, + allowPush: true, + }); + const result = await chain.executeChain(req); + + expect(mockPreProcessors.parseAction.called).to.be.true; + expect(mockPushProcessors.parsePush.called).to.be.true; + expect(mockPushProcessors.checkEmptyBranch.called).to.be.true; + expect(mockPushProcessors.checkRepoInAuthorisedList.called).to.be.true; + expect(mockPushProcessors.checkCommitMessages.called).to.be.true; + expect(mockPushProcessors.checkAuthorEmails.called).to.be.true; + expect(mockPushProcessors.checkUserPushPermission.called).to.be.true; + expect(mockPushProcessors.checkIfWaitingAuth.called).to.be.true; + expect(mockPushProcessors.pullRemote.called).to.be.true; + expect(mockPushProcessors.checkHiddenCommits.called).to.be.true; + expect(mockPushProcessors.writePack.called).to.be.true; + expect(mockPushProcessors.audit.called).to.be.true; + + expect(result.type).to.equal('push'); + expect(result.allowPush).to.be.true; + expect(result.continue).to.be.a('function'); + }); + + it('executeChain should execute all steps if all actions succeed', async function () { + const req = {}; + const continuingAction = { type: 'push', continue: () => true, allowPush: false }; + mockPreProcessors.parseAction.resolves({ type: 'push' }); + mockPushProcessors.parsePush.resolves(continuingAction); + mockPushProcessors.checkEmptyBranch.resolves(continuingAction); + mockPushProcessors.checkRepoInAuthorisedList.resolves(continuingAction); + 
mockPushProcessors.checkCommitMessages.resolves(continuingAction); + mockPushProcessors.checkAuthorEmails.resolves(continuingAction); + mockPushProcessors.checkUserPushPermission.resolves(continuingAction); + mockPushProcessors.checkIfWaitingAuth.resolves(continuingAction); + mockPushProcessors.pullRemote.resolves(continuingAction); + mockPushProcessors.writePack.resolves(continuingAction); + mockPushProcessors.checkHiddenCommits.resolves(continuingAction); + mockPushProcessors.preReceive.resolves(continuingAction); + mockPushProcessors.getDiff.resolves(continuingAction); + mockPushProcessors.gitleaks.resolves(continuingAction); + mockPushProcessors.clearBareClone.resolves(continuingAction); + mockPushProcessors.scanDiff.resolves(continuingAction); + mockPushProcessors.blockForAuth.resolves(continuingAction); + + const result = await chain.executeChain(req); + + expect(mockPreProcessors.parseAction.called).to.be.true; + expect(mockPushProcessors.parsePush.called).to.be.true; + expect(mockPushProcessors.checkEmptyBranch.called).to.be.true; + expect(mockPushProcessors.checkRepoInAuthorisedList.called).to.be.true; + expect(mockPushProcessors.checkCommitMessages.called).to.be.true; + expect(mockPushProcessors.checkAuthorEmails.called).to.be.true; + expect(mockPushProcessors.checkUserPushPermission.called).to.be.true; + expect(mockPushProcessors.checkIfWaitingAuth.called).to.be.true; + expect(mockPushProcessors.pullRemote.called).to.be.true; + expect(mockPushProcessors.checkHiddenCommits.called).to.be.true; + expect(mockPushProcessors.writePack.called).to.be.true; + expect(mockPushProcessors.preReceive.called).to.be.true; + expect(mockPushProcessors.getDiff.called).to.be.true; + expect(mockPushProcessors.gitleaks.called).to.be.true; + expect(mockPushProcessors.clearBareClone.called).to.be.true; + expect(mockPushProcessors.scanDiff.called).to.be.true; + expect(mockPushProcessors.blockForAuth.called).to.be.true; + expect(mockPushProcessors.audit.called).to.be.true; + + 
expect(result.type).to.equal('push'); + expect(result.allowPush).to.be.false; + expect(result.continue).to.be.a('function'); + }); + + it('executeChain should run the expected steps for pulls', async function () { + const req = {}; + const continuingAction = { type: 'pull', continue: () => true, allowPush: false }; + mockPreProcessors.parseAction.resolves({ type: 'pull' }); + mockPushProcessors.checkRepoInAuthorisedList.resolves(continuingAction); + const result = await chain.executeChain(req); + + expect(mockPushProcessors.checkRepoInAuthorisedList.called).to.be.true; + expect(mockPushProcessors.parsePush.called).to.be.false; + expect(result.type).to.equal('pull'); + }); + + it('executeChain should handle errors and still call audit', async function () { + const req = {}; + const action = { type: 'push', continue: () => true, allowPush: true }; + + processors.pre.parseAction.resolves(action); + mockPushProcessors.parsePush.rejects(new Error('Audit error')); + + try { + await chain.executeChain(req); + } catch { + // Ignore the error + } + + expect(mockPushProcessors.audit.called).to.be.true; + }); + + it('executeChain should always run at least checkRepoInAuthList', async function () { + const req = {}; + const action = { type: 'foo', continue: () => true, allowPush: true }; + + mockPreProcessors.parseAction.resolves(action); + mockPushProcessors.checkRepoInAuthorisedList.resolves(action); + + await chain.executeChain(req); + expect(mockPushProcessors.checkRepoInAuthorisedList.called).to.be.true; + }); + + it('should approve push automatically and record in the database', async function () { + const req = {}; + const action = { + type: 'push', + continue: () => true, + allowPush: false, + setAutoApproval: sinon.stub(), + repoName: 'test-repo', + commitTo: 'newCommitHash', + }; + + mockPreProcessors.parseAction.resolves(action); + mockPushProcessors.parsePush.resolves(action); + mockPushProcessors.checkEmptyBranch.resolves(action); + 
mockPushProcessors.checkRepoInAuthorisedList.resolves(action); + mockPushProcessors.checkCommitMessages.resolves(action); + mockPushProcessors.checkAuthorEmails.resolves(action); + mockPushProcessors.checkUserPushPermission.resolves(action); + mockPushProcessors.checkIfWaitingAuth.resolves(action); + mockPushProcessors.pullRemote.resolves(action); + mockPushProcessors.writePack.resolves(action); + mockPushProcessors.checkHiddenCommits.resolves(action); + + mockPushProcessors.preReceive.resolves({ + ...action, + steps: [{ error: false, logs: ['Push automatically approved by pre-receive hook.'] }], + allowPush: true, + autoApproved: true, + }); + + mockPushProcessors.getDiff.resolves(action); + mockPushProcessors.gitleaks.resolves(action); + mockPushProcessors.clearBareClone.resolves(action); + mockPushProcessors.scanDiff.resolves(action); + mockPushProcessors.blockForAuth.resolves(action); + const dbStub = sinon.stub(db, 'authorise').resolves(true); + + const result = await chain.executeChain(req); + + expect(result.type).to.equal('push'); + expect(result.allowPush).to.be.true; + expect(result.continue).to.be.a('function'); + + expect(dbStub.calledOnce).to.be.true; + + dbStub.restore(); + }); + + it('should reject push automatically and record in the database', async function () { + const req = {}; + const action = { + type: 'push', + continue: () => true, + allowPush: false, + setAutoRejection: sinon.stub(), + repoName: 'test-repo', + commitTo: 'newCommitHash', + }; + + mockPreProcessors.parseAction.resolves(action); + mockPushProcessors.parsePush.resolves(action); + mockPushProcessors.checkEmptyBranch.resolves(action); + mockPushProcessors.checkRepoInAuthorisedList.resolves(action); + mockPushProcessors.checkCommitMessages.resolves(action); + mockPushProcessors.checkAuthorEmails.resolves(action); + mockPushProcessors.checkUserPushPermission.resolves(action); + mockPushProcessors.checkIfWaitingAuth.resolves(action); + mockPushProcessors.pullRemote.resolves(action); 
+ mockPushProcessors.writePack.resolves(action); + mockPushProcessors.checkHiddenCommits.resolves(action); + + mockPushProcessors.preReceive.resolves({ + ...action, + steps: [{ error: false, logs: ['Push automatically rejected by pre-receive hook.'] }], + allowPush: true, + autoRejected: true, + }); + + mockPushProcessors.getDiff.resolves(action); + mockPushProcessors.gitleaks.resolves(action); + mockPushProcessors.clearBareClone.resolves(action); + mockPushProcessors.scanDiff.resolves(action); + mockPushProcessors.blockForAuth.resolves(action); + + const dbStub = sinon.stub(db, 'reject').resolves(true); + + const result = await chain.executeChain(req); + + expect(result.type).to.equal('push'); + expect(result.allowPush).to.be.true; + expect(result.continue).to.be.a('function'); + + expect(dbStub.calledOnce).to.be.true; + + dbStub.restore(); + }); + + it('executeChain should handle exceptions in attemptAutoApproval', async function () { + const req = {}; + const action = { + type: 'push', + continue: () => true, + allowPush: false, + setAutoApproval: sinon.stub(), + repoName: 'test-repo', + commitTo: 'newCommitHash', + }; + + mockPreProcessors.parseAction.resolves(action); + mockPushProcessors.parsePush.resolves(action); + mockPushProcessors.checkEmptyBranch.resolves(action); + mockPushProcessors.checkRepoInAuthorisedList.resolves(action); + mockPushProcessors.checkCommitMessages.resolves(action); + mockPushProcessors.checkAuthorEmails.resolves(action); + mockPushProcessors.checkUserPushPermission.resolves(action); + mockPushProcessors.checkIfWaitingAuth.resolves(action); + mockPushProcessors.pullRemote.resolves(action); + mockPushProcessors.writePack.resolves(action); + mockPushProcessors.checkHiddenCommits.resolves(action); + + mockPushProcessors.preReceive.resolves({ + ...action, + steps: [{ error: false, logs: ['Push automatically approved by pre-receive hook.'] }], + allowPush: true, + autoApproved: true, + }); + + mockPushProcessors.getDiff.resolves(action); 
+ mockPushProcessors.gitleaks.resolves(action); + mockPushProcessors.clearBareClone.resolves(action); + mockPushProcessors.scanDiff.resolves(action); + mockPushProcessors.blockForAuth.resolves(action); + + const error = new Error('Database error'); + + const consoleErrorStub = sinon.stub(console, 'error'); + sinon.stub(db, 'authorise').rejects(error); + await chain.executeChain(req); + expect(consoleErrorStub.calledOnceWith('Error during auto-approval:', error.message)).to.be + .true; + db.authorise.restore(); + consoleErrorStub.restore(); + }); + + it('executeChain should handle exceptions in attemptAutoRejection', async function () { + const req = {}; + const action = { + type: 'push', + continue: () => true, + allowPush: false, + setAutoRejection: sinon.stub(), + repoName: 'test-repo', + commitTo: 'newCommitHash', + autoRejected: true, + }; + + mockPreProcessors.parseAction.resolves(action); + mockPushProcessors.parsePush.resolves(action); + mockPushProcessors.checkEmptyBranch.resolves(action); + mockPushProcessors.checkRepoInAuthorisedList.resolves(action); + mockPushProcessors.checkCommitMessages.resolves(action); + mockPushProcessors.checkAuthorEmails.resolves(action); + mockPushProcessors.checkUserPushPermission.resolves(action); + mockPushProcessors.checkIfWaitingAuth.resolves(action); + mockPushProcessors.pullRemote.resolves(action); + mockPushProcessors.writePack.resolves(action); + mockPushProcessors.checkHiddenCommits.resolves(action); + + mockPushProcessors.preReceive.resolves({ + ...action, + steps: [{ error: false, logs: ['Push automatically rejected by pre-receive hook.'] }], + allowPush: false, + autoRejected: true, + }); + + mockPushProcessors.getDiff.resolves(action); + mockPushProcessors.gitleaks.resolves(action); + mockPushProcessors.clearBareClone.resolves(action); + mockPushProcessors.scanDiff.resolves(action); + mockPushProcessors.blockForAuth.resolves(action); + + const error = new Error('Database error'); + + const consoleErrorStub = 
sinon.stub(console, 'error'); + sinon.stub(db, 'reject').rejects(error); + + await chain.executeChain(req); + + expect(consoleErrorStub.calledOnceWith('Error during auto-rejection:', error.message)).to.be + .true; + + db.reject.restore(); + consoleErrorStub.restore(); + }); + + it('returns pullActionChain for pull actions', async () => { + const action = new Action('1', 'pull', 'GET', Date.now(), 'owner/repo.git'); + const pullChain = await chain.getChain(action); + expect(pullChain).to.deep.equal(chain.pullActionChain); + }); + + it('returns tagPushChain when action.type is push and action.actionType is TAG', async () => { + const { ActionType } = require('../src/proxy/actions/Action'); + const action = new Action('2', 'push', 'POST', Date.now(), 'owner/repo.git'); + action.actionType = ActionType.TAG; + const tagChain = await chain.getChain(action); + expect(tagChain).to.deep.equal(chain.tagPushChain); + }); + + it('returns branchPushChain when action.type is push and actionType is not TAG', async () => { + const { ActionType } = require('../src/proxy/actions/Action'); + const action = new Action('3', 'push', 'POST', Date.now(), 'owner/repo.git'); + action.actionType = ActionType.BRANCH; + const branchChain = await chain.getChain(action); + expect(branchChain).to.deep.equal(chain.branchPushChain); + }); + it('getChain should set pluginsInserted and return tagPushChain if loader is undefined for tag pushes', async function () { + chain.chainPluginLoader = undefined; + const { ActionType } = require('../src/proxy/actions/Action'); + const actual = await chain.getChain({ type: 'push', actionType: ActionType.TAG }); + expect(actual).to.deep.equal(chain.tagPushChain); + expect(chain.chainPluginLoader).to.be.undefined; + expect(chain.pluginsInserted).to.be.true; + }); + + it('getChain should load tag plugins from an initialized PluginLoader', async function () { + chain.chainPluginLoader = mockLoader; + const initialChain = [...chain.tagPushChain]; + const { ActionType 
} = require('../src/proxy/actions/Action'); + const actual = await chain.getChain({ type: 'push', actionType: ActionType.TAG }); + expect(actual.length).to.be.greaterThan(initialChain.length); + expect(chain.pluginsInserted).to.be.true; + }); +}); diff --git a/test/checkHiddenCommit.test.js b/test/checkHiddenCommit.test.js new file mode 100644 index 000000000..b4013fb8e --- /dev/null +++ b/test/checkHiddenCommit.test.js @@ -0,0 +1,122 @@ +const fs = require('fs'); +const childProcess = require('child_process'); +const sinon = require('sinon'); +const { expect } = require('chai'); + +const { exec: checkHidden } = require('../src/proxy/processors/push-action/checkHiddenCommits'); +const { Action } = require('../src/proxy/actions'); + +describe('checkHiddenCommits.exec', () => { + let action; + let sandbox; + let spawnSyncStub; + let readdirSyncStub; + + beforeEach(() => { + sandbox = sinon.createSandbox(); + + // stub spawnSync and fs.readdirSync + spawnSyncStub = sandbox.stub(childProcess, 'spawnSync'); + readdirSyncStub = sandbox.stub(fs, 'readdirSync'); + + // prepare a fresh Action + action = new Action('some-id', 'push', 'POST', Date.now(), 'repo.git'); + action.proxyGitPath = '/fake'; + action.commitFrom = '0000000000000000000000000000000000000000'; + action.commitTo = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'; + action.newIdxFiles = ['pack-test.idx']; + }); + + afterEach(() => { + sandbox.restore(); + }); + + it('reports all commits unreferenced and sets error=true', async () => { + const COMMIT_1 = 'deadbeef'; + const COMMIT_2 = 'cafebabe'; + + // 1) rev-list → no introduced commits + // 2) verify-pack → two commits in pack + spawnSyncStub + .onFirstCall() + .returns({ stdout: '' }) + .onSecondCall() + .returns({ + stdout: `${COMMIT_1} commit 100 1\n${COMMIT_2} commit 100 2\n`, + }); + + readdirSyncStub.returns(['pack-test.idx']); + + await checkHidden({ body: '' }, action); + + const step = action.steps.find((s) => s.stepName === 'checkHiddenCommits'); + 
expect(step.logs).to.include(`checkHiddenCommits - Referenced commits: 0`); + expect(step.logs).to.include(`checkHiddenCommits - Unreferenced commits: 2`); + expect(step.logs).to.include( + `checkHiddenCommits - Unreferenced commits in pack (2): ${COMMIT_1}, ${COMMIT_2}.\n` + + `This usually happens when a branch was made from a commit that hasn't been approved and pushed to the remote.\n` + + `Please get approval on the commits, push them and try again.`, + ); + expect(action.error).to.be.true; + }); + + it('mixes referenced & unreferenced correctly', async () => { + const COMMIT_1 = 'deadbeef'; + const COMMIT_2 = 'cafebabe'; + + // 1) git rev-list → introduces one commit “deadbeef” + // 2) git verify-pack → the pack contains two commits + spawnSyncStub + .onFirstCall() + .returns({ stdout: `${COMMIT_1}\n` }) + .onSecondCall() + .returns({ + stdout: `${COMMIT_1} commit 100 1\n${COMMIT_2} commit 100 2\n`, + }); + + readdirSyncStub.returns(['pack-test.idx']); + + await checkHidden({ body: '' }, action); + + const step = action.steps.find((s) => s.stepName === 'checkHiddenCommits'); + expect(step.logs).to.include('checkHiddenCommits - Referenced commits: 1'); + expect(step.logs).to.include('checkHiddenCommits - Unreferenced commits: 1'); + expect(step.logs).to.include( + `checkHiddenCommits - Unreferenced commits in pack (1): ${COMMIT_2}.\n` + + `This usually happens when a branch was made from a commit that hasn't been approved and pushed to the remote.\n` + + `Please get approval on the commits, push them and try again.`, + ); + expect(action.error).to.be.true; + }); + + it('reports all commits referenced and sets error=false', async () => { + // 1) rev-list → introduces both commits + // 2) verify-pack → the pack contains the same two commits + spawnSyncStub.onFirstCall().returns({ stdout: 'deadbeef\ncafebabe\n' }).onSecondCall().returns({ + stdout: 'deadbeef commit 100 1\ncafebabe commit 100 2\n', + }); + + readdirSyncStub.returns(['pack-test.idx']); + + await 
checkHidden({ body: '' }, action); + const step = action.steps.find((s) => s.stepName === 'checkHiddenCommits'); + + expect(step.logs).to.include('checkHiddenCommits - Total introduced commits: 2'); + expect(step.logs).to.include('checkHiddenCommits - Total commits in the pack: 2'); + expect(step.logs).to.include( + 'checkHiddenCommits - All pack commits are referenced in the introduced range.', + ); + expect(action.error).to.be.false; + }); + + it('throws if commitFrom or commitTo is missing', async () => { + delete action.commitFrom; + + try { + await checkHidden({ body: '' }, action); + throw new Error('Expected checkHidden to throw'); + } catch (err) { + expect(err.message).to.match(/Both action.commitFrom and action.commitTo must be defined/); + } + }); +}); diff --git a/test/db-helper.test.js b/test/db-helper.test.js new file mode 100644 index 000000000..6b973f2c2 --- /dev/null +++ b/test/db-helper.test.js @@ -0,0 +1,63 @@ +const { expect } = require('chai'); +const { trimPrefixRefsHeads, trimTrailingDotGit } = require('../src/db/helper'); + +describe('db helpers', () => { + describe('trimPrefixRefsHeads', () => { + it('removes `refs/heads/`', () => { + const res = trimPrefixRefsHeads('refs/heads/test'); + expect(res).to.equal('test'); + }); + + it('removes only one `refs/heads/`', () => { + const res = trimPrefixRefsHeads('refs/heads/refs/heads/'); + expect(res).to.equal('refs/heads/'); + }); + + it('removes only the first `refs/heads/`', () => { + const res = trimPrefixRefsHeads('refs/heads/middle/refs/heads/end/refs/heads/'); + expect(res).to.equal('middle/refs/heads/end/refs/heads/'); + }); + + it('handles empty string', () => { + const res = trimPrefixRefsHeads(''); + expect(res).to.equal(''); + }); + + it("doesn't remove `refs/heads`", () => { + const res = trimPrefixRefsHeads('refs/headstest'); + expect(res).to.equal('refs/headstest'); + }); + + it("doesn't remove `/refs/heads/`", () => { + const res = trimPrefixRefsHeads('/refs/heads/test'); + 
expect(res).to.equal('/refs/heads/test'); + }); + }); + + describe('trimTrailingDotGit', () => { + it('removes `.git`', () => { + const res = trimTrailingDotGit('test.git'); + expect(res).to.equal('test'); + }); + + it('removes only one `.git`', () => { + const res = trimTrailingDotGit('.git.git'); + expect(res).to.equal('.git'); + }); + + it('removes only the last `.git`', () => { + const res = trimTrailingDotGit('.git-middle.git-end.git'); + expect(res).to.equal('.git-middle.git-end'); + }); + + it('handles empty string', () => { + const res = trimTrailingDotGit(''); + expect(res).to.equal(''); + }); + + it("doesn't remove just `git`", () => { + const res = trimTrailingDotGit('testgit'); + expect(res).to.equal('testgit'); + }); + }); +}); diff --git a/test/db/db.test.js b/test/db/db.test.js new file mode 100644 index 000000000..0a54c22b6 --- /dev/null +++ b/test/db/db.test.js @@ -0,0 +1,52 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const db = require('../../src/db'); + +const { expect } = chai; + +describe('db', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('isUserPushAllowed', () => { + it('returns true if user is in canPush', async () => { + sinon.stub(db, 'getRepoByUrl').resolves({ + users: { + canPush: ['alice'], + canAuthorise: [], + }, + }); + const result = await db.isUserPushAllowed('myrepo', 'alice'); + expect(result).to.be.true; + }); + + it('returns true if user is in canAuthorise', async () => { + sinon.stub(db, 'getRepoByUrl').resolves({ + users: { + canPush: [], + canAuthorise: ['bob'], + }, + }); + const result = await db.isUserPushAllowed('myrepo', 'bob'); + expect(result).to.be.true; + }); + + it('returns false if user is in neither', async () => { + sinon.stub(db, 'getRepoByUrl').resolves({ + users: { + canPush: [], + canAuthorise: [], + }, + }); + const result = await db.isUserPushAllowed('myrepo', 'charlie'); + expect(result).to.be.false; + }); + + it('returns false if repo is not registered', async 
() => { + sinon.stub(db, 'getRepoByUrl').resolves(null); + const result = await db.isUserPushAllowed('myrepo', 'charlie'); + expect(result).to.be.false; + }); + }); +}); diff --git a/test/db/file/repo.test.js b/test/db/file/repo.test.js new file mode 100644 index 000000000..f55ff35d7 --- /dev/null +++ b/test/db/file/repo.test.js @@ -0,0 +1,67 @@ +const { expect } = require('chai'); +const sinon = require('sinon'); +const repoModule = require('../../../src/db/file/repo'); + +describe('File DB', () => { + let sandbox; + + beforeEach(() => { + sandbox = sinon.createSandbox(); + }); + + afterEach(() => { + sandbox.restore(); + }); + + describe('getRepo', () => { + it('should get the repo using the name', async () => { + const repoData = { + name: 'sample', + users: { canPush: [] }, + url: 'http://example.com/sample-repo.git', + }; + + sandbox.stub(repoModule.db, 'findOne').callsFake((query, cb) => cb(null, repoData)); + + const result = await repoModule.getRepo('Sample'); + expect(result).to.deep.equal(repoData); + }); + }); + + describe('getRepoByUrl', () => { + it('should get the repo using the url', async () => { + const repoData = { + name: 'sample', + users: { canPush: [] }, + url: 'https://github.com/finos/git-proxy.git', + }; + + sandbox.stub(repoModule.db, 'findOne').callsFake((query, cb) => cb(null, repoData)); + + const result = await repoModule.getRepoByUrl('https://github.com/finos/git-proxy.git'); + expect(result).to.deep.equal(repoData); + }); + it('should return null if the repo is not found', async () => { + sandbox.stub(repoModule.db, 'findOne').callsFake((query, cb) => cb(null, null)); + + const result = await repoModule.getRepoByUrl('https://github.com/finos/missing-repo.git'); + expect(result).to.be.null; + expect( + repoModule.db.findOne.calledWith( + sinon.match({ url: 'https://github.com/finos/missing-repo.git' }), + ), + ).to.be.true; + }); + + it('should reject if the database returns an error', async () => { + sandbox.stub(repoModule.db, 
'findOne').callsFake((query, cb) => cb(new Error('DB error'))); + + try { + await repoModule.getRepoByUrl('https://github.com/finos/git-proxy.git'); + expect.fail('Expected promise to be rejected'); + } catch (err) { + expect(err.message).to.equal('DB error'); + } + }); + }); +}); diff --git a/test/db/mongo/repo.test.js b/test/db/mongo/repo.test.js new file mode 100644 index 000000000..828aa1bd2 --- /dev/null +++ b/test/db/mongo/repo.test.js @@ -0,0 +1,55 @@ +const { expect } = require('chai'); +const sinon = require('sinon'); +const proxyqquire = require('proxyquire'); + +const repoCollection = { + findOne: sinon.stub(), +}; + +const connectionStub = sinon.stub().returns(repoCollection); + +const { getRepo, getRepoByUrl } = proxyqquire('../../../src/db/mongo/repo', { + './helper': { connect: connectionStub }, +}); + +describe('MongoDB', () => { + afterEach(function () { + sinon.restore(); + }); + + describe('getRepo', () => { + it('should get the repo using the name', async () => { + const repoData = { + name: 'sample', + users: { canPush: [] }, + url: 'http://example.com/sample-repo.git', + }; + repoCollection.findOne.resolves(repoData); + + const result = await getRepo('Sample'); + expect(result).to.deep.equal(repoData); + expect(connectionStub.calledWith('repos')).to.be.true; + expect(repoCollection.findOne.calledWith({ name: { $eq: 'sample' } })).to.be.true; + }); + }); + + describe('getRepoByUrl', () => { + it('should get the repo using the url', async () => { + const repoData = { + name: 'sample', + users: { canPush: [] }, + url: 'https://github.com/finos/git-proxy.git', + }; + repoCollection.findOne.resolves(repoData); + + const result = await getRepoByUrl('https://github.com/finos/git-proxy.git'); + expect(result).to.deep.equal(repoData); + expect(connectionStub.calledWith('repos')).to.be.true; + expect( + repoCollection.findOne.calledWith({ + url: { $eq: 'https://github.com/finos/git-proxy.git' }, + }), + ).to.be.true; + }); + }); +}); diff --git 
a/test/generated-config.test.js b/test/generated-config.test.js new file mode 100644 index 000000000..cf68b2109 --- /dev/null +++ b/test/generated-config.test.js @@ -0,0 +1,378 @@ +const chai = require('chai'); +const { Convert } = require('../src/config/generated/config'); +const defaultSettings = require('../proxy.config.json'); + +const { expect } = chai; + +describe('Generated Config (QuickType)', () => { + describe('Convert class', () => { + it('should parse valid configuration JSON', () => { + const validConfig = { + proxyUrl: 'https://proxy.example.com', + cookieSecret: 'test-secret', + authorisedList: [ + { + project: 'test', + name: 'repo', + url: 'https://github.com/test/repo.git', + }, + ], + authentication: [ + { + type: 'local', + enabled: true, + }, + ], + sink: [ + { + type: 'memory', + enabled: true, + }, + ], + }; + + const result = Convert.toGitProxyConfig(JSON.stringify(validConfig)); + + expect(result).to.be.an('object'); + expect(result.proxyUrl).to.equal('https://proxy.example.com'); + expect(result.cookieSecret).to.equal('test-secret'); + expect(result.authorisedList).to.be.an('array'); + expect(result.authentication).to.be.an('array'); + expect(result.sink).to.be.an('array'); + }); + + it('should convert config object back to JSON', () => { + const configObject = { + proxyUrl: 'https://proxy.example.com', + cookieSecret: 'test-secret', + authorisedList: [], + authentication: [ + { + type: 'local', + enabled: true, + }, + ], + }; + + const jsonString = Convert.gitProxyConfigToJson(configObject); + const parsed = JSON.parse(jsonString); + + expect(parsed).to.be.an('object'); + expect(parsed.proxyUrl).to.equal('https://proxy.example.com'); + expect(parsed.cookieSecret).to.equal('test-secret'); + }); + + it('should handle empty configuration object', () => { + const emptyConfig = {}; + + const result = Convert.toGitProxyConfig(JSON.stringify(emptyConfig)); + expect(result).to.be.an('object'); + }); + + it('should throw error for invalid JSON 
string', () => { + expect(() => { + Convert.toGitProxyConfig('invalid json'); + }).to.throw(); + }); + + it('should handle configuration with valid rate limit structure', () => { + const validConfig = { + proxyUrl: 'https://proxy.example.com', + cookieSecret: 'secret', + sessionMaxAgeHours: 24, + rateLimit: { + windowMs: 60000, + limit: 150, + }, + tempPassword: { + sendEmail: false, + emailConfig: {}, + }, + authorisedList: [ + { + project: 'test', + name: 'repo', + url: 'https://github.com/test/repo.git', + }, + ], + sink: [ + { + type: 'fs', + params: { + filepath: './.', + }, + enabled: true, + }, + ], + authentication: [ + { + type: 'local', + enabled: true, + }, + ], + contactEmail: 'admin@example.com', + csrfProtection: true, + plugins: [], + privateOrganizations: ['private-org'], + urlShortener: 'https://shortener.example.com', + }; + + const result = Convert.toGitProxyConfig(JSON.stringify(validConfig)); + + expect(result).to.be.an('object'); + expect(result.authentication).to.be.an('array'); + expect(result.authorisedList).to.be.an('array'); + expect(result.contactEmail).to.be.a('string'); + expect(result.cookieSecret).to.be.a('string'); + expect(result.csrfProtection).to.be.a('boolean'); + expect(result.plugins).to.be.an('array'); + expect(result.privateOrganizations).to.be.an('array'); + expect(result.proxyUrl).to.be.a('string'); + expect(result.rateLimit).to.be.an('object'); + expect(result.sessionMaxAgeHours).to.be.a('number'); + expect(result.sink).to.be.an('array'); + }); + + it('should handle malformed configuration gracefully', () => { + const malformedConfig = { + proxyUrl: 123, // Wrong type + authentication: 'not-an-array', // Wrong type + }; + + try { + const result = Convert.toGitProxyConfig(JSON.stringify(malformedConfig)); + expect(result).to.be.an('object'); + } catch (error) { + expect(error).to.be.an('error'); + } + }); + + it('should preserve array structures', () => { + const configWithArrays = { + proxyUrl: 
'https://proxy.example.com', + cookieSecret: 'secret', + authorisedList: [ + { project: 'proj1', name: 'repo1', url: 'https://github.com/proj1/repo1.git' }, + { project: 'proj2', name: 'repo2', url: 'https://github.com/proj2/repo2.git' }, + ], + authentication: [{ type: 'local', enabled: true }], + sink: [{ type: 'fs', params: { filepath: './.' }, enabled: true }], + plugins: ['plugin1', 'plugin2'], + privateOrganizations: ['org1', 'org2'], + }; + + const result = Convert.toGitProxyConfig(JSON.stringify(configWithArrays)); + + expect(result.authorisedList).to.have.lengthOf(2); + expect(result.authentication).to.have.lengthOf(1); + expect(result.plugins).to.have.lengthOf(2); + expect(result.privateOrganizations).to.have.lengthOf(2); + }); + + it('should handle nested object structures', () => { + const configWithNesting = { + proxyUrl: 'https://proxy.example.com', + cookieSecret: 'secret', + authentication: [{ type: 'local', enabled: true }], + sink: [{ type: 'fs', params: { filepath: './.' }, enabled: true }], + tls: { + enabled: true, + key: '/path/to/key.pem', + cert: '/path/to/cert.pem', + }, + rateLimit: { + windowMs: 60000, + limit: 150, + }, + tempPassword: { + sendEmail: false, + emailConfig: {}, + }, + }; + + const result = Convert.toGitProxyConfig(JSON.stringify(configWithNesting)); + + expect(result.tls).to.be.an('object'); + expect(result.tls.enabled).to.be.a('boolean'); + expect(result.rateLimit).to.be.an('object'); + expect(result.tempPassword).to.be.an('object'); + }); + + it('should handle complex validation scenarios', () => { + // Test configuration that will trigger more validation paths + const complexConfig = { + proxyUrl: 'https://proxy.example.com', + cookieSecret: 'secret', + authentication: [{ type: 'local', enabled: true }], + sink: [{ type: 'fs', params: { filepath: './.' 
}, enabled: true }], + + api: { + github: { + baseUrl: 'https://api.github.com', + token: 'secret-token', + rateLimit: 100, + enabled: true, + }, + }, + + domains: { + localhost: 'http://localhost:3000', + 'example.com': 'https://example.com', + }, + + // Complex nested structures + attestationConfig: { + enabled: true, + questions: [ + { + id: 'q1', + type: 'boolean', + required: true, + label: 'Test Question', + }, + ], + }, + }; + + const result = Convert.toGitProxyConfig(JSON.stringify(complexConfig)); + expect(result).to.be.an('object'); + expect(result.api).to.be.an('object'); + expect(result.domains).to.be.an('object'); + }); + + it('should handle array validation edge cases', () => { + const configWithArrays = { + proxyUrl: 'https://proxy.example.com', + cookieSecret: 'secret', + authentication: [{ type: 'local', enabled: true }], + sink: [{ type: 'fs', params: { filepath: './.' }, enabled: true }], + + // Test different array structures + authorisedList: [ + { + project: 'test1', + name: 'repo1', + url: 'https://github.com/test1/repo1.git', + }, + { + project: 'test2', + name: 'repo2', + url: 'https://github.com/test2/repo2.git', + }, + ], + + plugins: ['plugin-a', 'plugin-b', 'plugin-c'], + privateOrganizations: ['org1', 'org2'], + }; + + const result = Convert.toGitProxyConfig(JSON.stringify(configWithArrays)); + expect(result.authorisedList).to.have.lengthOf(2); + expect(result.plugins).to.have.lengthOf(3); + expect(result.privateOrganizations).to.have.lengthOf(2); + }); + + it('should exercise transformation functions with edge cases', () => { + const edgeCaseConfig = { + proxyUrl: 'https://proxy.example.com', + cookieSecret: 'secret', + authentication: [{ type: 'local', enabled: true }], + sink: [{ type: 'fs', params: { filepath: './.' 
}, enabled: true }], + + sessionMaxAgeHours: 0, + csrfProtection: false, + + tempPassword: { + sendEmail: true, + emailConfig: { + host: 'smtp.example.com', + port: 587, + secure: false, + auth: { + user: 'user@example.com', + pass: 'password', + }, + }, + length: 12, + expiry: 7200, + }, + + rateLimit: { + windowMs: 900000, + limit: 1000, + message: 'Rate limit exceeded', + }, + }; + + const result = Convert.toGitProxyConfig(JSON.stringify(edgeCaseConfig)); + expect(result.sessionMaxAgeHours).to.equal(0); + expect(result.csrfProtection).to.equal(false); + expect(result.tempPassword).to.be.an('object'); + expect(result.tempPassword.length).to.equal(12); + }); + + it('should test validation error paths', () => { + try { + // Try to parse something that looks like valid JSON but has wrong structure + Convert.toGitProxyConfig('{"proxyUrl": 123, "authentication": "not-array"}'); + } catch (error) { + expect(error).to.be.an('error'); + } + }); + + it('should test date and null handling', () => { + // Test that null values cause validation errors (covers error paths) + const configWithNulls = { + proxyUrl: 'https://proxy.example.com', + cookieSecret: null, + authentication: [{ type: 'local', enabled: true }], + sink: [{ type: 'fs', params: { filepath: './.' }, enabled: true }], + contactEmail: null, + urlShortener: null, + }; + + expect(() => { + Convert.toGitProxyConfig(JSON.stringify(configWithNulls)); + }).to.throw('Invalid value'); + }); + + it('should test serialization back to JSON', () => { + const testConfig = { + proxyUrl: 'https://test.com', + cookieSecret: 'secret', + authentication: [{ type: 'local', enabled: true }], + sink: [{ type: 'fs', params: { filepath: './.' 
}, enabled: true }], + rateLimit: { + windowMs: 60000, + limit: 150, + }, + tempPassword: { + sendEmail: false, + emailConfig: {}, + }, + }; + + const parsed = Convert.toGitProxyConfig(JSON.stringify(testConfig)); + const serialized = Convert.gitProxyConfigToJson(parsed); + const reparsed = JSON.parse(serialized); + + expect(reparsed.proxyUrl).to.equal('https://test.com'); + expect(reparsed.rateLimit).to.be.an('object'); + }); + + it('should validate the default configuration from proxy.config.json', () => { + // This test ensures that the default config always passes QuickType validation + // This catches cases where schema updates haven't been reflected in the default config + const result = Convert.toGitProxyConfig(JSON.stringify(defaultSettings)); + + expect(result).to.be.an('object'); + expect(result.cookieSecret).to.be.a('string'); + expect(result.authorisedList).to.be.an('array'); + expect(result.authentication).to.be.an('array'); + expect(result.sink).to.be.an('array'); + + // Validate that serialization also works + const serialized = Convert.gitProxyConfigToJson(result); + expect(() => JSON.parse(serialized)).to.not.throw(); + }); + }); +}); diff --git a/test/integration/forcePush.integration.test.js b/test/integration/forcePush.integration.test.js new file mode 100644 index 000000000..0ef35c8fb --- /dev/null +++ b/test/integration/forcePush.integration.test.js @@ -0,0 +1,164 @@ +const path = require('path'); +const simpleGit = require('simple-git'); +const fs = require('fs').promises; +const { Action } = require('../../src/proxy/actions'); +const { exec: getDiff } = require('../../src/proxy/processors/push-action/getDiff'); +const { exec: scanDiff } = require('../../src/proxy/processors/push-action/scanDiff'); + +const chai = require('chai'); +const expect = chai.expect; + +describe('Force Push Integration Test', () => { + let tempDir; + let git; + let initialCommitSHA; + let rebasedCommitSHA; + + before(async function () { + this.timeout(10000); + + 
tempDir = path.join(__dirname, '../temp-integration-repo'); + await fs.mkdir(tempDir, { recursive: true }); + git = simpleGit(tempDir); + + await git.init(); + await git.addConfig('user.name', 'Test User'); + await git.addConfig('user.email', 'test@example.com'); + + // Create initial commit + await fs.writeFile(path.join(tempDir, 'base.txt'), 'base content'); + await git.add('.'); + await git.commit('Initial commit'); + + // Create feature commit + await fs.writeFile(path.join(tempDir, 'feature.txt'), 'feature content'); + await git.add('.'); + await git.commit('Add feature'); + + const log = await git.log(); + initialCommitSHA = log.latest.hash; + + // Simulate rebase by amending commit (changes SHA) + await git.commit(['--amend', '-m', 'Add feature (rebased)']); + + const newLog = await git.log(); + rebasedCommitSHA = newLog.latest.hash; + + console.log(`Initial SHA: ${initialCommitSHA}`); + console.log(`Rebased SHA: ${rebasedCommitSHA}`); + }); + + after(async () => { + try { + await fs.rmdir(tempDir, { recursive: true }); + } catch (e) { + // Ignore cleanup errors + } + }); + + describe('Complete force push pipeline', () => { + it('should handle valid diff after rebase scenario', async function () { + this.timeout(5000); + + // Create action simulating force push with valid SHAs that have actual changes + const action = new Action( + 'valid-diff-integration', + 'push', + 'POST', + Date.now(), + 'test/repo.git', + ); + action.proxyGitPath = path.dirname(tempDir); + action.repoName = path.basename(tempDir); + + // Parent of initial commit to get actual diff content + const parentSHA = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'; + action.commitFrom = parentSHA; + action.commitTo = rebasedCommitSHA; + action.commitData = [ + { + parent: parentSHA, + commit: rebasedCommitSHA, + message: 'Add feature (rebased)', + author: 'Test User', + }, + ]; + + const afterGetDiff = await getDiff({}, action); + expect(afterGetDiff.steps).to.have.length.greaterThan(0); + + const 
diffStep = afterGetDiff.steps.find((s) => s.stepName === 'diff'); + expect(diffStep).to.exist; + expect(diffStep.error).to.be.false; + expect(diffStep.content).to.be.a('string'); + expect(diffStep.content.length).to.be.greaterThan(0); + + const afterScanDiff = await scanDiff({}, afterGetDiff); + const scanStep = afterScanDiff.steps.find((s) => s.stepName === 'scanDiff'); + + expect(scanStep).to.exist; + expect(scanStep.error).to.be.false; + }); + + it('should handle unreachable commit SHA error', async function () { + this.timeout(5000); + + // Invalid SHA to trigger error + const action = new Action( + 'unreachable-sha-integration', + 'push', + 'POST', + Date.now(), + 'test/repo.git', + ); + action.proxyGitPath = path.dirname(tempDir); + action.repoName = path.basename(tempDir); + action.commitFrom = 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeef'; // Invalid SHA + action.commitTo = rebasedCommitSHA; + action.commitData = [ + { + parent: 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeef', + commit: rebasedCommitSHA, + message: 'Add feature (rebased)', + author: 'Test User', + }, + ]; + + const afterGetDiff = await getDiff({}, action); + expect(afterGetDiff.steps).to.have.length.greaterThan(0); + + const diffStep = afterGetDiff.steps.find((s) => s.stepName === 'diff'); + expect(diffStep).to.exist; + expect(diffStep.error).to.be.true; + expect(diffStep.errorMessage).to.be.a('string'); + expect(diffStep.errorMessage.length).to.be.greaterThan(0); + expect(diffStep.errorMessage).to.satisfy( + (msg) => msg.includes('fatal:') && msg.includes('Invalid revision range'), + 'Error message should contain git diff specific error for invalid SHA', + ); + + // scanDiff should not block on missing diff due to error + const afterScanDiff = await scanDiff({}, afterGetDiff); + const scanStep = afterScanDiff.steps.find((s) => s.stepName === 'scanDiff'); + + expect(scanStep).to.exist; + expect(scanStep.error).to.be.false; + }); + + it('should handle missing diff step gracefully', async function () 
{ + const action = new Action( + 'missing-diff-integration', + 'push', + 'POST', + Date.now(), + 'test/repo.git', + ); + + const result = await scanDiff({}, action); + + expect(result.steps).to.have.length(1); + expect(result.steps[0].stepName).to.equal('scanDiff'); + expect(result.steps[0].error).to.be.false; + }); + }); +}); diff --git a/test/plugin/plugin.test.js b/test/plugin/plugin.test.js new file mode 100644 index 000000000..8aff66bdf --- /dev/null +++ b/test/plugin/plugin.test.js @@ -0,0 +1,99 @@ +import chai from 'chai'; +import { spawnSync } from 'child_process'; +import { rmSync } from 'fs'; +import { join } from 'path'; +import { + isCompatiblePlugin, + PullActionPlugin, + PushActionPlugin, + PluginLoader, +} from '../../src/plugin.ts'; + +chai.should(); + +const expect = chai.expect; + +const testPackagePath = join(__dirname, '../fixtures', 'test-package'); + +describe('loading plugins from packages', function () { + this.timeout(10000); + + before(function () { + spawnSync('npm', ['install'], { cwd: testPackagePath, timeout: 5000 }); + }); + + it('should load plugins that are the default export (module.exports = pluginObj)', async function () { + const loader = new PluginLoader([join(testPackagePath, 'default-export.js')]); + await loader.load(); + expect(loader.pushPlugins.length).to.equal(1); + expect(loader.pushPlugins.every((p) => isCompatiblePlugin(p))).to.be.true; + expect(loader.pushPlugins[0]).to.be.an.instanceOf(PushActionPlugin); + }).timeout(10000); + + it('should load multiple plugins from a module that match the plugin class (module.exports = { pluginFoo, pluginBar })', async function () { + const loader = new PluginLoader([join(testPackagePath, 'multiple-export.js')]); + await loader.load(); + expect(loader.pushPlugins.length).to.equal(1); + expect(loader.pullPlugins.length).to.equal(1); + expect(loader.pushPlugins.every((p) => isCompatiblePlugin(p))).to.be.true; + expect(loader.pushPlugins.every((p) => isCompatiblePlugin(p, 
'isGitProxyPushActionPlugin'))).to + .be.true; + expect(loader.pullPlugins.every((p) => isCompatiblePlugin(p, 'isGitProxyPullActionPlugin'))).to + .be.true; + expect(loader.pushPlugins[0]).to.be.instanceOf(PushActionPlugin); + expect(loader.pullPlugins[0]).to.be.instanceOf(PullActionPlugin); + }).timeout(10000); + + it('should load plugins that are subclassed from plugin classes', async function () { + const loader = new PluginLoader([join(testPackagePath, 'subclass.js')]); + await loader.load(); + expect(loader.pushPlugins.length).to.equal(1); + expect(loader.pushPlugins.every((p) => isCompatiblePlugin(p))).to.be.true; + expect(loader.pushPlugins.every((p) => isCompatiblePlugin(p, 'isGitProxyPushActionPlugin'))).to + .be.true; + expect(loader.pushPlugins[0]).to.be.instanceOf(PushActionPlugin); + }).timeout(10000); + + it('should not load plugins that are not valid modules', async function () { + const loader = new PluginLoader([join(__dirname, './dummy.js')]); + await loader.load(); + expect(loader.pushPlugins.length).to.equal(0); + expect(loader.pullPlugins.length).to.equal(0); + }).timeout(10000); + + it('should not load plugins that are not extended from plugin objects', async function () { + const loader = new PluginLoader([join(__dirname, './fixtures/baz.js')]); + await loader.load(); + expect(loader.pushPlugins.length).to.equal(0); + expect(loader.pullPlugins.length).to.equal(0); + }).timeout(10000); + + after(function () { + rmSync(join(testPackagePath, 'node_modules'), { recursive: true }); + }); +}); + +describe('plugin functions', function () { + it('should return true for isCompatiblePlugin', function () { + const plugin = new PushActionPlugin(); + expect(isCompatiblePlugin(plugin)).to.be.true; + expect(isCompatiblePlugin(plugin, 'isGitProxyPushActionPlugin')).to.be.true; + }); + + it('should return false for isCompatiblePlugin', function () { + const plugin = {}; + expect(isCompatiblePlugin(plugin)).to.be.false; + }); + + it('should return true for 
isCompatiblePlugin with a custom type', function () { + class CustomPlugin extends PushActionPlugin { + constructor() { + super(); + this.isCustomPlugin = true; + } + } + const plugin = new CustomPlugin(); + expect(isCompatiblePlugin(plugin)).to.be.true; + expect(isCompatiblePlugin(plugin, 'isGitProxyPushActionPlugin')).to.be.true; + }); +}); diff --git a/test/preReceive/preReceive.test.js b/test/preReceive/preReceive.test.js new file mode 100644 index 000000000..b9cfe0ecb --- /dev/null +++ b/test/preReceive/preReceive.test.js @@ -0,0 +1,138 @@ +const { expect } = require('chai'); +const sinon = require('sinon'); +const path = require('path'); +const { exec } = require('../../src/proxy/processors/push-action/preReceive'); + +describe('Pre-Receive Hook Execution', function () { + let action; + let req; + + beforeEach(() => { + req = {}; + action = { + steps: [], + commitFrom: 'oldCommitHash', + commitTo: 'newCommitHash', + branch: 'feature-branch', + proxyGitPath: 'test/preReceive/mock/repo', + repoName: 'test-repo', + addStep: function (step) { + this.steps.push(step); + }, + setAutoApproval: sinon.stub(), + setAutoRejection: sinon.stub(), + }; + }); + + afterEach(() => { + sinon.restore(); + }); + + it('should skip execution when hook file does not exist', async () => { + const scriptPath = path.resolve(__dirname, 'pre-receive-hooks/missing-hook.sh'); + + const result = await exec(req, action, scriptPath); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect( + result.steps[0].logs.some((log) => + log.includes('Pre-receive hook not found, skipping execution.'), + ), + ).to.be.true; + expect(action.setAutoApproval.called).to.be.false; + expect(action.setAutoRejection.called).to.be.false; + }); + + it('should skip execution when hook directory does not exist', async () => { + const scriptPath = path.resolve(__dirname, 'non-existent-directory/pre-receive.sh'); + + const result = await exec(req, action, scriptPath); + + 
expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect( + result.steps[0].logs.some((log) => + log.includes('Pre-receive hook not found, skipping execution.'), + ), + ).to.be.true; + expect(action.setAutoApproval.called).to.be.false; + expect(action.setAutoRejection.called).to.be.false; + }); + + it('should catch and handle unexpected errors', async () => { + const scriptPath = path.resolve(__dirname, 'pre-receive-hooks/always-exit-0.sh'); + + sinon.stub(require('fs'), 'existsSync').throws(new Error('Unexpected FS error')); + + const result = await exec(req, action, scriptPath); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect( + result.steps[0].logs.some((log) => log.includes('Hook execution error: Unexpected FS error')), + ).to.be.true; + expect(action.setAutoApproval.called).to.be.false; + expect(action.setAutoRejection.called).to.be.false; + }); + + it('should approve push automatically when hook returns status 0', async () => { + const scriptPath = path.resolve(__dirname, 'pre-receive-hooks/always-exit-0.sh'); + + const result = await exec(req, action, scriptPath); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect( + result.steps[0].logs.some((log) => + log.includes('Push automatically approved by pre-receive hook.'), + ), + ).to.be.true; + expect(action.setAutoApproval.calledOnce).to.be.true; + expect(action.setAutoRejection.called).to.be.false; + }); + + it('should reject push automatically when hook returns status 1', async () => { + const scriptPath = path.resolve(__dirname, 'pre-receive-hooks/always-exit-1.sh'); + + const result = await exec(req, action, scriptPath); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect( + result.steps[0].logs.some((log) => + log.includes('Push automatically rejected by pre-receive hook.'), + ), + ).to.be.true; + 
expect(action.setAutoRejection.calledOnce).to.be.true; + expect(action.setAutoApproval.called).to.be.false; + }); + + it('should execute hook successfully and require manual approval', async () => { + const scriptPath = path.resolve(__dirname, 'pre-receive-hooks/always-exit-2.sh'); + + const result = await exec(req, action, scriptPath); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(result.steps[0].logs.some((log) => log.includes('Push requires manual approval.'))).to.be + .true; + expect(action.setAutoApproval.called).to.be.false; + expect(action.setAutoRejection.called).to.be.false; + }); + + it('should handle unexpected hook status codes', async () => { + const scriptPath = path.resolve(__dirname, 'pre-receive-hooks/always-exit-99.sh'); + + const result = await exec(req, action, scriptPath); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(result.steps[0].logs.some((log) => log.includes('Unexpected hook status: 99'))).to.be + .true; + expect(result.steps[0].logs.some((log) => log.includes('Unknown pre-receive hook error.'))).to + .be.true; + expect(action.setAutoApproval.called).to.be.false; + expect(action.setAutoRejection.called).to.be.false; + }); +}); diff --git a/test/processors/blockForAuth.test.js b/test/processors/blockForAuth.test.js new file mode 100644 index 000000000..18f4262e9 --- /dev/null +++ b/test/processors/blockForAuth.test.js @@ -0,0 +1,135 @@ +const fc = require('fast-check'); +const chai = require('chai'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire').noCallThru(); +const { Step } = require('../../src/proxy/actions'); + +chai.should(); +const expect = chai.expect; + +describe('blockForAuth', () => { + let action; + let exec; + let getServiceUIURLStub; + let req; + let stepInstance; + let StepSpy; + + beforeEach(() => { + req = { + protocol: 'https', + headers: { host: 'example.com' }, + }; + + action = { + id: 
'push_123', + addStep: sinon.stub(), + }; + + stepInstance = new Step('temp'); + sinon.stub(stepInstance, 'setAsyncBlock'); + + StepSpy = sinon.stub().returns(stepInstance); + + getServiceUIURLStub = sinon.stub().returns('http://localhost:8080'); + + const blockForAuth = proxyquire('../../src/proxy/processors/push-action/blockForAuth', { + '../../../service/urls': { getServiceUIURL: getServiceUIURLStub }, + '../../actions': { Step: StepSpy }, + }); + + exec = blockForAuth.exec; + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('exec', () => { + it('should generate a correct shareable URL', async () => { + await exec(req, action); + expect(getServiceUIURLStub.calledOnce).to.be.true; + expect(getServiceUIURLStub.calledWithExactly(req)).to.be.true; + }); + + it('should create step with correct parameters', async () => { + await exec(req, action); + + expect(StepSpy.calledOnce).to.be.true; + expect(StepSpy.calledWithExactly('authBlock')).to.be.true; + expect(stepInstance.setAsyncBlock.calledOnce).to.be.true; + + const message = stepInstance.setAsyncBlock.firstCall.args[0]; + expect(message).to.include('http://localhost:8080/dashboard/push/push_123'); + expect(message).to.include('\x1B[32mGitProxy has received your push ✅\x1B[0m'); + expect(message).to.include('\x1B[34mhttp://localhost:8080/dashboard/push/push_123\x1B[0m'); + expect(message).to.include('🔗 Shareable Link'); + }); + + it('should add step to action exactly once', async () => { + await exec(req, action); + expect(action.addStep.calledOnce).to.be.true; + expect(action.addStep.calledWithExactly(stepInstance)).to.be.true; + }); + + it('should return action instance', async () => { + const result = await exec(req, action); + expect(result).to.equal(action); + }); + + it('should handle https URL format', async () => { + getServiceUIURLStub.returns('https://git-proxy-hosted-ui.com'); + await exec(req, action); + + const message = stepInstance.setAsyncBlock.firstCall.args[0]; + 
expect(message).to.include('https://git-proxy-hosted-ui.com/dashboard/push/push_123'); + }); + + it('should handle special characters in action ID', async () => { + action.id = 'push@special#chars!'; + await exec(req, action); + + const message = stepInstance.setAsyncBlock.firstCall.args[0]; + expect(message).to.include('/push/push@special#chars!'); + }); + }); + + describe('fuzzing', () => { + it('should create a step with correct parameters regardless of action ID', () => { + fc.assert( + fc.asyncProperty(fc.string(), async (actionId) => { + action.id = actionId; + + const freshStepInstance = new Step('temp'); + const setAsyncBlockStub = sinon.stub(freshStepInstance, 'setAsyncBlock'); + + const StepSpyLocal = sinon.stub().returns(freshStepInstance); + const getServiceUIURLStubLocal = sinon.stub().returns('http://localhost:8080'); + + const blockForAuth = proxyquire('../../src/proxy/processors/push-action/blockForAuth', { + '../../../service/urls': { getServiceUIURL: getServiceUIURLStubLocal }, + '../../actions': { Step: StepSpyLocal }, + }); + + const result = await blockForAuth.exec(req, action); + + expect(StepSpyLocal.calledOnce).to.be.true; + expect(StepSpyLocal.calledWithExactly('authBlock')).to.be.true; + expect(setAsyncBlockStub.calledOnce).to.be.true; + + const message = setAsyncBlockStub.firstCall.args[0]; + expect(message).to.include(`http://localhost:8080/dashboard/push/${actionId}`); + expect(message).to.include('\x1B[32mGitProxy has received your push ✅\x1B[0m'); + expect(message).to.include( + `\x1B[34mhttp://localhost:8080/dashboard/push/${actionId}\x1B[0m`, + ); + expect(message).to.include('🔗 Shareable Link'); + expect(result).to.equal(action); + }), + { + numRuns: 1000, + }, + ); + }); + }); +}); diff --git a/test/processors/checkAuthorEmails.test.js b/test/processors/checkAuthorEmails.test.js new file mode 100644 index 000000000..d96cc38b1 --- /dev/null +++ b/test/processors/checkAuthorEmails.test.js @@ -0,0 +1,231 @@ +const sinon = 
require('sinon'); +const proxyquire = require('proxyquire').noCallThru(); +const { expect } = require('chai'); +const fc = require('fast-check'); + +describe('checkAuthorEmails', () => { + let action; + let commitConfig; + let exec; + let getCommitConfigStub; + let stepSpy; + let StepStub; + + beforeEach(() => { + StepStub = class { + constructor() { + this.error = undefined; + } + log() {} + setError() {} + }; + stepSpy = sinon.spy(StepStub.prototype, 'log'); + sinon.spy(StepStub.prototype, 'setError'); + + commitConfig = { + author: { + email: { + domain: { allow: null }, + local: { block: null }, + }, + }, + }; + getCommitConfigStub = sinon.stub().returns(commitConfig); + + action = { + commitData: [], + addStep: sinon.stub().callsFake((step) => { + action.step = new StepStub(); + Object.assign(action.step, step); + return action.step; + }), + }; + + const checkAuthorEmails = proxyquire( + '../../src/proxy/processors/push-action/checkAuthorEmails', + { + '../../../config': { getCommitConfig: getCommitConfigStub }, + '../../actions': { Step: StepStub }, + }, + ); + + exec = checkAuthorEmails.exec; + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('exec', () => { + it('should allow valid emails when no restrictions', async () => { + action.commitData = [ + { authorEmail: 'valid@example.com' }, + { authorEmail: 'another.valid@test.org' }, + ]; + + await exec({}, action); + + expect(action.step.error).to.be.undefined; + }); + + it('should block emails from forbidden domains', async () => { + commitConfig.author.email.domain.allow = 'example\\.com$'; + action.commitData = [ + { authorEmail: 'valid@example.com' }, + { authorEmail: 'invalid@forbidden.org' }, + ]; + + await exec({}, action); + + expect(action.step.error).to.be.true; + expect( + stepSpy.calledWith( + 'The following commit author e-mails are illegal: invalid@forbidden.org', + ), + ).to.be.true; + expect( + StepStub.prototype.setError.calledWith( + 'Your push has been blocked. 
Please verify your Git configured e-mail address is valid (e.g. john.smith@example.com)', + ), + ).to.be.true; + }); + + it('should block emails with forbidden usernames', async () => { + commitConfig.author.email.local.block = 'blocked'; + action.commitData = [ + { authorEmail: 'allowed@example.com' }, + { authorEmail: 'blocked.user@test.org' }, + ]; + + await exec({}, action); + + expect(action.step.error).to.be.true; + expect( + stepSpy.calledWith( + 'The following commit author e-mails are illegal: blocked.user@test.org', + ), + ).to.be.true; + }); + + it('should handle empty email strings', async () => { + action.commitData = [{ authorEmail: '' }, { authorEmail: 'valid@example.com' }]; + + await exec({}, action); + + expect(action.step.error).to.be.true; + expect(stepSpy.calledWith('The following commit author e-mails are illegal: ')).to.be.true; + }); + + it('should allow emails when both checks pass', async () => { + commitConfig.author.email.domain.allow = 'example\\.com$'; + commitConfig.author.email.local.block = 'forbidden'; + action.commitData = [ + { authorEmail: 'allowed@example.com' }, + { authorEmail: 'also.allowed@example.com' }, + ]; + + await exec({}, action); + + expect(action.step.error).to.be.undefined; + }); + + it('should block emails that fail both checks', async () => { + commitConfig.author.email.domain.allow = 'example\\.com$'; + commitConfig.author.email.local.block = 'forbidden'; + action.commitData = [{ authorEmail: 'forbidden@wrong.org' }]; + + await exec({}, action); + + expect(action.step.error).to.be.true; + expect( + stepSpy.calledWith('The following commit author e-mails are illegal: forbidden@wrong.org'), + ).to.be.true; + }); + + it('should handle emails without domain', async () => { + action.commitData = [{ authorEmail: 'nodomain@' }]; + + await exec({}, action); + + expect(action.step.error).to.be.true; + expect(stepSpy.calledWith('The following commit author e-mails are illegal: nodomain@')).to.be + .true; + }); + + 
it('should handle multiple illegal emails', async () => { + commitConfig.author.email.domain.allow = 'example\\.com$'; + action.commitData = [ + { authorEmail: 'invalid1@bad.org' }, + { authorEmail: 'invalid2@wrong.net' }, + { authorEmail: 'valid@example.com' }, + ]; + + await exec({}, action); + + expect(action.step.error).to.be.true; + expect( + stepSpy.calledWith( + 'The following commit author e-mails are illegal: invalid1@bad.org,invalid2@wrong.net', + ), + ).to.be.true; + }); + }); + + describe('fuzzing', () => { + it('should not crash on random string in commit email', () => { + fc.assert( + fc.property(fc.string(), (commitEmail) => { + action.commitData = [{ authorEmail: commitEmail }]; + exec({}, action); + }), + { + numRuns: 1000, + }, + ); + + expect(action.step.error).to.be.true; + expect(stepSpy.calledWith('The following commit author e-mails are illegal: ')).to.be.true; + }); + + it('should handle valid emails with random characters', () => { + fc.assert( + fc.property(fc.emailAddress(), (commitEmail) => { + action.commitData = [{ authorEmail: commitEmail }]; + exec({}, action); + }), + { + numRuns: 1000, + }, + ); + expect(action.step.error).to.be.undefined; + }); + + it('should handle invalid types in commit email', () => { + fc.assert( + fc.property(fc.anything(), (commitEmail) => { + action.commitData = [{ authorEmail: commitEmail }]; + exec({}, action); + }), + { + numRuns: 1000, + }, + ); + + expect(action.step.error).to.be.true; + expect(stepSpy.calledWith('The following commit author e-mails are illegal: ')).to.be.true; + }); + + it('should handle arrays of valid emails', () => { + fc.assert( + fc.property(fc.array(fc.emailAddress()), (commitEmails) => { + action.commitData = commitEmails.map((email) => ({ authorEmail: email })); + exec({}, action); + }), + { + numRuns: 1000, + }, + ); + expect(action.step.error).to.be.undefined; + }); + }); +}); diff --git a/test/processors/checkCommitMessages.test.js 
b/test/processors/checkCommitMessages.test.js new file mode 100644 index 000000000..73a10ca9d --- /dev/null +++ b/test/processors/checkCommitMessages.test.js @@ -0,0 +1,196 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); +const { Action, Step } = require('../../src/proxy/actions'); +const fc = require('fast-check'); + +chai.should(); +const expect = chai.expect; + +describe('checkCommitMessages', () => { + let commitConfig; + let exec; + let getCommitConfigStub; + let logStub; + + beforeEach(() => { + logStub = sinon.stub(console, 'log'); + + commitConfig = { + message: { + block: { + literals: ['secret', 'password'], + patterns: ['\\b\\d{4}-\\d{4}-\\d{4}-\\d{4}\\b'], // Credit card pattern + }, + }, + }; + + getCommitConfigStub = sinon.stub().returns(commitConfig); + + const checkCommitMessages = proxyquire( + '../../src/proxy/processors/push-action/checkCommitMessages', + { + '../../../config': { getCommitConfig: getCommitConfigStub }, + }, + ); + + exec = checkCommitMessages.exec; + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('exec', () => { + let action; + let req; + let stepSpy; + + beforeEach(() => { + req = {}; + action = new Action('1234567890', 'push', 'POST', 1234567890, 'test/repo.git'); + action.commitData = [ + { message: 'Fix bug', author: 'test@example.com' }, + { message: 'Update docs', author: 'test@example.com' }, + ]; + stepSpy = sinon.spy(Step.prototype, 'log'); + }); + + it('should allow commit with valid messages', async () => { + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(logStub.calledWith('The following commit messages are legal: Fix bug,Update docs')).to + .be.true; + }); + + it('should block commit with illegal messages', async () => { + action.commitData?.push({ message: 'secret password here', author: 'test@example.com' }); + + const result = await exec(req, 
action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(stepSpy.calledWith('The following commit messages are illegal: secret password here')) + .to.be.true; + expect(result.steps[0].errorMessage).to.include('Your push has been blocked'); + expect(logStub.calledWith('The following commit messages are illegal: secret password here')) + .to.be.true; + }); + + it('should handle duplicate messages only once', async () => { + action.commitData = [ + { message: 'secret', author: 'test@example.com' }, + { message: 'secret', author: 'test@example.com' }, + { message: 'password', author: 'test@example.com' }, + ]; + + const result = await exec(req, action); + + expect(result.steps[0].error).to.be.true; + expect(stepSpy.calledWith('The following commit messages are illegal: secret,password')).to.be + .true; + expect(logStub.calledWith('The following commit messages are illegal: secret,password')).to.be + .true; + }); + + it('should not error when commit data is empty', async () => { + // Empty commit data happens when making a branch from an unapproved commit + // or when pushing an empty branch or deleting a branch + // This is handled in the checkEmptyBranch.exec action + action.commitData = []; + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(logStub.calledWith('The following commit messages are legal: ')).to.be.true; + }); + + it('should handle commit data with null values', async () => { + action.commitData = [ + { message: null, author: 'test@example.com' }, + { message: undefined, author: 'test@example.com' }, + ]; + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + }); + + it('should handle commit messages of incorrect type', async () => { + action.commitData = [ + { message: 123, author: 'test@example.com' }, + { message: {}, author: 
'test@example.com' }, + ]; + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(stepSpy.calledWith('The following commit messages are illegal: 123,[object Object]')) + .to.be.true; + expect(logStub.calledWith('The following commit messages are illegal: 123,[object Object]')) + .to.be.true; + }); + + it('should handle a mix of valid and invalid messages', async () => { + action.commitData = [ + { message: 'Fix bug', author: 'test@example.com' }, + { message: 'secret password here', author: 'test@example.com' }, + ]; + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(stepSpy.calledWith('The following commit messages are illegal: secret password here')) + .to.be.true; + expect(logStub.calledWith('The following commit messages are illegal: secret password here')) + .to.be.true; + }); + + describe('fuzzing', () => { + it('should not crash on arbitrary commit messages', async () => { + await fc.assert( + fc.asyncProperty( + fc.array( + fc.record({ + message: fc.oneof( + fc.string(), + fc.constant(null), + fc.constant(undefined), + fc.integer(), + fc.double(), + fc.boolean(), + ), + author: fc.string(), + }), + { maxLength: 20 }, + ), + async (fuzzedCommits) => { + const fuzzAction = new Action('fuzz', 'push', 'POST', Date.now(), 'fuzz/repo'); + fuzzAction.commitData = Array.isArray(fuzzedCommits) ? 
fuzzedCommits : []; + + const result = await exec({}, fuzzAction); + + expect(result).to.have.property('steps'); + expect(result.steps[0]).to.have.property('error').that.is.a('boolean'); + }, + ), + { + examples: [ + [{ message: '', author: 'me' }], + [{ message: '1234-5678-9012-3456', author: 'me' }], + [{ message: null, author: 'me' }], + [{ message: {}, author: 'me' }], + [{ message: 'SeCrEt', author: 'me' }], + ], + numRuns: 1000, + }, + ); + }); + }); + }); +}); diff --git a/test/processors/checkEmptyBranch.test.js b/test/processors/checkEmptyBranch.test.js new file mode 100644 index 000000000..b2833122f --- /dev/null +++ b/test/processors/checkEmptyBranch.test.js @@ -0,0 +1,111 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); +const { Action } = require('../../src/proxy/actions'); + +chai.should(); +const expect = chai.expect; + +describe('checkEmptyBranch', () => { + let exec; + let simpleGitStub; + let gitRawStub; + + beforeEach(() => { + gitRawStub = sinon.stub(); + simpleGitStub = sinon.stub().callsFake((workingDir) => { + return { + raw: gitRawStub, + cwd: workingDir, + }; + }); + + const checkEmptyBranch = proxyquire('../../src/proxy/processors/push-action/checkEmptyBranch', { + 'simple-git': { + default: simpleGitStub, + __esModule: true, + '@global': true, + '@noCallThru': true, + }, + // deeply mocking fs to prevent simple-git from validating directories (which fails) + fs: { + existsSync: sinon.stub().returns(true), + lstatSync: sinon.stub().returns({ + isDirectory: () => true, + isFile: () => false, + }), + '@global': true, + }, + }); + + exec = checkEmptyBranch.exec; + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('exec', () => { + let action; + let req; + + beforeEach(() => { + req = {}; + action = new Action('1234567890', 'push', 'POST', 1234567890, 'test/repo'); + action.proxyGitPath = '/tmp/gitproxy'; + action.repoName = 'test-repo'; + action.commitFrom = 
'0000000000000000000000000000000000000000'; + action.commitTo = 'abcdef1234567890abcdef1234567890abcdef12'; + action.commitData = []; + }); + + it('should pass through if commitData is already populated', async () => { + action.commitData = [{ message: 'Existing commit' }]; + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(0); + expect(simpleGitStub.called).to.be.false; + }); + + it('should block empty branch pushes with a commit that exists', async () => { + gitRawStub.resolves('commit\n'); + + const result = await exec(req, action); + + expect(simpleGitStub.calledWith('/tmp/gitproxy/test-repo')).to.be.true; + expect(gitRawStub.calledWith(['cat-file', '-t', action.commitTo])).to.be.true; + + const step = result.steps.find((s) => s.stepName === 'checkEmptyBranch'); + expect(step).to.exist; + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('Push blocked: Empty branch'); + }); + + it('should block pushes if commitTo does not resolve', async () => { + gitRawStub.rejects(new Error('fatal: Not a valid object name')); + + const result = await exec(req, action); + + expect(gitRawStub.calledWith(['cat-file', '-t', action.commitTo])).to.be.true; + + const step = result.steps.find((s) => s.stepName === 'checkEmptyBranch'); + expect(step).to.exist; + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('Push blocked: Commit data not found'); + }); + + it('should block non-empty branch pushes with empty commitData', async () => { + action.commitFrom = 'abcdef1234567890abcdef1234567890abcdef12'; + + const result = await exec(req, action); + + expect(simpleGitStub.called).to.be.false; + + const step = result.steps.find((s) => s.stepName === 'checkEmptyBranch'); + expect(step).to.exist; + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('Push blocked: Commit data not found'); + }); + }); +}); diff --git a/test/processors/checkIfWaitingAuth.test.js 
b/test/processors/checkIfWaitingAuth.test.js new file mode 100644 index 000000000..0ee9988bb --- /dev/null +++ b/test/processors/checkIfWaitingAuth.test.js @@ -0,0 +1,121 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); +const { Action } = require('../../src/proxy/actions'); + +chai.should(); +const expect = chai.expect; + +describe('checkIfWaitingAuth', () => { + let exec; + let getPushStub; + + beforeEach(() => { + getPushStub = sinon.stub(); + + const checkIfWaitingAuth = proxyquire( + '../../src/proxy/processors/push-action/checkIfWaitingAuth', + { + '../../../db': { getPush: getPushStub }, + }, + ); + + exec = checkIfWaitingAuth.exec; + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('exec', () => { + let action; + let req; + + beforeEach(() => { + req = {}; + action = new Action('1234567890', 'push', 'POST', 1234567890, 'test/repo.git'); + }); + + it('should set allowPush when action exists and is authorized', async () => { + const authorizedAction = new Action( + '1234567890', + 'push', + 'POST', + 1234567890, + 'test/repo.git', + ); + authorizedAction.authorised = true; + getPushStub.resolves(authorizedAction); + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(result.allowPush).to.be.true; + expect(result).to.deep.equal(authorizedAction); + }); + + it('should not set allowPush when action exists but not authorized', async () => { + const unauthorizedAction = new Action( + '1234567890', + 'push', + 'POST', + 1234567890, + 'test/repo.git', + ); + unauthorizedAction.authorised = false; + getPushStub.resolves(unauthorizedAction); + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(result.allowPush).to.be.false; + }); + + it('should not set allowPush when action does not exist', async () => { + 
getPushStub.resolves(null); + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(result.allowPush).to.be.false; + }); + + it('should not modify action when it has an error', async () => { + action.error = true; + const authorizedAction = new Action( + '1234567890', + 'push', + 'POST', + 1234567890, + 'test/repo.git', + ); + authorizedAction.authorised = true; + getPushStub.resolves(authorizedAction); + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(result.allowPush).to.be.false; + expect(result.error).to.be.true; + }); + + it('should add step with error when getPush throws', async () => { + const error = new Error('DB error'); + getPushStub.rejects(error); + + try { + await exec(req, action); + throw new Error('Should have thrown'); + } catch (e) { + expect(e).to.equal(error); + expect(action.steps).to.have.lengthOf(1); + expect(action.steps[0].error).to.be.true; + expect(action.steps[0].errorMessage).to.contain('DB error'); + } + }); + }); +}); diff --git a/test/processors/checkUserPushPermission.test.js b/test/processors/checkUserPushPermission.test.js new file mode 100644 index 000000000..c566ca362 --- /dev/null +++ b/test/processors/checkUserPushPermission.test.js @@ -0,0 +1,158 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); +const fc = require('fast-check'); +const { Action, Step } = require('../../src/proxy/actions'); + +chai.should(); +const expect = chai.expect; + +describe('checkUserPushPermission', () => { + let exec; + let getUsersStub; + let isUserPushAllowedStub; + let logStub; + let errorStub; + + beforeEach(() => { + logStub = sinon.stub(console, 'log'); + errorStub = sinon.stub(console, 'error'); + getUsersStub = sinon.stub(); + isUserPushAllowedStub = sinon.stub(); + + const checkUserPushPermission = 
proxyquire( + '../../src/proxy/processors/push-action/checkUserPushPermission', + { + '../../../db': { + getUsers: getUsersStub, + isUserPushAllowed: isUserPushAllowedStub, + }, + }, + ); + + exec = checkUserPushPermission.exec; + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('exec', () => { + let action; + let req; + let stepSpy; + + beforeEach(() => { + req = {}; + action = new Action( + '1234567890', + 'push', + 'POST', + 1234567890, + 'https://github.com/finos/git-proxy.git', + ); + action.user = 'git-user'; + action.userEmail = 'db-user@test.com'; + stepSpy = sinon.spy(Step.prototype, 'log'); + }); + + it('should allow push when user has permission', async () => { + getUsersStub.resolves([ + { username: 'db-user', email: 'db-user@test.com', gitAccount: 'git-user' }, + ]); + isUserPushAllowedStub.resolves(true); + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(stepSpy.lastCall.args[0]).to.equal( + 'User db-user@test.com is allowed to push on repo https://github.com/finos/git-proxy.git', + ); + expect(logStub.lastCall.args[0]).to.equal( + 'User db-user@test.com permission on Repo https://github.com/finos/git-proxy.git : true', + ); + }); + + it('should reject push when user has no permission', async () => { + getUsersStub.resolves([ + { username: 'db-user', email: 'db-user@test.com', gitAccount: 'git-user' }, + ]); + isUserPushAllowedStub.resolves(false); + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(stepSpy.lastCall.args[0]).to.equal( + 'Your push has been blocked (db-user@test.com is not allowed to push on repo https://github.com/finos/git-proxy.git)', + ); + expect(result.steps[0].errorMessage).to.include('Your push has been blocked'); + expect(logStub.lastCall.args[0]).to.equal('User not allowed to Push'); + }); + + it('should reject push when no user 
found for git account', async () => { + getUsersStub.resolves([]); + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(stepSpy.lastCall.args[0]).to.equal( + 'Your push has been blocked (db-user@test.com is not allowed to push on repo https://github.com/finos/git-proxy.git)', + ); + expect(result.steps[0].errorMessage).to.include('Your push has been blocked'); + }); + + it('should handle multiple users for git account by rejecting the push', async () => { + getUsersStub.resolves([ + { username: 'user1', email: 'db-user@test.com', gitAccount: 'git-user' }, + { username: 'user2', email: 'db-user@test.com', gitAccount: 'git-user' }, + ]); + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(stepSpy.lastCall.args[0]).to.equal( + 'Your push has been blocked (there are multiple users with email db-user@test.com)', + ); + expect(errorStub.lastCall.args[0]).to.equal( + 'Multiple users found with email address db-user@test.com, ending', + ); + }); + + it('should return error when no user is set in the action', async () => { + action.user = null; + action.userEmail = null; + getUsersStub.resolves([]); + const result = await exec(req, action); + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(result.steps[0].errorMessage).to.include( + 'Push blocked: User not found. 
Please contact an administrator for support.', + ); + }); + + describe('fuzzing', () => { + it('should not crash on arbitrary getUsers return values (fuzzing)', async () => { + const userList = fc.sample( + fc.array( + fc.record({ + username: fc.string(), + gitAccount: fc.string(), + }), + { maxLength: 5 }, + ), + 1, + )[0]; + getUsersStub.resolves(userList); + + const result = await exec(req, action); + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + }); + }); + }); +}); diff --git a/test/processors/clearBareClone.test.js b/test/processors/clearBareClone.test.js new file mode 100644 index 000000000..3f869ff98 --- /dev/null +++ b/test/processors/clearBareClone.test.js @@ -0,0 +1,42 @@ +const fs = require('fs'); +const chai = require('chai'); +const clearBareClone = require('../../src/proxy/processors/push-action/clearBareClone').exec; +const pullRemote = require('../../src/proxy/processors/push-action/pullRemote').exec; +const { Action } = require('../../src/proxy/actions/Action'); +chai.should(); + +const expect = chai.expect; +const timestamp = Date.now(); + +describe('clear bare and local clones', async () => { + it('pull remote generates a local .remote folder', async () => { + const action = new Action('123', 'type', 'get', timestamp, 'finos/git-proxy.git'); + action.url = 'https://github.com/finos/git-proxy.git'; + + const authorization = `Basic ${Buffer.from('JamieSlome:test').toString('base64')}`; + + await pullRemote( + { + headers: { + authorization, + }, + }, + action, + ); + + expect(fs.existsSync(`./.remote/${timestamp}`)).to.be.true; + }).timeout(20000); + + it('clear bare clone function purges .remote folder and specific clone folder', async () => { + const action = new Action('123', 'type', 'get', timestamp, 'finos/git-proxy.git'); + await clearBareClone(null, action); + expect(fs.existsSync(`./.remote`)).to.throw; + expect(fs.existsSync(`./.remote/${timestamp}`)).to.throw; + }); + + afterEach(() => { + if 
(fs.existsSync(`./.remote`)) { + fs.rmdirSync(`./.remote`, { recursive: true }); + } + }); +}); diff --git a/test/processors/getDiff.test.js b/test/processors/getDiff.test.js new file mode 100644 index 000000000..a6b2a64bd --- /dev/null +++ b/test/processors/getDiff.test.js @@ -0,0 +1,173 @@ +const path = require('path'); +const simpleGit = require('simple-git'); +const fs = require('fs').promises; +const fc = require('fast-check'); +const { Action } = require('../../src/proxy/actions'); +const { exec } = require('../../src/proxy/processors/push-action/getDiff'); + +const chai = require('chai'); +const expect = chai.expect; + +describe('getDiff', () => { + let tempDir; + let git; + + before(async () => { + // Create a temp repo to avoid mocking simple-git + tempDir = path.join(__dirname, 'temp-test-repo'); + await fs.mkdir(tempDir, { recursive: true }); + git = simpleGit(tempDir); + + await git.init(); + await git.addConfig('user.name', 'test'); + await git.addConfig('user.email', 'test@test.com'); + + await fs.writeFile(path.join(tempDir, 'test.txt'), 'initial content'); + await git.add('.'); + await git.commit('initial commit'); + }); + + after(async () => { + await fs.rmdir(tempDir, { recursive: true }); + }); + + it('should get diff between commits', async () => { + await fs.writeFile(path.join(tempDir, 'test.txt'), 'modified content'); + await git.add('.'); + await git.commit('second commit'); + + const action = new Action('1234567890', 'push', 'POST', 1234567890, 'test/repo.git'); + action.proxyGitPath = __dirname; // Temp dir parent path + action.repoName = 'temp-test-repo'; + action.commitFrom = 'HEAD~1'; + action.commitTo = 'HEAD'; + action.commitData = [{ parent: '0000000000000000000000000000000000000000' }]; + + const result = await exec({}, action); + + expect(result.steps[0].error).to.be.false; + expect(result.steps[0].content).to.include('modified content'); + expect(result.steps[0].content).to.include('initial content'); + }); + + it('should get diff 
between commits with no changes', async () => { + const action = new Action('1234567890', 'push', 'POST', 1234567890, 'test/repo.git'); + action.proxyGitPath = __dirname; // Temp dir parent path + action.repoName = 'temp-test-repo'; + action.commitFrom = 'HEAD~1'; + action.commitTo = 'HEAD'; + action.commitData = [{ parent: '0000000000000000000000000000000000000000' }]; + + const result = await exec({}, action); + + expect(result.steps[0].error).to.be.false; + expect(result.steps[0].content).to.include('initial content'); + }); + + it('should throw an error if no commit data is provided', async () => { + const action = new Action('1234567890', 'push', 'POST', 1234567890, 'test/repo.git'); + action.proxyGitPath = __dirname; // Temp dir parent path + action.repoName = 'temp-test-repo'; + action.commitFrom = 'HEAD~1'; + action.commitTo = 'HEAD'; + action.commitData = []; + + const result = await exec({}, action); + expect(result.steps[0].error).to.be.true; + expect(result.steps[0].errorMessage).to.contain( + 'Your push has been blocked because no commit data was found', + ); + }); + + it('should throw an error if no commit data is provided', async () => { + const action = new Action('1234567890', 'push', 'POST', 1234567890, 'test/repo.git'); + action.proxyGitPath = __dirname; // Temp dir parent path + action.repoName = 'temp-test-repo'; + action.commitFrom = 'HEAD~1'; + action.commitTo = 'HEAD'; + action.commitData = undefined; + + const result = await exec({}, action); + expect(result.steps[0].error).to.be.true; + expect(result.steps[0].errorMessage).to.contain( + 'Your push has been blocked because no commit data was found', + ); + }); + + it('should handle empty commit hash in commitFrom', async () => { + await fs.writeFile(path.join(tempDir, 'test.txt'), 'new content for parent test'); + await git.add('.'); + await git.commit('commit for parent test'); + + const log = await git.log(); + const parentCommit = log.all[1].hash; + const headCommit = log.all[0].hash; + 
+ const action = new Action('1234567890', 'push', 'POST', 1234567890, 'test/repo.git'); + + action.proxyGitPath = path.dirname(tempDir); + action.repoName = path.basename(tempDir); + action.commitFrom = '0000000000000000000000000000000000000000'; + action.commitTo = headCommit; + action.commitData = [{ parent: parentCommit }]; + + const result = await exec({}, action); + + expect(result.steps[0].error).to.be.false; + expect(result.steps[0].content).to.not.be.null; + expect(result.steps[0].content.length).to.be.greaterThan(0); + }); + + describe('fuzzing', () => { + it('should handle random action inputs without crashing', async function () { + // Not comprehensive but helps prevent crashing on bad input + await fc.assert( + fc.asyncProperty( + fc.string({ minLength: 0, maxLength: 40 }), + fc.string({ minLength: 0, maxLength: 40 }), + fc.array(fc.record({ parent: fc.string({ minLength: 0, maxLength: 40 }) }), { + maxLength: 3, + }), + async (from, to, commitData) => { + const action = new Action('id', 'push', 'POST', Date.now(), 'test/repo'); + action.proxyGitPath = __dirname; + action.repoName = 'temp-test-repo'; + action.commitFrom = from; + action.commitTo = to; + action.commitData = commitData; + + const result = await exec({}, action); + + expect(result).to.have.property('steps'); + expect(result.steps[0]).to.have.property('error'); + expect(result.steps[0]).to.have.property('content'); + }, + ), + { numRuns: 10 }, + ); + }); + + it('should handle randomized commitFrom and commitTo of proper length', async function () { + await fc.assert( + fc.asyncProperty( + fc.stringMatching(/^[0-9a-fA-F]{40}$/), + fc.stringMatching(/^[0-9a-fA-F]{40}$/), + async (from, to) => { + const action = new Action('id', 'push', 'POST', Date.now(), 'test/repo'); + action.proxyGitPath = __dirname; + action.repoName = 'temp-test-repo'; + action.commitFrom = from; + action.commitTo = to; + action.commitData = [{ parent: '0000000000000000000000000000000000000000' }]; + + const result = 
await exec({}, action); + + expect(result.steps[0].error).to.be.true; + expect(result.steps[0].errorMessage).to.contain('Invalid revision range'); + }, + ), + { numRuns: 10 }, + ); + }); + }); +}); diff --git a/test/processors/gitLeaks.test.js b/test/processors/gitLeaks.test.js new file mode 100644 index 000000000..eca181c61 --- /dev/null +++ b/test/processors/gitLeaks.test.js @@ -0,0 +1,324 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); +const { Action, Step } = require('../../src/proxy/actions'); + +chai.should(); +const expect = chai.expect; + +describe('gitleaks', () => { + describe('exec', () => { + let exec; + let stubs; + let action; + let req; + let stepSpy; + let logStub; + let errorStub; + + beforeEach(() => { + stubs = { + getAPIs: sinon.stub(), + fs: { + stat: sinon.stub(), + access: sinon.stub(), + constants: { R_OK: 0 }, + }, + spawn: sinon.stub(), + }; + + logStub = sinon.stub(console, 'log'); + errorStub = sinon.stub(console, 'error'); + + const gitleaksModule = proxyquire('../../src/proxy/processors/push-action/gitleaks', { + '../../../config': { getAPIs: stubs.getAPIs }, + 'node:fs/promises': stubs.fs, + 'node:child_process': { spawn: stubs.spawn }, + }); + + exec = gitleaksModule.exec; + + req = {}; + action = new Action('1234567890', 'push', 'POST', 1234567890, 'test/repo.git'); + action.proxyGitPath = '/tmp'; + action.repoName = 'test-repo'; + action.commitFrom = 'abc123'; + action.commitTo = 'def456'; + + stepSpy = sinon.spy(Step.prototype, 'setError'); + }); + + afterEach(() => { + sinon.restore(); + }); + + it('should handle config loading failure', async () => { + stubs.getAPIs.throws(new Error('Config error')); + + const result = await exec(req, action); + + expect(result.error).to.be.true; + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(stepSpy.calledWith('failed setup gitleaks, please contact an administrator\n')).to.be + 
.true; + expect(errorStub.calledWith('failed to get gitleaks config, please fix the error:')).to.be + .true; + }); + + it('should skip scanning when plugin is disabled', async () => { + stubs.getAPIs.returns({ gitleaks: { enabled: false } }); + + const result = await exec(req, action); + + expect(result.error).to.be.false; + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(logStub.calledWith('gitleaks is disabled, skipping')).to.be.true; + }); + + it('should handle successful scan with no findings', async () => { + stubs.getAPIs.returns({ gitleaks: { enabled: true } }); + + const gitRootCommitMock = { + exitCode: 0, + stdout: 'rootcommit123\n', + stderr: '', + }; + + const gitleaksMock = { + exitCode: 0, + stdout: '', + stderr: 'No leaks found', + }; + + stubs.spawn + .onFirstCall() + .returns({ + on: (event, cb) => { + if (event === 'close') cb(gitRootCommitMock.exitCode); + return { stdout: { on: () => {} }, stderr: { on: () => {} } }; + }, + stdout: { on: (_, cb) => cb(gitRootCommitMock.stdout) }, + stderr: { on: (_, cb) => cb(gitRootCommitMock.stderr) }, + }) + .onSecondCall() + .returns({ + on: (event, cb) => { + if (event === 'close') cb(gitleaksMock.exitCode); + return { stdout: { on: () => {} }, stderr: { on: () => {} } }; + }, + stdout: { on: (_, cb) => cb(gitleaksMock.stdout) }, + stderr: { on: (_, cb) => cb(gitleaksMock.stderr) }, + }); + + const result = await exec(req, action); + + expect(result.error).to.be.false; + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(logStub.calledWith('succeded')).to.be.true; + expect(logStub.calledWith('No leaks found')).to.be.true; + }); + + it('should handle scan with findings', async () => { + stubs.getAPIs.returns({ gitleaks: { enabled: true } }); + + const gitRootCommitMock = { + exitCode: 0, + stdout: 'rootcommit123\n', + stderr: '', + }; + + const gitleaksMock = { + exitCode: 99, + stdout: 'Found secret in file.txt\n', 
+ stderr: 'Warning: potential leak', + }; + + stubs.spawn + .onFirstCall() + .returns({ + on: (event, cb) => { + if (event === 'close') cb(gitRootCommitMock.exitCode); + return { stdout: { on: () => {} }, stderr: { on: () => {} } }; + }, + stdout: { on: (_, cb) => cb(gitRootCommitMock.stdout) }, + stderr: { on: (_, cb) => cb(gitRootCommitMock.stderr) }, + }) + .onSecondCall() + .returns({ + on: (event, cb) => { + if (event === 'close') cb(gitleaksMock.exitCode); + return { stdout: { on: () => {} }, stderr: { on: () => {} } }; + }, + stdout: { on: (_, cb) => cb(gitleaksMock.stdout) }, + stderr: { on: (_, cb) => cb(gitleaksMock.stderr) }, + }); + + const result = await exec(req, action); + + expect(result.error).to.be.true; + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(stepSpy.calledWith('\nFound secret in file.txt\nWarning: potential leak')).to.be.true; + }); + + it('should handle gitleaks execution failure', async () => { + stubs.getAPIs.returns({ gitleaks: { enabled: true } }); + + const gitRootCommitMock = { + exitCode: 0, + stdout: 'rootcommit123\n', + stderr: '', + }; + + const gitleaksMock = { + exitCode: 1, + stdout: '', + stderr: 'Command failed', + }; + + stubs.spawn + .onFirstCall() + .returns({ + on: (event, cb) => { + if (event === 'close') cb(gitRootCommitMock.exitCode); + return { stdout: { on: () => {} }, stderr: { on: () => {} } }; + }, + stdout: { on: (_, cb) => cb(gitRootCommitMock.stdout) }, + stderr: { on: (_, cb) => cb(gitRootCommitMock.stderr) }, + }) + .onSecondCall() + .returns({ + on: (event, cb) => { + if (event === 'close') cb(gitleaksMock.exitCode); + return { stdout: { on: () => {} }, stderr: { on: () => {} } }; + }, + stdout: { on: (_, cb) => cb(gitleaksMock.stdout) }, + stderr: { on: (_, cb) => cb(gitleaksMock.stderr) }, + }); + + const result = await exec(req, action); + + expect(result.error).to.be.true; + expect(result.steps).to.have.lengthOf(1); + 
expect(result.steps[0].error).to.be.true; + expect(stepSpy.calledWith('failed to run gitleaks, please contact an administrator\n')).to.be + .true; + }); + + it('should handle gitleaks spawn failure', async () => { + stubs.getAPIs.returns({ gitleaks: { enabled: true } }); + stubs.spawn.onFirstCall().throws(new Error('Spawn error')); + + const result = await exec(req, action); + + expect(result.error).to.be.true; + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect(stepSpy.calledWith('failed to spawn gitleaks, please contact an administrator\n')).to + .be.true; + }); + + it('should handle empty gitleaks entry in proxy.config.json', async () => { + stubs.getAPIs.returns({ gitleaks: {} }); + const result = await exec(req, action); + expect(result.error).to.be.false; + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + }); + + it('should handle invalid gitleaks entry in proxy.config.json', async () => { + stubs.getAPIs.returns({ gitleaks: 'invalid config' }); + stubs.spawn.onFirstCall().returns({ + on: (event, cb) => { + if (event === 'close') cb(0); + return { stdout: { on: () => {} }, stderr: { on: () => {} } }; + }, + stdout: { on: (_, cb) => cb('') }, + stderr: { on: (_, cb) => cb('') }, + }); + + const result = await exec(req, action); + + expect(result.error).to.be.false; + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + }); + + it('should handle custom config path', async () => { + stubs.getAPIs.returns({ + gitleaks: { + enabled: true, + configPath: `../fixtures/gitleaks-config.toml`, + }, + }); + + stubs.fs.stat.resolves({ isFile: () => true }); + stubs.fs.access.resolves(); + + const gitRootCommitMock = { + exitCode: 0, + stdout: 'rootcommit123\n', + stderr: '', + }; + + const gitleaksMock = { + exitCode: 0, + stdout: '', + stderr: 'No leaks found', + }; + + stubs.spawn + .onFirstCall() + .returns({ + on: (event, cb) => { + if (event === 
'close') cb(gitRootCommitMock.exitCode); + return { stdout: { on: () => {} }, stderr: { on: () => {} } }; + }, + stdout: { on: (_, cb) => cb(gitRootCommitMock.stdout) }, + stderr: { on: (_, cb) => cb(gitRootCommitMock.stderr) }, + }) + .onSecondCall() + .returns({ + on: (event, cb) => { + if (event === 'close') cb(gitleaksMock.exitCode); + return { stdout: { on: () => {} }, stderr: { on: () => {} } }; + }, + stdout: { on: (_, cb) => cb(gitleaksMock.stdout) }, + stderr: { on: (_, cb) => cb(gitleaksMock.stderr) }, + }); + + const result = await exec(req, action); + + expect(result.error).to.be.false; + expect(result.steps[0].error).to.be.false; + expect(stubs.spawn.secondCall.args[1]).to.include( + '--config=../fixtures/gitleaks-config.toml', + ); + }); + + it('should handle invalid custom config path', async () => { + stubs.getAPIs.returns({ + gitleaks: { + enabled: true, + configPath: '/invalid/path.toml', + }, + }); + + stubs.fs.stat.rejects(new Error('File not found')); + + const result = await exec(req, action); + + expect(result.error).to.be.true; + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.true; + expect( + errorStub.calledWith( + 'could not read file at the config path provided, will not be fed to gitleaks', + ), + ).to.be.true; + }); + }); +}); diff --git a/test/processors/scanDiff.emptyDiff.test.js b/test/processors/scanDiff.emptyDiff.test.js new file mode 100644 index 000000000..4a89aba2e --- /dev/null +++ b/test/processors/scanDiff.emptyDiff.test.js @@ -0,0 +1,91 @@ +const { Action } = require('../../src/proxy/actions'); +const { exec } = require('../../src/proxy/processors/push-action/scanDiff'); + +const chai = require('chai'); +const expect = chai.expect; + +describe('scanDiff - Empty Diff Handling', () => { + describe('Empty diff scenarios', () => { + it('should allow empty diff (legitimate empty push)', async () => { + const action = new Action('empty-diff-test', 'push', 'POST', Date.now(), 'test/repo.git'); + + 
// Simulate getDiff step with empty content + const diffStep = { stepName: 'diff', content: '', error: false }; + action.steps = [diffStep]; + + const result = await exec({}, action); + + expect(result.steps.length).to.equal(2); // diff step + scanDiff step + expect(result.steps[1].error).to.be.false; + expect(result.steps[1].errorMessage).to.be.null; + }); + + it('should allow null diff', async () => { + const action = new Action('null-diff-test', 'push', 'POST', Date.now(), 'test/repo.git'); + + // Simulate getDiff step with null content + const diffStep = { stepName: 'diff', content: null, error: false }; + action.steps = [diffStep]; + + const result = await exec({}, action); + + expect(result.steps.length).to.equal(2); + expect(result.steps[1].error).to.be.false; + expect(result.steps[1].errorMessage).to.be.null; + }); + + it('should allow undefined diff', async () => { + const action = new Action('undefined-diff-test', 'push', 'POST', Date.now(), 'test/repo.git'); + + // Simulate getDiff step with undefined content + const diffStep = { stepName: 'diff', content: undefined, error: false }; + action.steps = [diffStep]; + + const result = await exec({}, action); + + expect(result.steps.length).to.equal(2); + expect(result.steps[1].error).to.be.false; + expect(result.steps[1].errorMessage).to.be.null; + }); + }); + + describe('Normal diff processing', () => { + it('should process valid diff content without blocking', async () => { + const action = new Action('valid-diff-test', 'push', 'POST', Date.now(), 'test/repo.git'); + action.project = 'test-org'; + + // Simulate normal diff content + const normalDiff = `diff --git a/config.js b/config.js +index 1234567..abcdefg 100644 +--- a/config.js ++++ b/config.js +@@ -1,3 +1,4 @@ + module.exports = { ++ newFeature: true, + database: "production" + };`; + + const diffStep = { stepName: 'diff', content: normalDiff, error: false }; + action.steps = [diffStep]; + + const result = await exec({}, action); + + 
expect(result.steps[1].error).to.be.false; + expect(result.steps[1].errorMessage).to.be.null; + }); + }); + + describe('Error conditions', () => { + it('should handle non-string diff content', async () => { + const action = new Action('non-string-test', 'push', 'POST', Date.now(), 'test/repo.git'); + + const diffStep = { stepName: 'diff', content: 12345, error: false }; + action.steps = [diffStep]; + + const result = await exec({}, action); + + expect(result.steps[1].error).to.be.true; + expect(result.steps[1].errorMessage).to.include('non-string value'); + }); + }); +}); diff --git a/test/processors/scanDiff.test.js b/test/processors/scanDiff.test.js new file mode 100644 index 000000000..bd8afd99d --- /dev/null +++ b/test/processors/scanDiff.test.js @@ -0,0 +1,321 @@ +const chai = require('chai'); +const crypto = require('crypto'); +const processor = require('../../src/proxy/processors/push-action/scanDiff'); +const { Action } = require('../../src/proxy/actions/Action'); +const { expect } = chai; +const config = require('../../src/config'); +const db = require('../../src/db'); +chai.should(); + +// Load blocked literals and patterns from configuration... 
+const commitConfig = require('../../src/config/index').getCommitConfig(); +const privateOrganizations = config.getPrivateOrganizations(); + +const blockedLiterals = commitConfig.diff.block.literals; +const generateDiff = (value) => { + return `diff --git a/package.json b/package.json +index 38cdc3e..8a9c321 100644 +--- a/package.json ++++ b/package.json +@@ -36,7 +36,7 @@ + "express-session": "^1.17.1", + "generate-password": "^1.5.1", + "history": "5.3.0", +- "lodash": "^4.17.21", ++ "lodash": "^4.1${value}7.21", + "moment": "^2.29.4", + "mongodb": "^5.0", + "nodemailer": "^6.6.1", + `; +}; + +const generateMultiLineDiff = () => { + return `diff --git a/README.md b/README.md +index 8b97e49..de18d43 100644 +--- a/README.md ++++ b/README.md +@@ -1,2 +1,5 @@ + # gitproxy-test-delete-me + Project to test gitproxy ++AKIAIOSFODNN7EXAMPLE ++AKIAIOSFODNN7EXAMPLE ++AKIAIOSFODNN8EXAMPLE +`; +}; + +const generateMultiLineDiffWithLiteral = () => { + return `diff --git a/README.md b/README.md +index 8b97e49..de18d43 100644 +--- a/README.md ++++ b/README.md +@@ -1,2 +1,5 @@ + # gitproxy-test-delete-me + Project to test gitproxy ++AKIAIOSFODNN7EXAMPLE ++AKIAIOSFODNN8EXAMPLE ++blockedTestLiteral +`; +}; +describe('Scan commit diff...', async () => { + privateOrganizations[0] = 'private-org-test'; + commitConfig.diff = { + block: { + literals: ['blockedTestLiteral'], + patterns: [], + providers: { + 'AWS (Amazon Web Services) Access Key ID': + 'A(AG|CC|GP|ID|IP|KI|NP|NV|PK|RO|SC|SI)A[A-Z0-9]{16}', + 'Google Cloud Platform API Key': 'AIza[0-9A-Za-z-_]{35}', + 'GitHub Personal Access Token': 'ghp_[a-zA-Z0-9]{36}', + 'GitHub Fine Grained Personal Access Token': 'github_pat_[a-zA-Z0-9]{22}_[a-zA-Z0-9]{59}', + 'GitHub Actions Token': 'ghs_[a-zA-Z0-9]{36}', + 'JSON Web Token (JWT)': 'ey[A-Za-z0-9-_=]{18,}.ey[A-Za-z0-9-_=]{18,}.[A-Za-z0-9-_.]{18,}', + }, + }, + }; + + before(async () => { + // needed for private org tests + const repo = await db.createRepo(TEST_REPO); + TEST_REPO._id = 
repo._id; + }); + + after(async () => { + await db.deleteRepo(TEST_REPO._id); + }); + + it('A diff including an AWS (Amazon Web Services) Access Key ID blocks the proxy...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateDiff('AKIAIOSFODNN7EXAMPLE'), + }, + ]; + action.setCommit('38cdc3e', '8a9c321'); + action.setBranch('b'); + action.setMessage('Message'); + + const { error, errorMessage } = await processor.exec(null, action); + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + }); + + // Formatting test + it('A diff including multiple AWS (Amazon Web Services) Access Keys ID blocks the proxy...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateMultiLineDiff(), + }, + ]; + action.setCommit('8b97e49', 'de18d43'); + + const { error, errorMessage } = await processor.exec(null, action); + + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + expect(errorMessage).to.contains('Line(s) of code: 3,4'); // blocked lines + expect(errorMessage).to.contains('#1 AWS (Amazon Web Services) Access Key ID'); // type of error + expect(errorMessage).to.contains('#2 AWS (Amazon Web Services) Access Key ID'); // type of error + }); + + // Formatting test + it('A diff including multiple AWS Access Keys ID and Literal blocks the proxy with appropriate message...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateMultiLineDiffWithLiteral(), + }, + ]; + action.setCommit('8b97e49', 'de18d43'); + + const { error, errorMessage } = await processor.exec(null, action); + + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + expect(errorMessage).to.contains('Line(s) of code: 
3'); // blocked lines + expect(errorMessage).to.contains('Line(s) of code: 4'); // blocked lines + expect(errorMessage).to.contains('Line(s) of code: 5'); // blocked lines + expect(errorMessage).to.contains('#1 AWS (Amazon Web Services) Access Key ID'); // type of error + expect(errorMessage).to.contains('#2 AWS (Amazon Web Services) Access Key ID'); // type of error + expect(errorMessage).to.contains('#3 Offending Literal'); + }); + + it('A diff including a Google Cloud Platform API Key blocks the proxy...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateDiff('AIza0aB7Z4Rfs23MnPqars81yzu19KbH72zaFda'), + }, + ]; + action.commitFrom = '38cdc3e'; + action.commitTo = '8a9c321'; + + const { error, errorMessage } = await processor.exec(null, action); + + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + }); + + it('A diff including a GitHub Personal Access Token blocks the proxy...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateDiff(`ghp_${crypto.randomBytes(36).toString('hex')}`), + }, + ]; + + const { error, errorMessage } = await processor.exec(null, action); + + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + }); + + it('A diff including a GitHub Fine Grained Personal Access Token blocks the proxy...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateDiff( + `github_pat_1SMAGDFOYZZK3P9ndFemen_${crypto.randomBytes(59).toString('hex')}`, + ), + }, + ]; + action.commitFrom = '38cdc3e'; + action.commitTo = '8a9c321'; + + const { error, errorMessage } = await processor.exec(null, action); + + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + 
}); + + it('A diff including a GitHub Actions Token blocks the proxy...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateDiff(`ghs_${crypto.randomBytes(20).toString('hex')}`), + }, + ]; + action.commitFrom = '38cdc3e'; + action.commitTo = '8a9c321'; + + const { error, errorMessage } = await processor.exec(null, action); + + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + }); + + it('A diff including a JSON Web Token (JWT) blocks the proxy...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateDiff( + `eyJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJ1cm46Z21haWwuY29tOmNsaWVudElkOjEyMyIsInN1YiI6IkphbmUgRG9lIiwiaWF0IjoxNTIzOTAxMjM0LCJleHAiOjE1MjM5ODc2MzR9.s5_hA8hyIT5jXfU9PlXJ-R74m5F_aPcVEFJSV-g-_kX`, + ), + }, + ]; + action.commitFrom = '38cdc3e'; + action.commitTo = '8a9c321'; + + const { error, errorMessage } = await processor.exec(null, action); + + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + }); + + it('A diff including a blocked literal blocks the proxy...', async () => { + for (const [literal] of blockedLiterals.entries()) { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateDiff(literal), + }, + ]; + action.commitFrom = '38cdc3e'; + action.commitTo = '8a9c321'; + + const { error, errorMessage } = await processor.exec(null, action); + + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + } + }); + it('When no diff is present, the proxy allows the push (legitimate empty diff)...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: null, + }, + ]; + + const result = await 
processor.exec(null, action); + const scanDiffStep = result.steps.find((s) => s.stepName === 'scanDiff'); + + expect(scanDiffStep.error).to.be.false; + }); + + it('When diff is not a string, the proxy is blocked...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: 1337, + }, + ]; + + const { error, errorMessage } = await processor.exec(null, action); + + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + }); + + it('A diff with no secrets or sensitive information does not block the proxy...', async () => { + const action = new Action('1', 'type', 'method', 1, 'test/repo.git'); + action.steps = [ + { + stepName: 'diff', + content: generateDiff(''), + }, + ]; + action.commitFrom = '38cdc3e'; + action.commitTo = '8a9c321'; + + const { error } = await processor.exec(null, action); + expect(error).to.be.false; + }); + + const TEST_REPO = { + project: 'private-org-test', + name: 'repo.git', + url: 'https://github.com/private-org-test/repo.git', + }; + + it('A diff including a provider token in a private organization does not block the proxy...', async () => { + const action = new Action( + '1', + 'type', + 'method', + 1, + 'https://github.com/private-org-test/repo.git', // URL needs to be parseable AND exist in DB + ); + action.steps = [ + { + stepName: 'diff', + content: generateDiff('AKIAIOSFODNN7EXAMPLE'), + }, + ]; + + const { error } = await processor.exec(null, action); + expect(error).to.be.false; + }); +}); diff --git a/test/processors/testCheckRepoInAuthList.test.js b/test/processors/testCheckRepoInAuthList.test.js new file mode 100644 index 000000000..9328cb8c3 --- /dev/null +++ b/test/processors/testCheckRepoInAuthList.test.js @@ -0,0 +1,52 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const fc = require('fast-check'); +const actions = require('../../src/proxy/actions/Action'); +const processor = 
require('../../src/proxy/processors/push-action/checkRepoInAuthorisedList'); +const expect = chai.expect; +const db = require('../../src/db'); + +describe('Check a Repo is in the authorised list', async () => { + afterEach(() => { + sinon.restore(); + }); + + it('accepts the action if the repository is whitelisted in the db', async () => { + sinon.stub(db, 'getRepoByUrl').resolves({ + name: 'repo-is-ok', + project: 'thisproject', + url: 'https://github.com/thisproject/repo-is-ok', + }); + + const action = new actions.Action('123', 'type', 'get', 1234, 'thisproject/repo-is-ok'); + const result = await processor.exec(null, action); + expect(result.error).to.be.false; + expect(result.steps[0].logs[0]).to.eq( + 'checkRepoInAuthorisedList - repo thisproject/repo-is-ok is in the authorisedList', + ); + }); + + it('rejects the action if repository not in the db', async () => { + sinon.stub(db, 'getRepoByUrl').resolves(null); + + const action = new actions.Action('123', 'type', 'get', 1234, 'thisproject/repo-is-not-ok'); + const result = await processor.exec(null, action); + expect(result.error).to.be.true; + expect(result.steps[0].logs[0]).to.eq( + 'checkRepoInAuthorisedList - repo thisproject/repo-is-not-ok is not in the authorised whitelist, ending', + ); + }); + + describe('fuzzing', () => { + it('should not crash on random repo names', async () => { + await fc.assert( + fc.asyncProperty(fc.string(), async (repoName) => { + const action = new actions.Action('123', 'type', 'get', 1234, repoName); + const result = await processor.exec(null, action); + expect(result.error).to.be.true; + }), + { numRuns: 1000 }, + ); + }); + }); +}); diff --git a/test/processors/writePack.test.js b/test/processors/writePack.test.js new file mode 100644 index 000000000..746b700ac --- /dev/null +++ b/test/processors/writePack.test.js @@ -0,0 +1,115 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); +const { Action, Step } = 
require('../../src/proxy/actions'); + +chai.should(); +const expect = chai.expect; + +describe('writePack', () => { + let exec; + let readdirSyncStub; + let spawnSyncStub; + let stepLogSpy; + let stepSetContentSpy; + let stepSetErrorSpy; + + beforeEach(() => { + spawnSyncStub = sinon.stub(); + readdirSyncStub = sinon.stub(); + + readdirSyncStub.onFirstCall().returns(['old1.idx']); + readdirSyncStub.onSecondCall().returns(['old1.idx', 'new1.idx']); + + stepLogSpy = sinon.spy(Step.prototype, 'log'); + stepSetContentSpy = sinon.spy(Step.prototype, 'setContent'); + stepSetErrorSpy = sinon.spy(Step.prototype, 'setError'); + + const writePack = proxyquire('../../src/proxy/processors/push-action/writePack', { + child_process: { spawnSync: spawnSyncStub }, + fs: { readdirSync: readdirSyncStub }, + }); + + exec = writePack.exec; + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('exec', () => { + let action; + let req; + + beforeEach(() => { + req = { + body: 'pack data', + }; + action = new Action( + '1234567890', + 'push', + 'POST', + 1234567890, + 'https://github.com/finos/git-proxy.git', + ); + action.proxyGitPath = '/path/to'; + action.repoName = 'repo'; + }); + + it('should execute git receive-pack with correct parameters', async () => { + const dummySpawnOutput = { stdout: 'git receive-pack output', stderr: '', status: 0 }; + spawnSyncStub.returns(dummySpawnOutput); + + const result = await exec(req, action); + + expect(spawnSyncStub.callCount).to.equal(2); + expect(spawnSyncStub.firstCall.args[0]).to.equal('git'); + expect(spawnSyncStub.firstCall.args[1]).to.deep.equal(['config', 'receive.unpackLimit', '0']); + expect(spawnSyncStub.firstCall.args[2]).to.include({ cwd: '/path/to/repo' }); + + expect(spawnSyncStub.secondCall.args[0]).to.equal('git'); + expect(spawnSyncStub.secondCall.args[1]).to.deep.equal(['receive-pack', 'repo']); + expect(spawnSyncStub.secondCall.args[2]).to.include({ + cwd: '/path/to', + input: 'pack data', + }); + + 
expect(stepLogSpy.calledWith('new idx files: new1.idx')).to.be.true; + expect(stepSetContentSpy.calledWith(dummySpawnOutput)).to.be.true; + + expect(result.steps).to.have.lengthOf(1); + expect(result.steps[0].error).to.be.false; + expect(result.newIdxFiles).to.deep.equal(['new1.idx']); + }); + + it('should handle errors from git receive-pack', async () => { + const error = new Error('git error'); + spawnSyncStub.throws(error); + + try { + await exec(req, action); + throw new Error('Expected error to be thrown'); + } catch (e) { + expect(stepSetErrorSpy.calledOnce).to.be.true; + expect(stepSetErrorSpy.firstCall.args[0]).to.include('git error'); + + expect(action.steps).to.have.lengthOf(1); + expect(action.steps[0].error).to.be.true; + } + }); + + it('should always add the step to the action even if error occurs', async () => { + spawnSyncStub.throws(new Error('git error')); + + try { + await exec(req, action); + } catch (e) { + expect(action.steps).to.have.lengthOf(1); + } + }); + + it('should have the correct displayName', () => { + expect(exec.displayName).to.equal('writePack.exec'); + }); + }); +}); diff --git a/test/proxy.test.js b/test/proxy.test.js new file mode 100644 index 000000000..2612e9383 --- /dev/null +++ b/test/proxy.test.js @@ -0,0 +1,142 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const sinonChai = require('sinon-chai'); +const fs = require('fs'); + +chai.use(sinonChai); +const { expect } = chai; + +describe('Proxy Module TLS Certificate Loading', () => { + let sandbox; + let mockConfig; + let mockHttpServer; + let mockHttpsServer; + let proxyModule; + + beforeEach(() => { + sandbox = sinon.createSandbox(); + + mockConfig = { + getTLSEnabled: sandbox.stub(), + getTLSKeyPemPath: sandbox.stub(), + getTLSCertPemPath: sandbox.stub(), + getPlugins: sandbox.stub().returns([]), + getAuthorisedList: sandbox.stub().returns([]), + }; + + const mockDb = { + getRepos: sandbox.stub().resolves([]), + createRepo: sandbox.stub().resolves(), 
+ addUserCanPush: sandbox.stub().resolves(), + addUserCanAuthorise: sandbox.stub().resolves(), + }; + + const mockPluginLoader = { + load: sandbox.stub().resolves(), + }; + + mockHttpServer = { + listen: sandbox.stub().callsFake((port, callback) => { + if (callback) callback(); + return mockHttpServer; + }), + close: sandbox.stub().callsFake((callback) => { + if (callback) callback(); + }), + }; + + mockHttpsServer = { + listen: sandbox.stub().callsFake((port, callback) => { + if (callback) callback(); + return mockHttpsServer; + }), + close: sandbox.stub().callsFake((callback) => { + if (callback) callback(); + }), + }; + + sandbox.stub(require('../src/plugin'), 'PluginLoader').returns(mockPluginLoader); + + const configModule = require('../src/config'); + sandbox.stub(configModule, 'getTLSEnabled').callsFake(mockConfig.getTLSEnabled); + sandbox.stub(configModule, 'getTLSKeyPemPath').callsFake(mockConfig.getTLSKeyPemPath); + sandbox.stub(configModule, 'getTLSCertPemPath').callsFake(mockConfig.getTLSCertPemPath); + sandbox.stub(configModule, 'getPlugins').callsFake(mockConfig.getPlugins); + sandbox.stub(configModule, 'getAuthorisedList').callsFake(mockConfig.getAuthorisedList); + + const dbModule = require('../src/db'); + sandbox.stub(dbModule, 'getRepos').callsFake(mockDb.getRepos); + sandbox.stub(dbModule, 'createRepo').callsFake(mockDb.createRepo); + sandbox.stub(dbModule, 'addUserCanPush').callsFake(mockDb.addUserCanPush); + sandbox.stub(dbModule, 'addUserCanAuthorise').callsFake(mockDb.addUserCanAuthorise); + + const chain = require('../src/proxy/chain'); + chain.chainPluginLoader = null; + + process.env.NODE_ENV = 'test'; + process.env.GIT_PROXY_HTTPS_SERVER_PORT = '8443'; + + // Import proxy module after mocks are set up + delete require.cache[require.resolve('../src/proxy/index')]; + const ProxyClass = require('../src/proxy/index').default; + proxyModule = new ProxyClass(); + }); + + afterEach(async () => { + try { + await proxyModule.stop(); + } catch 
(error) { + // Ignore errors during cleanup + } + sandbox.restore(); + }); + + describe('TLS certificate file reading', () => { + it('should read TLS key and cert files when TLS is enabled and paths are provided', async () => { + const mockKeyContent = Buffer.from('mock-key-content'); + const mockCertContent = Buffer.from('mock-cert-content'); + + mockConfig.getTLSEnabled.returns(true); + mockConfig.getTLSKeyPemPath.returns('/path/to/key.pem'); + mockConfig.getTLSCertPemPath.returns('/path/to/cert.pem'); + + const fsStub = sandbox.stub(fs, 'readFileSync'); + fsStub.returns(Buffer.from('default-cert')); + fsStub.withArgs('/path/to/key.pem').returns(mockKeyContent); + fsStub.withArgs('/path/to/cert.pem').returns(mockCertContent); + await proxyModule.start(); + + // Check if files should have been read + if (fsStub.called) { + expect(fsStub).to.have.been.calledWith('/path/to/key.pem'); + expect(fsStub).to.have.been.calledWith('/path/to/cert.pem'); + } else { + console.log('fs.readFileSync was never called - TLS certificate reading not triggered'); + } + }); + + it('should not read TLS files when TLS is disabled', async () => { + mockConfig.getTLSEnabled.returns(false); + mockConfig.getTLSKeyPemPath.returns('/path/to/key.pem'); + mockConfig.getTLSCertPemPath.returns('/path/to/cert.pem'); + + const fsStub = sandbox.stub(fs, 'readFileSync'); + + await proxyModule.start(); + + expect(fsStub).not.to.have.been.called; + }); + + it('should not read TLS files when paths are not provided', async () => { + mockConfig.getTLSEnabled.returns(true); + mockConfig.getTLSKeyPemPath.returns(null); + mockConfig.getTLSCertPemPath.returns(null); + + const fsStub = sandbox.stub(fs, 'readFileSync'); + + await proxyModule.start(); + + expect(fsStub).not.to.have.been.called; + }); + }); +}); diff --git a/test/proxyURL.test.js b/test/proxyURL.test.js new file mode 100644 index 000000000..4d12b5199 --- /dev/null +++ b/test/proxyURL.test.js @@ -0,0 +1,51 @@ +const chai = require('chai'); +const 
sinon = require('sinon'); +const express = require('express'); +const chaiHttp = require('chai-http'); +const { getProxyURL } = require('../src/service/urls'); +const config = require('../src/config'); + +chai.use(chaiHttp); +chai.should(); +const expect = chai.expect; + +const genSimpleServer = () => { + const app = express(); + app.get('/', (req, res) => { + res.contentType('text/html'); + res.send(getProxyURL(req)); + }); + return app; +}; + +describe('proxyURL', async () => { + afterEach(() => { + sinon.restore(); + }); + + it('pulls the request path with no override', async () => { + const app = genSimpleServer(); + const res = await chai.request(app).get('/').send(); + res.should.have.status(200); + + // request url without trailing slash + const reqURL = res.request.url.slice(0, -1); + expect(res.text).to.equal(reqURL); + expect(res.text).to.match(/https?:\/\/127.0.0.1:\d+/); + }); + + it('can override providing a proxy value', async () => { + const proxyURL = 'https://amazing-proxy.path.local'; + // stub getDomains + const configGetDomainsStub = sinon.stub(config, 'getDomains').returns({ proxy: proxyURL }); + + const app = genSimpleServer(); + const res = await chai.request(app).get('/').send(); + res.should.have.status(200); + + // the stub worked + expect(configGetDomainsStub.calledOnce).to.be.true; + + expect(res.text).to.equal(proxyURL); + }); +}); diff --git a/test/pushUtils.test.js b/test/pushUtils.test.js new file mode 100644 index 000000000..06493461a --- /dev/null +++ b/test/pushUtils.test.js @@ -0,0 +1,352 @@ +const { expect } = require('chai'); +const { + isTagPush, + getDisplayTimestamp, + getTagName, + getRefToShow, + getShaOrTag, + getCommitterOrTagger, + getAuthor, + getAuthorEmail, + getMessage, + getCommitCount, + getRepoFullName, + getGitHubUrl, + isValidValue, +} = require('../src/ui/utils/pushUtils'); + +describe('pushUtils', () => { + const mockCommitData = [ + { + commitTs: 1640995200, // 2022-01-01 00:00:00 + commitTimestamp: 
1640995200, + message: 'feat: add new feature', + committer: 'john-doe', + author: 'jane-smith', + authorEmail: 'jane@example.com', + }, + ]; + + const mockTagData = [ + { + tagName: 'v1.0.0', + type: 'annotated', + tagger: 'release-bot', + message: 'Release version 1.0.0', + timestamp: 1640995300, // 2022-01-01 00:01:40 + }, + ]; + + const mockCommitPush = { + id: 'push-1', + repo: 'test-repo.git', + branch: 'refs/heads/main', + commitTo: '1234567890abcdef', + commitData: mockCommitData, + }; + + const mockTagPush = { + id: 'push-2', + repo: 'test-repo.git', + branch: 'refs/heads/main', + tag: 'refs/tags/v1.0.0', + tagData: mockTagData, + user: 'release-bot', + commitTo: '1234567890abcdef', + commitData: mockCommitData, + }; + + describe('isTagPush', () => { + it('returns true for tag push with tag data', () => { + expect(isTagPush(mockTagPush)).to.be.true; + }); + + it('returns false for regular commit push', () => { + expect(isTagPush(mockCommitPush)).to.be.false; + }); + + it('returns false for tag push without tagData', () => { + const pushWithoutTagData = { ...mockTagPush, tagData: [] }; + expect(isTagPush(pushWithoutTagData)).to.be.false; + }); + + it('returns false for undefined push data', () => { + expect(isTagPush(undefined)).to.be.false; + }); + }); + + describe('getDisplayTimestamp', () => { + it('returns tag timestamp when isTag is true and tagData exists', () => { + const result = getDisplayTimestamp(true, mockCommitData[0], mockTagData[0]); + expect(result).to.include('2022'); + }); + + it('returns commit timestamp when isTag is false', () => { + const result = getDisplayTimestamp(false, mockCommitData[0]); + expect(result).to.include('2022'); + }); + + it('returns commit timestamp when isTag is true but no tagData', () => { + const result = getDisplayTimestamp(true, mockCommitData[0], undefined); + expect(result).to.include('2022'); + }); + + it('returns N/A when no valid timestamps', () => { + const result = getDisplayTimestamp(false, null); + 
expect(result).to.equal('N/A'); + }); + + it('prefers commitTimestamp over commitTs', () => { + const commitWithBothTimestamps = { + commitTs: 1640995100, + commitTimestamp: 1640995200, + }; + const result = getDisplayTimestamp(false, commitWithBothTimestamps); + expect(result).to.include('2022'); + }); + }); + + describe('getTagName', () => { + it('extracts tag name from refs/tags/ reference', () => { + expect(getTagName('refs/tags/v1.0.0')).to.equal('v1.0.0'); + }); + + it('handles tag name without refs/tags/ prefix', () => { + expect(getTagName('v1.0.0')).to.equal('v1.0.0'); + }); + + it('returns empty string for undefined input', () => { + expect(getTagName(undefined)).to.equal(''); + }); + + it('returns empty string for null input', () => { + expect(getTagName(null)).to.equal(''); + }); + + it('returns empty string for non-string input', () => { + expect(getTagName(123)).to.equal(''); + }); + + it('handles complex tag names', () => { + expect(getTagName('refs/tags/v1.0.0-beta.1+build.123')).to.equal('v1.0.0-beta.1+build.123'); + }); + }); + + describe('getRefToShow', () => { + it('returns tag name for tag push', () => { + expect(getRefToShow(mockTagPush)).to.equal('v1.0.0'); + }); + + it('returns branch name for commit push', () => { + expect(getRefToShow(mockCommitPush)).to.equal('main'); + }); + }); + + describe('getShaOrTag', () => { + it('returns tag name for tag push', () => { + expect(getShaOrTag(mockTagPush)).to.equal('v1.0.0'); + }); + + it('returns shortened SHA for commit push', () => { + expect(getShaOrTag(mockCommitPush)).to.equal('12345678'); + }); + + it('handles invalid commitTo gracefully', () => { + const pushWithInvalidCommit = { ...mockCommitPush, commitTo: null }; + expect(getShaOrTag(pushWithInvalidCommit)).to.equal('N/A'); + }); + + it('handles non-string commitTo', () => { + const pushWithInvalidCommit = { ...mockCommitPush, commitTo: 123 }; + expect(getShaOrTag(pushWithInvalidCommit)).to.equal('N/A'); + }); + }); + + 
describe('getCommitterOrTagger', () => { + it('returns tagger for tag push', () => { + expect(getCommitterOrTagger(mockTagPush)).to.equal('release-bot'); + }); + + it('returns committer for commit push', () => { + expect(getCommitterOrTagger(mockCommitPush)).to.equal('john-doe'); + }); + + it('returns N/A for empty commitData', () => { + const pushWithEmptyCommits = { ...mockCommitPush, commitData: [] }; + expect(getCommitterOrTagger(pushWithEmptyCommits)).to.equal('N/A'); + }); + + it('returns N/A for invalid commitData', () => { + const pushWithInvalidCommits = { ...mockCommitPush, commitData: null }; + expect(getCommitterOrTagger(pushWithInvalidCommits)).to.equal('N/A'); + }); + }); + + describe('getAuthor', () => { + it('returns tagger for tag push', () => { + expect(getAuthor(mockTagPush)).to.equal('release-bot'); + }); + + it('returns author for commit push', () => { + expect(getAuthor(mockCommitPush)).to.equal('jane-smith'); + }); + + it('returns N/A when author is missing', () => { + const pushWithoutAuthor = { + ...mockCommitPush, + commitData: [{ ...mockCommitData[0], author: undefined }], + }; + expect(getAuthor(pushWithoutAuthor)).to.equal('N/A'); + }); + }); + + describe('getAuthorEmail', () => { + it('returns N/A for tag push', () => { + expect(getAuthorEmail(mockTagPush)).to.equal('N/A'); + }); + + it('returns author email for commit push', () => { + expect(getAuthorEmail(mockCommitPush)).to.equal('jane@example.com'); + }); + + it('returns N/A when email is missing', () => { + const pushWithoutEmail = { + ...mockCommitPush, + commitData: [{ ...mockCommitData[0], authorEmail: undefined }], + }; + expect(getAuthorEmail(pushWithoutEmail)).to.equal('N/A'); + }); + }); + + describe('getMessage', () => { + it('returns tag message for tag push', () => { + expect(getMessage(mockTagPush)).to.equal('Release version 1.0.0'); + }); + + it('returns commit message for commit push', () => { + expect(getMessage(mockCommitPush)).to.equal('feat: add new feature'); + 
}); + + it('falls back to commit message for tag push without tag message', () => { + const tagPushWithoutMessage = { + ...mockTagPush, + tagData: [{ ...mockTagData[0], message: undefined }], + }; + expect(getMessage(tagPushWithoutMessage)).to.equal('feat: add new feature'); + }); + + it('returns empty string for tag push without any message', () => { + const tagPushWithoutAnyMessage = { + ...mockTagPush, + tagData: [{ ...mockTagData[0], message: undefined }], + commitData: [{ ...mockCommitData[0], message: undefined }], + }; + expect(getMessage(tagPushWithoutAnyMessage)).to.equal(''); + }); + }); + + describe('getCommitCount', () => { + it('returns commit count', () => { + expect(getCommitCount(mockCommitPush)).to.equal(1); + }); + + it('returns 0 for empty commitData', () => { + const pushWithoutCommits = { ...mockCommitPush, commitData: [] }; + expect(getCommitCount(pushWithoutCommits)).to.equal(0); + }); + + it('returns 0 for undefined commitData', () => { + const pushWithoutCommits = { ...mockCommitPush, commitData: undefined }; + expect(getCommitCount(pushWithoutCommits)).to.equal(0); + }); + }); + + describe('getRepoFullName', () => { + it('removes .git suffix', () => { + expect(getRepoFullName('test-repo.git')).to.equal('test-repo'); + }); + + it('handles repo without .git suffix', () => { + expect(getRepoFullName('test-repo')).to.equal('test-repo'); + }); + }); + + describe('getGitHubUrl', () => { + it('generates correct repo URL', () => { + expect(getGitHubUrl.repo('owner/repo')).to.equal('https://github.com/owner/repo'); + }); + + it('generates correct commit URL', () => { + expect(getGitHubUrl.commit('owner/repo', 'abc123')).to.equal( + 'https://github.com/owner/repo/commit/abc123', + ); + }); + + it('generates correct branch URL', () => { + expect(getGitHubUrl.branch('owner/repo', 'main')).to.equal( + 'https://github.com/owner/repo/tree/main', + ); + }); + + it('generates correct tag URL', () => { + expect(getGitHubUrl.tag('owner/repo', 
'v1.0.0')).to.equal( + 'https://github.com/owner/repo/releases/tag/v1.0.0', + ); + }); + + it('generates correct user URL', () => { + expect(getGitHubUrl.user('username')).to.equal('https://github.com/username'); + }); + }); + + describe('isValidValue', () => { + it('returns true for valid string', () => { + expect(isValidValue('valid')).to.be.true; + }); + + it('returns false for N/A', () => { + expect(isValidValue('N/A')).to.be.false; + }); + + it('returns false for empty string', () => { + expect(isValidValue('')).to.be.false; + }); + + it('returns false for undefined', () => { + expect(isValidValue(undefined)).to.be.false; + }); + + it('returns false for null', () => { + expect(isValidValue(null)).to.be.false; + }); + }); + + describe('edge cases and error handling', () => { + it('handles malformed tag reference in getTagName', () => { + // Should not throw error + expect(() => getTagName('malformed-ref')).to.not.throw(); + expect(getTagName('malformed-ref')).to.equal('malformed-ref'); + }); + + it('handles missing properties gracefully', () => { + const incompletePush = { + id: 'incomplete', + commitData: [], + }; + + expect(() => getCommitterOrTagger(incompletePush)).to.not.throw(); + expect(() => getAuthor(incompletePush)).to.not.throw(); + expect(() => getMessage(incompletePush)).to.not.throw(); + expect(() => getCommitCount(incompletePush)).to.not.throw(); + }); + + it('handles non-array commitData', () => { + const pushWithInvalidCommits = { + ...mockCommitPush, + commitData: 'not-an-array', + }; + + expect(getCommitterOrTagger(pushWithInvalidCommits)).to.equal('N/A'); + }); + }); +}); diff --git a/test/services/routes/auth.test.js b/test/services/routes/auth.test.js new file mode 100644 index 000000000..52106184b --- /dev/null +++ b/test/services/routes/auth.test.js @@ -0,0 +1,228 @@ +const chai = require('chai'); +const chaiHttp = require('chai-http'); +const sinon = require('sinon'); +const express = require('express'); +const { router, 
loginSuccessHandler } = require('../../../src/service/routes/auth'); +const db = require('../../../src/db'); + +const { expect } = chai; +chai.use(chaiHttp); + +const newApp = (username) => { + const app = express(); + app.use(express.json()); + + if (username) { + app.use((req, res, next) => { + req.user = { username }; + next(); + }); + } + + app.use('/auth', router); + return app; +}; + +describe('Auth API', function () { + afterEach(function () { + sinon.restore(); + }); + + describe('/gitAccount', () => { + beforeEach(() => { + sinon.stub(db, 'findUser').callsFake((username) => { + if (username === 'alice') { + return Promise.resolve({ + username: 'alice', + displayName: 'Alice Munro', + gitAccount: 'ORIGINAL_GIT_ACCOUNT', + email: 'alice@example.com', + admin: true, + }); + } else if (username === 'bob') { + return Promise.resolve({ + username: 'bob', + displayName: 'Bob Woodward', + gitAccount: 'WOODY_GIT_ACCOUNT', + email: 'bob@example.com', + admin: false, + }); + } + return Promise.resolve(null); + }); + }); + + afterEach(() => { + sinon.restore(); + }); + + it('POST /gitAccount returns Unauthorized if authenticated user not in request', async () => { + const res = await chai.request(newApp()).post('/auth/gitAccount').send({ + username: 'alice', + gitAccount: '', + }); + + expect(res).to.have.status(401); + }); + + it('POST /gitAccount updates git account for authenticated user', async () => { + const updateUserStub = sinon.stub(db, 'updateUser').resolves(); + + const res = await chai.request(newApp('alice')).post('/auth/gitAccount').send({ + username: 'alice', + gitAccount: 'UPDATED_GIT_ACCOUNT', + }); + + expect(res).to.have.status(200); + expect( + updateUserStub.calledOnceWith({ + username: 'alice', + displayName: 'Alice Munro', + gitAccount: 'UPDATED_GIT_ACCOUNT', + email: 'alice@example.com', + admin: true, + }), + ).to.be.true; + }); + + it('POST /gitAccount prevents non-admin user changing a different user gitAccount', async () => { + const 
updateUserStub = sinon.stub(db, 'updateUser').resolves(); + + const res = await chai.request(newApp('bob')).post('/auth/gitAccount').send({ + username: 'phil', + gitAccount: 'UPDATED_GIT_ACCOUNT', + }); + + expect(res).to.have.status(403); + expect(updateUserStub.called).to.be.false; + }); + + it('POST /gitAccount lets admin user change a different users gitAccount', async () => { + const updateUserStub = sinon.stub(db, 'updateUser').resolves(); + + const res = await chai.request(newApp('alice')).post('/auth/gitAccount').send({ + username: 'bob', + gitAccount: 'UPDATED_GIT_ACCOUNT', + }); + + expect(res).to.have.status(200); + expect( + updateUserStub.calledOnceWith({ + username: 'bob', + displayName: 'Bob Woodward', + email: 'bob@example.com', + admin: false, + gitAccount: 'UPDATED_GIT_ACCOUNT', + }), + ).to.be.true; + }); + + it('POST /gitAccount allows non-admin user to update their own gitAccount', async () => { + const updateUserStub = sinon.stub(db, 'updateUser').resolves(); + + const res = await chai.request(newApp('bob')).post('/auth/gitAccount').send({ + username: 'bob', + gitAccount: 'UPDATED_GIT_ACCOUNT', + }); + + expect(res).to.have.status(200); + expect( + updateUserStub.calledOnceWith({ + username: 'bob', + displayName: 'Bob Woodward', + email: 'bob@example.com', + admin: false, + gitAccount: 'UPDATED_GIT_ACCOUNT', + }), + ).to.be.true; + }); + }); + + describe('loginSuccessHandler', function () { + it('should log in user and return public user data', async function () { + const user = { + username: 'bob', + password: 'secret', + email: 'bob@example.com', + displayName: 'Bob', + }; + + const res = { + send: sinon.spy(), + }; + + await loginSuccessHandler()({ user }, res); + + expect(res.send.calledOnce).to.be.true; + expect(res.send.firstCall.args[0]).to.deep.equal({ + message: 'success', + user: { + admin: false, + displayName: 'Bob', + email: 'bob@example.com', + gitAccount: '', + title: '', + username: 'bob', + }, + }); + }); + }); + + 
describe('/me', function () { + it('GET /me returns Unauthorized if authenticated user not in request', async () => { + const res = await chai.request(newApp()).get('/auth/me'); + + expect(res).to.have.status(401); + }); + + it('GET /me serializes public data representation of current authenticated user', async function () { + sinon.stub(db, 'findUser').resolves({ + username: 'alice', + password: 'secret-hashed-password', + email: 'alice@example.com', + displayName: 'Alice Walker', + otherUserData: 'should not be returned', + }); + + const res = await chai.request(newApp('alice')).get('/auth/me'); + expect(res).to.have.status(200); + expect(res.body).to.deep.equal({ + username: 'alice', + displayName: 'Alice Walker', + email: 'alice@example.com', + title: '', + gitAccount: '', + admin: false, + }); + }); + }); + + describe('/profile', function () { + it('GET /profile returns Unauthorized if authenticated user not in request', async () => { + const res = await chai.request(newApp()).get('/auth/profile'); + + expect(res).to.have.status(401); + }); + + it('GET /profile serializes public data representation of current authenticated user', async function () { + sinon.stub(db, 'findUser').resolves({ + username: 'alice', + password: 'secret-hashed-password', + email: 'alice@example.com', + displayName: 'Alice Walker', + otherUserData: 'should not be returned', + }); + + const res = await chai.request(newApp('alice')).get('/auth/profile'); + expect(res).to.have.status(200); + expect(res.body).to.deep.equal({ + username: 'alice', + displayName: 'Alice Walker', + email: 'alice@example.com', + title: '', + gitAccount: '', + admin: false, + }); + }); + }); +}); diff --git a/test/services/routes/users.test.js b/test/services/routes/users.test.js new file mode 100644 index 000000000..d97afeee3 --- /dev/null +++ b/test/services/routes/users.test.js @@ -0,0 +1,67 @@ +const chai = require('chai'); +const chaiHttp = require('chai-http'); +const sinon = require('sinon'); +const 
express = require('express'); +const usersRouter = require('../../../src/service/routes/users'); +const db = require('../../../src/db'); + +const { expect } = chai; +chai.use(chaiHttp); + +describe('Users API', function () { + let app; + + before(function () { + app = express(); + app.use(express.json()); + app.use('/users', usersRouter); + }); + + beforeEach(function () { + sinon.stub(db, 'getUsers').resolves([ + { + username: 'alice', + password: 'secret-hashed-password', + email: 'alice@example.com', + displayName: 'Alice Walker', + }, + ]); + sinon + .stub(db, 'findUser') + .resolves({ username: 'bob', password: 'hidden', email: 'bob@example.com' }); + }); + + afterEach(function () { + sinon.restore(); + }); + + it('GET /users only serializes public data needed for ui, not user secrets like password', async function () { + const res = await chai.request(app).get('/users'); + expect(res).to.have.status(200); + expect(res.body).to.deep.equal([ + { + username: 'alice', + displayName: 'Alice Walker', + email: 'alice@example.com', + title: '', + gitAccount: '', + admin: false, + }, + ]); + }); + + it('GET /users/:id does not serialize password', async function () { + const res = await chai.request(app).get('/users/bob'); + expect(res).to.have.status(200); + console.log(`Response body: ${res.body}`); + + expect(res.body).to.deep.equal({ + username: 'bob', + displayName: '', + email: 'bob@example.com', + title: '', + gitAccount: '', + admin: false, + }); + }); +}); diff --git a/test/tagPushIntegration.test.js b/test/tagPushIntegration.test.js new file mode 100644 index 000000000..80bd93129 --- /dev/null +++ b/test/tagPushIntegration.test.js @@ -0,0 +1,273 @@ +const { expect } = require('chai'); +const { + isTagPush, + getDisplayTimestamp, + getRefToShow, + getShaOrTag, + getCommitterOrTagger, + getMessage, + getRepoFullName, + getGitHubUrl, +} = require('../src/ui/utils/pushUtils'); + +describe('Tag Push Integration', () => { + describe('complete tag push workflow', 
() => { + const fullTagPush = { + id: 'tag-push-123', + repo: 'finos/git-proxy.git', + branch: 'refs/heads/main', + tag: 'refs/tags/v2.1.0', + user: 'release-manager', + commitFrom: '0000000000000000000000000000000000000000', + commitTo: 'abcdef1234567890abcdef1234567890abcdef12', + timestamp: '2024-01-15T10:30:00Z', + tagData: [ + { + tagName: 'v2.1.0', + type: 'annotated', + tagger: 'release-manager', + message: + 'Release version 2.1.0\n\nThis release includes:\n- New tag push support\n- Improved UI components\n- Better error handling', + timestamp: 1705317000, // 2024-01-15 10:30:00 + }, + ], + commitData: [ + { + commitTs: 1705316700, // 2024-01-15 10:25:00 + commitTimestamp: 1705316700, + message: 'feat: implement tag push support', + committer: 'developer-1', + author: 'developer-1', + authorEmail: 'dev1@finos.org', + }, + { + commitTs: 1705316400, // 2024-01-15 10:20:00 + commitTimestamp: 1705316400, + message: 'docs: update README with tag instructions', + committer: 'developer-2', + author: 'developer-2', + authorEmail: 'dev2@finos.org', + }, + ], + diff: { + content: '+++ new tag support implementation', + }, + }; + + it('correctly identifies as tag push', () => { + expect(isTagPush(fullTagPush)).to.be.true; + }); + + it('generates correct display data for table view', () => { + const repoName = getRepoFullName(fullTagPush.repo); + const refToShow = getRefToShow(fullTagPush); + const shaOrTag = getShaOrTag(fullTagPush); + const committerOrTagger = getCommitterOrTagger(fullTagPush); + const message = getMessage(fullTagPush); + + expect(repoName).to.equal('finos/git-proxy'); + expect(refToShow).to.equal('v2.1.0'); + expect(shaOrTag).to.equal('v2.1.0'); + expect(committerOrTagger).to.equal('release-manager'); + expect(message).to.include('Release version 2.1.0'); + }); + + it('generates correct GitHub URLs for tag push', () => { + const repoName = getRepoFullName(fullTagPush.repo); + const tagName = 'v2.1.0'; + + 
expect(getGitHubUrl.repo(repoName)).to.equal('https://github.com/finos/git-proxy'); + expect(getGitHubUrl.tag(repoName, tagName)).to.equal( + 'https://github.com/finos/git-proxy/releases/tag/v2.1.0', + ); + expect(getGitHubUrl.user('release-manager')).to.equal('https://github.com/release-manager'); + }); + + it('uses tag timestamp over commit timestamp', () => { + const displayTime = getDisplayTimestamp( + true, + fullTagPush.commitData[0], + fullTagPush.tagData[0], + ); + expect(displayTime).to.include('2024'); + expect(displayTime).to.include('Jan 15'); + }); + + it('handles search functionality properly', () => { + const searchableFields = { + repoName: getRepoFullName(fullTagPush.repo).toLowerCase(), + message: getMessage(fullTagPush).toLowerCase(), + tagName: fullTagPush.tag.replace('refs/tags/', '').toLowerCase(), + }; + + expect(searchableFields.repoName).to.include('finos'); + expect(searchableFields.message).to.include('release'); + expect(searchableFields.tagName).to.equal('v2.1.0'); + }); + }); + + describe('lightweight tag push workflow', () => { + const lightweightTagPush = { + id: 'lightweight-tag-123', + repo: 'example/repo.git', + tag: 'refs/tags/quick-fix', + user: 'hotfix-user', + commitTo: 'fedcba0987654321fedcba0987654321fedcba09', + tagData: [ + { + tagName: 'quick-fix', + type: 'lightweight', + tagger: 'hotfix-user', + message: '', + }, + ], + commitData: [ + { + commitTimestamp: 1705317300, + message: 'fix: critical security patch', + committer: 'hotfix-user', + author: 'security-team', + authorEmail: 'security@example.com', + }, + ], + }; + + it('handles lightweight tags correctly', () => { + expect(isTagPush(lightweightTagPush)).to.be.true; + expect(getRefToShow(lightweightTagPush)).to.equal('quick-fix'); + expect(getShaOrTag(lightweightTagPush)).to.equal('quick-fix'); + }); + + it('falls back to commit message for lightweight tags', () => { + const message = getMessage(lightweightTagPush); + expect(message).to.equal('fix: critical security 
patch'); + }); + }); + + describe('edge cases in tag push handling', () => { + it('handles tag push with missing tagData gracefully', () => { + const incompleteTagPush = { + id: 'incomplete-tag', + repo: 'test/repo.git', + tag: 'refs/tags/broken-tag', + user: 'test-user', + commitData: [], + tagData: [], // Empty tagData + }; + + expect(isTagPush(incompleteTagPush)).to.be.false; + expect(getCommitterOrTagger(incompleteTagPush)).to.equal('N/A'); + }); + + it('handles tag push with malformed tag reference', () => { + const malformedTagPush = { + id: 'malformed-tag', + repo: 'test/repo.git', + tag: 'malformed-tag-ref', // Missing refs/tags/ prefix + tagData: [ + { + tagName: 'v1.0.0', + type: 'annotated', + tagger: 'test-user', + message: 'Test release', + }, + ], + commitData: [ + { + commitTimestamp: 1705317000, + message: 'test commit', + committer: 'test-user', + }, + ], + }; + + expect(isTagPush(malformedTagPush)).to.be.true; + expect(() => getRefToShow(malformedTagPush)).to.not.throw(); + expect(getRefToShow(malformedTagPush)).to.equal('malformed-tag-ref'); + }); + + it('handles complex tag names with special characters', () => { + const complexTagPush = { + id: 'complex-tag', + repo: 'test/repo.git', + tag: 'refs/tags/v1.0.0-beta.1+build.123', + tagData: [ + { + tagName: 'v1.0.0-beta.1+build.123', + type: 'annotated', + tagger: 'ci-bot', + message: 'Pre-release build with metadata', + }, + ], + commitData: [ + { + commitTimestamp: 1705317000, + message: 'chore: prepare beta release', + committer: 'ci-bot', + }, + ], + }; + + expect(isTagPush(complexTagPush)).to.be.true; + expect(getRefToShow(complexTagPush)).to.equal('v1.0.0-beta.1+build.123'); + expect(getShaOrTag(complexTagPush)).to.equal('v1.0.0-beta.1+build.123'); + }); + }); + + describe('comparison with regular commit push', () => { + const regularCommitPush = { + id: 'commit-push-456', + repo: 'finos/git-proxy.git', + branch: 'refs/heads/feature-branch', + commitFrom: 
'1111111111111111111111111111111111111111', + commitTo: '2222222222222222222222222222222222222222', + commitData: [ + { + commitTimestamp: 1705317000, + message: 'feat: add new feature', + committer: 'feature-dev', + author: 'feature-dev', + authorEmail: 'dev@finos.org', + }, + ], + }; + + it('differentiates between tag and commit pushes', () => { + const tagPush = { + tag: 'refs/tags/v1.0.0', + tagData: [{ tagName: 'v1.0.0' }], + commitData: [], + }; + + expect(isTagPush(tagPush)).to.be.true; + expect(isTagPush(regularCommitPush)).to.be.false; + }); + + it('generates different URLs for tag vs commit pushes', () => { + const repoName = 'finos/git-proxy'; + + // Tag push URLs + const tagUrl = getGitHubUrl.tag(repoName, 'v1.0.0'); + expect(tagUrl).to.include('/releases/tag/'); + + // Commit push URLs + const commitUrl = getGitHubUrl.commit(repoName, '2222222222222222222222222222222222222222'); + expect(commitUrl).to.include('/commit/'); + + const branchUrl = getGitHubUrl.branch(repoName, 'feature-branch'); + expect(branchUrl).to.include('/tree/'); + }); + + it('shows different committer/author behavior', () => { + const tagPushWithUser = { + tag: 'refs/tags/v1.0.0', + tagData: [{ tagName: 'v1.0.0' }], + user: 'tag-creator', + commitData: [{ committer: 'original-committer' }], + }; + + expect(getCommitterOrTagger(tagPushWithUser)).to.equal('tag-creator'); + expect(getCommitterOrTagger(regularCommitPush)).to.equal('feature-dev'); + }); + }); +}); diff --git a/test/teeAndValidation.test.js b/test/teeAndValidation.test.js new file mode 100644 index 000000000..919dbf401 --- /dev/null +++ b/test/teeAndValidation.test.js @@ -0,0 +1,91 @@ +const { expect } = require('chai'); +const sinon = require('sinon'); +const { PassThrough } = require('stream'); +const proxyquire = require('proxyquire').noCallThru(); + +const fakeRawBody = sinon.stub().resolves(Buffer.from('payload')); + +const fakeChain = { + executeChain: sinon.stub(), +}; + +const { teeAndValidate, isPackPost, 
handleMessage } = proxyquire('../src/proxy/routes', { + 'raw-body': fakeRawBody, + '../chain': fakeChain, +}); + +describe('teeAndValidate middleware', () => { + let req; + let res; + let next; + + beforeEach(() => { + req = new PassThrough(); + req.method = 'POST'; + req.url = '/proj/foo.git/git-upload-pack'; + + res = { + set: sinon.stub().returnsThis(), + status: sinon.stub().returnsThis(), + send: sinon.stub(), + end: sinon.stub(), + }; + next = sinon.spy(); + + fakeRawBody.resetHistory(); + fakeChain.executeChain.resetHistory(); + }); + + it('skips non-pack posts', async () => { + req.method = 'GET'; + await teeAndValidate(req, res, next); + expect(next.calledOnce).to.be.true; + expect(fakeRawBody.called).to.be.false; + }); + + it('when the chain blocks it sends a packet and does NOT call next()', async () => { + fakeChain.executeChain.resolves({ blocked: true, blockedMessage: 'denied!' }); + + req.write('abcd'); + req.end(); + + await teeAndValidate(req, res, next); + + expect(fakeRawBody.calledOnce).to.be.true; + expect(fakeChain.executeChain.calledOnce).to.be.true; + expect(next.called).to.be.false; + + expect(res.set.called).to.be.true; + expect(res.status.calledWith(200)).to.be.true; // status 200 is used to ensure error message is rendered by git client + expect(res.send.calledWith(handleMessage('denied!'))).to.be.true; + }); + + it('when the chain allow it calls next() and overrides req.pipe', async () => { + fakeChain.executeChain.resolves({ blocked: false, error: false }); + + req.write('abcd'); + req.end(); + + await teeAndValidate(req, res, next); + + expect(fakeRawBody.calledOnce).to.be.true; + expect(fakeChain.executeChain.calledOnce).to.be.true; + expect(next.calledOnce).to.be.true; + expect(typeof req.pipe).to.equal('function'); + }); +}); + +describe('isPackPost()', () => { + it('returns true for git-upload-pack POST', () => { + expect(isPackPost({ method: 'POST', url: '/a/b.git/git-upload-pack' })).to.be.true; + }); + it('returns true for 
git-upload-pack POST, with a gitlab style multi-level org', () => { + expect(isPackPost({ method: 'POST', url: '/a/bee/sea/dee.git/git-upload-pack' })).to.be.true; + }); + it('returns true for git-upload-pack POST, with a bare (no org) repo URL', () => { + expect(isPackPost({ method: 'POST', url: '/a.git/git-upload-pack' })).to.be.true; + }); + it('returns false for other URLs', () => { + expect(isPackPost({ method: 'POST', url: '/info/refs' })).to.be.false; + }); +}); diff --git a/test/testActiveDirectoryAuth.test.js b/test/testActiveDirectoryAuth.test.js new file mode 100644 index 000000000..29d1d3226 --- /dev/null +++ b/test/testActiveDirectoryAuth.test.js @@ -0,0 +1,151 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); +const expect = chai.expect; + +describe('ActiveDirectory auth method', () => { + let ldapStub; + let dbStub; + let passportStub; + let strategyCallback; + + const newConfig = JSON.stringify({ + authentication: [ + { + type: 'ActiveDirectory', + enabled: true, + adminGroup: 'test-admin-group', + userGroup: 'test-user-group', + domain: 'test.com', + adConfig: { + url: 'ldap://test-url', + baseDN: 'dc=test,dc=com', + searchBase: 'ou=users,dc=test,dc=com', + }, + }, + ], + }); + + beforeEach(() => { + ldapStub = { + isUserInAdGroup: sinon.stub(), + }; + + dbStub = { + updateUser: sinon.stub(), + }; + + passportStub = { + use: sinon.stub(), + serializeUser: sinon.stub(), + deserializeUser: sinon.stub(), + }; + + const fsStub = { + existsSync: sinon.stub().returns(true), + readFileSync: sinon.stub().returns(newConfig), + }; + + const config = proxyquire('../src/config', { + fs: fsStub, + }); + + // Initialize the user config after proxyquiring to load the stubbed config + config.initUserConfig(); + + const { configure } = proxyquire('../src/service/passport/activeDirectory', { + './ldaphelper': ldapStub, + '../../db': dbStub, + '../../config': config, + 'passport-activedirectory': function 
(options, callback) { + strategyCallback = callback; + return { + name: 'ActiveDirectory', + authenticate: () => {}, + }; + }, + }); + + configure(passportStub); + }); + + it('should authenticate a valid user and mark them as admin', async () => { + const mockReq = {}; + const mockProfile = { + _json: { + sAMAccountName: 'test-user', + mail: 'test@test.com', + userPrincipalName: 'test@test.com', + title: 'Test User', + }, + displayName: 'Test User', + }; + + ldapStub.isUserInAdGroup.onCall(0).resolves(true).onCall(1).resolves(true); + + const done = sinon.spy(); + + await strategyCallback(mockReq, mockProfile, {}, done); + + expect(done.calledOnce).to.be.true; + const [err, user] = done.firstCall.args; + expect(err).to.be.null; + expect(user).to.have.property('username', 'test-user'); + expect(user).to.have.property('email', 'test@test.com'); + expect(user).to.have.property('displayName', 'Test User'); + expect(user).to.have.property('admin', true); + expect(user).to.have.property('title', 'Test User'); + + expect(dbStub.updateUser.calledOnce).to.be.true; + }); + + it('should fail if user is not in user group', async () => { + const mockReq = {}; + const mockProfile = { + _json: { + sAMAccountName: 'bad-user', + mail: 'bad@test.com', + userPrincipalName: 'bad@test.com', + title: 'Bad User', + }, + displayName: 'Bad User', + }; + + ldapStub.isUserInAdGroup.onCall(0).resolves(false); + + const done = sinon.spy(); + + await strategyCallback(mockReq, mockProfile, {}, done); + + expect(done.calledOnce).to.be.true; + const [err, user] = done.firstCall.args; + expect(err).to.include('not a member'); + expect(user).to.be.null; + + expect(dbStub.updateUser.notCalled).to.be.true; + }); + + it('should handle LDAP errors gracefully', async () => { + const mockReq = {}; + const mockProfile = { + _json: { + sAMAccountName: 'error-user', + mail: 'err@test.com', + userPrincipalName: 'err@test.com', + title: 'Whoops', + }, + displayName: 'Error User', + }; + + 
ldapStub.isUserInAdGroup.rejects(new Error('LDAP error')); + + const done = sinon.spy(); + + await strategyCallback(mockReq, mockProfile, {}, done); + + expect(done.calledOnce).to.be.true; + const [err, user] = done.firstCall.args; + expect(err).to.contain('LDAP error'); + expect(user).to.be.null; + }); +}); diff --git a/test/testAuthMethods.test.js b/test/testAuthMethods.test.js new file mode 100644 index 000000000..fc7054071 --- /dev/null +++ b/test/testAuthMethods.test.js @@ -0,0 +1,67 @@ +const chai = require('chai'); +const config = require('../src/config'); +const sinon = require('sinon'); +const proxyquire = require('proxyquire'); + +chai.should(); +const expect = chai.expect; + +describe('auth methods', async () => { + it('should return a local auth method by default', async function () { + const authMethods = config.getAuthMethods(); + expect(authMethods).to.have.lengthOf(1); + expect(authMethods[0].type).to.equal('local'); + }); + + it('should return an error if no auth methods are enabled', async function () { + const newConfig = JSON.stringify({ + authentication: [ + { type: 'local', enabled: false }, + { type: 'ActiveDirectory', enabled: false }, + { type: 'openidconnect', enabled: false }, + ], + }); + + const fsStub = { + existsSync: sinon.stub().returns(true), + readFileSync: sinon.stub().returns(newConfig), + }; + + const config = proxyquire('../src/config', { + fs: fsStub, + }); + + // Initialize the user config after proxyquiring to load the stubbed config + config.initUserConfig(); + + expect(() => config.getAuthMethods()).to.throw(Error, 'No authentication method enabled'); + }); + + it('should return an array of enabled auth methods when overridden', async function () { + const newConfig = JSON.stringify({ + authentication: [ + { type: 'local', enabled: true }, + { type: 'ActiveDirectory', enabled: true }, + { type: 'openidconnect', enabled: true }, + ], + }); + + const fsStub = { + existsSync: sinon.stub().returns(true), + readFileSync: 
sinon.stub().returns(newConfig), + }; + + const config = proxyquire('../src/config', { + fs: fsStub, + }); + + // Initialize the user config after proxyquiring to load the stubbed config + config.initUserConfig(); + + const authMethods = config.getAuthMethods(); + expect(authMethods).to.have.lengthOf(3); + expect(authMethods[0].type).to.equal('local'); + expect(authMethods[1].type).to.equal('ActiveDirectory'); + expect(authMethods[2].type).to.equal('openidconnect'); + }); +}); diff --git a/test/testCheckUserPushPermission.test.js b/test/testCheckUserPushPermission.test.js new file mode 100644 index 000000000..dd7e9d187 --- /dev/null +++ b/test/testCheckUserPushPermission.test.js @@ -0,0 +1,62 @@ +const chai = require('chai'); +const processor = require('../src/proxy/processors/push-action/checkUserPushPermission'); +const { Action } = require('../src/proxy/actions/Action'); +const { expect } = chai; +const db = require('../src/db'); +chai.should(); + +const TEST_ORG = 'finos'; +const TEST_REPO = 'user-push-perms-test.git'; +const TEST_URL = 'https://github.com/finos/user-push-perms-test.git'; +const TEST_USERNAME_1 = 'push-perms-test'; +const TEST_EMAIL_1 = 'push-perms-test@test.com'; +const TEST_USERNAME_2 = 'push-perms-test-2'; +const TEST_EMAIL_2 = 'push-perms-test-2@test.com'; +const TEST_EMAIL_3 = 'push-perms-test-3@test.com'; + +describe('CheckUserPushPermissions...', async () => { + let testRepo = null; + + before(async function () { + // await db.deleteRepo(TEST_REPO); + // await db.deleteUser(TEST_USERNAME_1); + // await db.deleteUser(TEST_USERNAME_2); + testRepo = await db.createRepo({ + project: TEST_ORG, + name: TEST_REPO, + url: TEST_URL, + }); + await db.createUser(TEST_USERNAME_1, 'abc', TEST_EMAIL_1, TEST_USERNAME_1, false); + await db.addUserCanPush(testRepo._id, TEST_USERNAME_1); + await db.createUser(TEST_USERNAME_2, 'abc', TEST_EMAIL_2, TEST_USERNAME_2, false); + }); + + after(async function () { + await db.deleteRepo(testRepo._id); + await 
db.deleteUser(TEST_USERNAME_1); + await db.deleteUser(TEST_USERNAME_2); + }); + + it('A committer that is approved should be allowed to push...', async () => { + const action = new Action('1', 'type', 'method', 1, TEST_URL); + action.userEmail = TEST_EMAIL_1; + const { error } = await processor.exec(null, action); + expect(error).to.be.false; + }); + + it('A committer that is NOT approved should NOT be allowed to push...', async () => { + const action = new Action('1', 'type', 'method', 1, TEST_URL); + action.userEmail = TEST_EMAIL_2; + const { error, errorMessage } = await processor.exec(null, action); + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + }); + + it('An unknown committer should NOT be allowed to push...', async () => { + const action = new Action('1', 'type', 'method', 1, TEST_URL); + action.userEmail = TEST_EMAIL_3; + const { error, errorMessage } = await processor.exec(null, action); + expect(error).to.be.true; + expect(errorMessage).to.contains('Your push has been blocked'); + }); +}); diff --git a/test/testConfig.test.js b/test/testConfig.test.js new file mode 100644 index 000000000..c099dffea --- /dev/null +++ b/test/testConfig.test.js @@ -0,0 +1,489 @@ +const chai = require('chai'); +const fs = require('fs'); +const path = require('path'); +const defaultSettings = require('../proxy.config.json'); +const fixtures = 'fixtures'; + +chai.should(); +const expect = chai.expect; + +describe('default configuration', function () { + it('should use default values if no user-settings.json file exists', function () { + const config = require('../src/config'); + config.logConfiguration(); + const enabledMethods = defaultSettings.authentication.filter((method) => method.enabled); + + expect(config.getAuthMethods()).to.deep.equal(enabledMethods); + expect(config.getDatabase()).to.be.eql(defaultSettings.sink[0]); + expect(config.getTempPasswordConfig()).to.be.eql(defaultSettings.tempPassword); + 
    expect(config.getAuthorisedList()).to.be.eql(defaultSettings.authorisedList);
    expect(config.getRateLimit()).to.be.eql(defaultSettings.rateLimit);
    expect(config.getTLSKeyPemPath()).to.be.eql(defaultSettings.tls.key);
    expect(config.getTLSCertPemPath()).to.be.eql(defaultSettings.tls.cert);
    expect(config.getTLSEnabled()).to.be.eql(defaultSettings.tls.enabled);
    expect(config.getDomains()).to.be.eql(defaultSettings.domains);
    expect(config.getURLShortener()).to.be.eql(defaultSettings.urlShortener);
    expect(config.getContactEmail()).to.be.eql(defaultSettings.contactEmail);
    expect(config.getPlugins()).to.be.eql(defaultSettings.plugins);
    expect(config.getCSRFProtection()).to.be.eql(defaultSettings.csrfProtection);
    expect(config.getAttestationConfig()).to.be.eql(defaultSettings.attestationConfig);
    expect(config.getAPIs()).to.be.eql(defaultSettings.api);
  });
  after(function () {
    // drop the cached config module so later suites re-require a fresh copy
    delete require.cache[require.resolve('../src/config')];
  });
});

// Each test writes a partial user-settings JSON file and verifies that the
// overridden keys take effect while every other key keeps its default value.
describe('user configuration', function () {
  let tempDir;
  let tempUserFile;
  let oldEnv;

  beforeEach(function () {
    // purge cached config modules so each test loads config from scratch,
    // then point the config loader at a throwaway settings file
    delete require.cache[require.resolve('../src/config/env')];
    delete require.cache[require.resolve('../src/config')];
    oldEnv = { ...process.env };
    tempDir = fs.mkdtempSync('gitproxy-test');
    tempUserFile = path.join(tempDir, 'test-settings.json');
    require('../src/config/file').setConfigFile(tempUserFile);
  });

  it('should override default settings for authorisedList', function () {
    const user = {
      authorisedList: [{ project: 'foo', name: 'bar', url: 'https://github.com/foo/bar.git' }],
    };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    // Invalidate cache to force reload
    const config = require('../src/config');
    config.invalidateCache();
    const enabledMethods = defaultSettings.authentication.filter((method) => method.enabled);

    expect(config.getAuthorisedList()).to.be.eql(user.authorisedList);
    // non-overridden keys should still be the defaults
    expect(config.getAuthMethods()).to.deep.equal(enabledMethods);
    expect(config.getDatabase()).to.be.eql(defaultSettings.sink[0]);
    expect(config.getTempPasswordConfig()).to.be.eql(defaultSettings.tempPassword);
  });

  it('should override default settings for authentication', function () {
    const user = {
      authentication: [
        {
          type: 'openidconnect',
          enabled: true,
          oidcConfig: {
            issuer: 'https://accounts.google.com',
            clientID: 'test-client-id',
            clientSecret: 'test-client-secret',
            callbackURL: 'https://example.com/callback',
            scope: 'openid email profile',
          },
        },
      ],
    };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    // Invalidate cache to force reload
    const config = require('../src/config');
    config.invalidateCache();
    const authMethods = config.getAuthMethods();
    const oidcAuth = authMethods.find((method) => method.type === 'openidconnect');

    expect(oidcAuth).to.not.be.undefined;
    expect(oidcAuth.enabled).to.be.true;
    expect(config.getAuthMethods()).to.deep.include(user.authentication[0]);
    expect(config.getAuthMethods()).to.not.be.eql(defaultSettings.authentication);
    expect(config.getDatabase()).to.be.eql(defaultSettings.sink[0]);
    expect(config.getTempPasswordConfig()).to.be.eql(defaultSettings.tempPassword);
  });

  it('should override default settings for database', function () {
    const user = { sink: [{ type: 'postgres', enabled: true }] };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    const config = require('../src/config');
    config.invalidateCache();
    const enabledMethods = defaultSettings.authentication.filter((method) => method.enabled);

    expect(config.getDatabase()).to.be.eql(user.sink[0]);
    expect(config.getDatabase()).to.not.be.eql(defaultSettings.sink[0]);
    expect(config.getAuthMethods()).to.deep.equal(enabledMethods);
    expect(config.getTempPasswordConfig()).to.be.eql(defaultSettings.tempPassword);
  });

  it('should override default settings for SSL certificate', function () {
    const user = {
      tls: {
        enabled: true,
        key: 'my-key.pem',
        cert: 'my-cert.pem',
      },
    };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    // Invalidate cache to force reload
    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getTLSKeyPemPath()).to.be.eql(user.tls.key);
    expect(config.getTLSCertPemPath()).to.be.eql(user.tls.cert);
  });

  it('should override default settings for rate limiting', function () {
    const limitConfig = { rateLimit: { windowMs: 60000, limit: 1500 } };
    fs.writeFileSync(tempUserFile, JSON.stringify(limitConfig));

    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getRateLimit().windowMs).to.be.eql(limitConfig.rateLimit.windowMs);
    expect(config.getRateLimit().limit).to.be.eql(limitConfig.rateLimit.limit);
  });

  it('should override default settings for attestation config', function () {
    const user = {
      attestationConfig: {
        questions: [
          { label: 'Testing Label Change', tooltip: { text: 'Testing Tooltip Change', links: [] } },
        ],
      },
    };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getAttestationConfig()).to.be.eql(user.attestationConfig);
  });

  it('should override default settings for url shortener', function () {
    const user = { urlShortener: 'https://url-shortener.com' };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    // Invalidate cache to force reload
    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getURLShortener()).to.be.eql(user.urlShortener);
  });

  it('should override default settings for contact email', function () {
    const user = { contactEmail: 'test@example.com' };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getContactEmail()).to.be.eql(user.contactEmail);
  });

  it('should override default settings for plugins', function () {
    const user = { plugins: ['plugin1', 'plugin2'] };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getPlugins()).to.be.eql(user.plugins);
  });

  it('should override default settings for sslCertPemPath', function () {
    const user = {
      tls: {
        enabled: true,
        key: 'my-key.pem',
        cert: 'my-cert.pem',
      },
    };

    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getTLSCertPemPath()).to.be.eql(user.tls.cert);
    expect(config.getTLSKeyPemPath()).to.be.eql(user.tls.key);
    expect(config.getTLSEnabled()).to.be.eql(user.tls.enabled);
  });

  // the nested tls block is the modern form; legacy flat sslKeyPemPath /
  // sslCertPemPath keys should lose when both are present
  it('should prioritize tls.key and tls.cert over sslKeyPemPath and sslCertPemPath', function () {
    const user = {
      tls: { enabled: true, key: 'good-key.pem', cert: 'good-cert.pem' },
      sslKeyPemPath: 'bad-key.pem',
      sslCertPemPath: 'bad-cert.pem',
    };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    // Invalidate cache to force reload
    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getTLSCertPemPath()).to.be.eql(user.tls.cert);
    expect(config.getTLSKeyPemPath()).to.be.eql(user.tls.key);
    expect(config.getTLSEnabled()).to.be.eql(user.tls.enabled);
  });

  it('should use sslKeyPemPath and sslCertPemPath if tls.key and tls.cert are not present', function () {
    const user = { sslKeyPemPath: 'good-key.pem', sslCertPemPath: 'good-cert.pem' };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    // Invalidate cache to force reload
    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getTLSCertPemPath()).to.be.eql(user.sslCertPemPath);
    expect(config.getTLSKeyPemPath()).to.be.eql(user.sslKeyPemPath);
    expect(config.getTLSEnabled()).to.be.eql(false);
  });

  it('should override default settings for api', function () {
    const user = { api: { gitlab: { baseUrl: 'https://gitlab.com' } } };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    // Invalidate cache to force reload
    const config = require('../src/config');
    config.invalidateCache();

    expect(config.getAPIs()).to.be.eql(user.api);
  });

  // env-var tests rely on the suite's afterEach restoring process.env from oldEnv
  it('should override default settings for cookieSecret if env var is used', function () {
    fs.writeFileSync(tempUserFile, '{}');
    process.env.GIT_PROXY_COOKIE_SECRET = 'test-cookie-secret';

    const config = require('../src/config');
    config.invalidateCache();
    expect(config.getCookieSecret()).to.equal('test-cookie-secret');
  });

  it('should override default settings for mongo connection string if env var is used', function () {
    const user = {
      sink: [
        {
          type: 'mongo',
          enabled: true,
        },
      ],
    };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));
    process.env.GIT_PROXY_MONGO_CONNECTION_STRING = 'mongodb://example.com:27017/test';

    const config = require('../src/config');
    config.invalidateCache();
    expect(config.getDatabase().connectionString).to.equal('mongodb://example.com:27017/test');
  });

  it('should test cache invalidation function', function () {
    fs.writeFileSync(tempUserFile, '{}');

    const config = require('../src/config');

    // Load config first time
    const firstLoad = config.getAuthorisedList();

    // Invalidate cache and load again
    config.invalidateCache();
    const secondLoad = config.getAuthorisedList();

    // an unchanged settings file should yield an identical result after reload
    expect(firstLoad).to.deep.equal(secondLoad);
  });

  it('should test reloadConfiguration function', async function () {
    fs.writeFileSync(tempUserFile, '{}');

    const config = require('../src/config');

    // reloadConfiguration doesn't throw
    await config.reloadConfiguration();
  });

  it('should handle configuration errors during initialization', function () {
    const user = {
      invalidConfig: 'this should cause validation error',
    };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    // unknown keys must not make config loading throw
    const config = require('../src/config');
    expect(() => config.getAuthorisedList()).to.not.throw();
  });

  it('should test all getter functions for coverage', function () {
    fs.writeFileSync(tempUserFile, '{}');

    const config = require('../src/config');

    expect(() => config.getProxyUrl()).to.not.throw();
    expect(() => config.getCookieSecret()).to.not.throw();
    expect(() => config.getSessionMaxAgeHours()).to.not.throw();
    expect(() => config.getCommitConfig()).to.not.throw();
    expect(() => config.getPrivateOrganizations()).to.not.throw();
    expect(() => config.getUIRouteAuth()).to.not.throw();
  });

  it('should test getAuthentication function returns first auth method', function () {
    const user = {
      authentication: [
        { type: 'ldap', enabled: true },
        { type: 'local', enabled: true },
      ],
    };
    fs.writeFileSync(tempUserFile, JSON.stringify(user));

    const config = require('../src/config');
    config.invalidateCache();

    const firstAuth = config.getAuthentication();
    expect(firstAuth).to.be.an('object');
    expect(firstAuth.type).to.equal('ldap');
  });

  afterEach(function () {
    // remove the throwaway settings file/dir and restore env + module cache
    fs.rmSync(tempUserFile);
    fs.rmdirSync(tempDir);
    process.env = oldEnv;
    delete require.cache[require.resolve('../src/config')];
  });
});

// Schema validation of whole config files against known-good/known-bad fixtures.
describe('validate config files', function () {
  const config = require('../src/config/file');

  it('all valid config files should pass validation', function () {
    const validConfigFiles = ['proxy.config.valid-1.json', 'proxy.config.valid-2.json'];
    for (const testConfigFile of validConfigFiles) {
      expect(config.validate(path.join(__dirname, fixtures, testConfigFile))).to.be.true;
    }
  });

  it('all invalid config files should fail validation', function () {
    const invalidConfigFiles = ['proxy.config.invalid-1.json', 'proxy.config.invalid-2.json'];
    for (const testConfigFile of invalidConfigFiles) {
      const test = function () {
        config.validate(path.join(__dirname, fixtures, testConfigFile));
      };
      expect(test).to.throw();
    }
  });

  it('should validate using default config file when no path provided', function () {
    const originalConfigFile = config.configFile;
    const mainConfigPath = path.join(__dirname, '..', 'proxy.config.json');
    config.setConfigFile(mainConfigPath);

    try {
      // default configFile
      expect(() => config.validate()).to.not.throw();
    } finally {
      // Restore original config file
      config.setConfigFile(originalConfigFile);
    }
  });

  after(function () {
    delete require.cache[require.resolve('../src/config')];
  });
});

describe('setConfigFile function', function () {
  const config = require('../src/config/file');
  let originalConfigFile;

  beforeEach(function () {
    originalConfigFile = config.configFile;
  });

  afterEach(function () {
    // Restore original config file
    config.setConfigFile(originalConfigFile);
  });

  it('should set the config file path', function () {
    const newPath = '/tmp/new-config.json';
    config.setConfigFile(newPath);
    expect(config.configFile).to.equal(newPath);
  });

  it('should allow changing config file multiple times', function () {
    const firstPath = '/tmp/first-config.json';
    const secondPath = '/tmp/second-config.json';

    config.setConfigFile(firstPath);
    expect(config.configFile).to.equal(firstPath);

    config.setConfigFile(secondPath);
    expect(config.configFile).to.equal(secondPath);
  });
});

describe('Configuration Update Handling', function () {
  let tempDir;
  let tempUserFile;
  let oldEnv;

  beforeEach(function () {
    delete require.cache[require.resolve('../src/config')];
    oldEnv = { ...process.env };
    tempDir = fs.mkdtempSync('gitproxy-test');
    tempUserFile = path.join(tempDir, 'test-settings.json');
    // NOTE(review): assigns configFile directly instead of calling
    // setConfigFile() as the other suites do — confirm these are equivalent
    require('../src/config/file').configFile = tempUserFile;
  });

  it('should test ConfigLoader initialization', function () {
    const configWithSources = {
      configurationSources: {
        enabled: true,
        sources: [
          {
            type: 'file',
            enabled: true,
            path: tempUserFile,
          },
        ],
      },
    };

    fs.writeFileSync(tempUserFile, JSON.stringify(configWithSources));

    const config = require('../src/config');
    config.invalidateCache();

    expect(() => config.getAuthorisedList()).to.not.throw();
  });

  it('should handle config loader initialization errors', function () {
    const invalidConfigSources = {
      configurationSources: {
        enabled: true,
        sources: [
          {
            type: 'invalid-type',
            enabled: true,
            path: tempUserFile,
          },
        ],
      },
    };

    fs.writeFileSync(tempUserFile, JSON.stringify(invalidConfigSources));

    // spy on console.error so the loader's error logging can be restored
    const consoleErrorSpy = require('sinon').spy(console, 'error');

    const config = require('../src/config');
    config.invalidateCache();

    expect(() => config.getAuthorisedList()).to.not.throw();

    consoleErrorSpy.restore();
  });

  afterEach(function () {
    if (fs.existsSync(tempUserFile)) {
      fs.rmSync(tempUserFile, { force: true });
    }
    if (fs.existsSync(tempDir)) {
      fs.rmdirSync(tempDir);
    }
    process.env = oldEnv;
    delete require.cache[require.resolve('../src/config')];
  });
});
diff --git a/test/testDb.test.js b/test/testDb.test.js new file mode 100644 index 000000000..2f32a99b0 --- /dev/null +++ b/test/testDb.test.js @@ -0,0 +1,880 @@
// This test needs to run first
const chai = require('chai');
const db = require('../src/db');
const { Repo, User } = require('../src/db/types');
const { Action } = require('../src/proxy/actions/Action');
const { Step } = require('../src/proxy/actions/Step');

const { expect } = chai;

const TEST_REPO = {
  project: 'finos',
  name: 'db-test-repo',
  url: 'https://github.com/finos/db-test-repo.git',
};

// deliberately never created in the DB; used to exercise not-found paths
const TEST_NONEXISTENT_REPO = {
  project: 'MegaCorp',
  name: 'repo',
  url: 'https://example.com/MegaCorp/MegaGroup/repo.git',
  _id: 'ABCDEFGHIJKLMNOP',
};

const TEST_USER = {
  username: 'db-u1',
password: 'abc', + gitAccount: 'db-test-user', + email: 'db-test@test.com', + admin: true, +}; + +const TEST_PUSH = { + steps: [], + error: false, + blocked: true, + allowPush: false, + authorised: false, + canceled: true, + rejected: false, + autoApproved: false, + autoRejected: false, + commitData: [], + id: '0000000000000000000000000000000000000000__1744380874110', + type: 'push', + method: 'get', + timestamp: 1744380903338, + project: 'finos', + repoName: 'db-test-repo.git', + url: TEST_REPO.url, + repo: 'finos/db-test-repo.git', + user: 'db-test-user', + userEmail: 'db-test@test.com', + lastStep: null, + blockedMessage: + '\n\n\nGitProxy has received your push:\n\nhttp://localhost:8080/requests/0000000000000000000000000000000000000000__1744380874110\n\n\n', + _id: 'GIMEz8tU2KScZiTz', + attestation: null, +}; + +const TEST_REPO_DOT_GIT = { + project: 'finos', + name: 'db.git-test-repo', + url: 'https://github.com/finos/db.git-test-repo.git', +}; + +// the same as TEST_PUSH but with .git somewhere valid within the name +// to ensure a global replace isn't done when trimming, just to the end +const TEST_PUSH_DOT_GIT = { + ...TEST_PUSH, + repoName: 'db.git-test-repo.git', + url: 'https://github.com/finos/db.git-test-repo.git', + repo: 'finos/db.git-test-repo.git', +}; + +/** + * Clean up response data from the DB by removing an extraneous properties, + * allowing comparison with expect. + * @param {object} example Example element from which columns to retain are extracted + * @param {array | object} responses Array of responses to clean. + * @return {array} Array of cleaned up responses. 
+ */ +const cleanResponseData = (example, responses) => { + const columns = Object.keys(example); + + if (Array.isArray(responses)) { + return responses.map((response) => { + const cleanResponse = {}; + columns.forEach((col) => { + cleanResponse[col] = response[col]; + }); + return cleanResponse; + }); + } else if (typeof responses === 'object') { + const cleanResponse = {}; + columns.forEach((col) => { + cleanResponse[col] = responses[col]; + }); + return cleanResponse; + } else { + throw new Error(`Can only clean arrays or objects, but a ${typeof responses} was passed`); + } +}; + +// Use this test as a template +describe('Database clients', async () => { + before(async function () {}); + + it('should be able to construct a repo instance', async function () { + const repo = new Repo('project', 'name', 'https://github.com/finos.git-proxy.git', null, 'id'); + expect(repo._id).to.equal('id'); + expect(repo.project).to.equal('project'); + expect(repo.name).to.equal('name'); + expect(repo.url).to.equal('https://github.com/finos.git-proxy.git'); + expect(repo.users).to.deep.equals({ canPush: [], canAuthorise: [] }); + + const repo2 = new Repo( + 'project', + 'name', + 'https://github.com/finos.git-proxy.git', + { canPush: ['bill'], canAuthorise: ['ben'] }, + 'id', + ); + expect(repo2.users).to.deep.equals({ canPush: ['bill'], canAuthorise: ['ben'] }); + }); + + it('should be able to construct a user instance', async function () { + const user = new User( + 'username', + 'password', + 'gitAccount', + 'email@domain.com', + true, + null, + 'id', + ); + expect(user.username).to.equal('username'); + expect(user.username).to.equal('username'); + expect(user.gitAccount).to.equal('gitAccount'); + expect(user.email).to.equal('email@domain.com'); + expect(user.admin).to.equal(true); + expect(user.oidcId).to.be.null; + expect(user._id).to.equal('id'); + + const user2 = new User( + 'username', + 'password', + 'gitAccount', + 'email@domain.com', + false, + 'oidcId', + 'id', + ); + 
    expect(user2.admin).to.equal(false);
    expect(user2.oidcId).to.equal('oidcId');
  });

  it('should be able to construct a valid action instance', async function () {
    const action = new Action(
      'id',
      'type',
      'method',
      Date.now(),
      'https://github.com/finos/git-proxy.git',
    );
    // project and repoName are derived from the URL by the Action constructor
    expect(action.project).to.equal('finos');
    expect(action.repoName).to.equal('git-proxy.git');
  });

  it('should be able to block an action by adding a blocked step', async function () {
    // NOTE(review): URL uses 'finos.git-proxy.git' (dot, not slash) — presumably
    // intentional test data; the block/error assertions below do not depend on it
    const action = new Action(
      'id',
      'type',
      'method',
      Date.now(),
      'https://github.com/finos.git-proxy.git',
    );
    const step = new Step('stepName', false, null, false, null);
    step.setAsyncBlock('blockedMessage');
    action.addStep(step);
    expect(action.blocked).to.be.true;
    expect(action.blockedMessage).to.equal('blockedMessage');
    expect(action.getLastStep()).to.deep.equals(step);
    expect(action.continue()).to.be.false;
  });

  it('should be able to error an action by adding a step with an error', async function () {
    const action = new Action(
      'id',
      'type',
      'method',
      Date.now(),
      'https://github.com/finos.git-proxy.git',
    );
    const step = new Step('stepName', true, 'errorMessage', false, null);
    action.addStep(step);
    expect(action.error).to.be.true;
    expect(action.errorMessage).to.equal('errorMessage');
    expect(action.getLastStep()).to.deep.equals(step);
    expect(action.continue()).to.be.false;
  });

  it('should be able to create a repo', async function () {
    await db.createRepo(TEST_REPO);
    const repos = await db.getRepos();
    const cleanRepos = cleanResponseData(TEST_REPO, repos);
    expect(cleanRepos).to.deep.include(TEST_REPO);
  });

  it('should be able to filter repos', async function () {
    // uppercase the filter value to confirm db client is lowercasing inputs
    const repos = await db.getRepos({ name: TEST_REPO.name.toUpperCase() });
    const cleanRepos = cleanResponseData(TEST_REPO, repos);
    expect(cleanRepos[0]).to.eql(TEST_REPO);

    const repos2 = await db.getRepos({ url: TEST_REPO.url });
    const cleanRepos2 = cleanResponseData(TEST_REPO, repos2);
    expect(cleanRepos2[0]).to.eql(TEST_REPO);

    // passing an empty query should produce same results as no query
    const repos3 = await db.getRepos();
    const repos4 = await db.getRepos({});
    expect(repos3).to.have.same.deep.members(repos4);
  });

  it('should be able to retrieve a repo by url', async function () {
    const repo = await db.getRepoByUrl(TEST_REPO.url);
    const cleanRepo = cleanResponseData(TEST_REPO, repo);
    expect(cleanRepo).to.eql(TEST_REPO);
  });

  it('should be able to retrieve a repo by id', async function () {
    // _id is autogenerated by the DB so we need to retrieve it before we can use it
    const repo = await db.getRepoByUrl(TEST_REPO.url);
    const repoById = await db.getRepoById(repo._id);
    const cleanRepo = cleanResponseData(TEST_REPO, repoById);
    expect(cleanRepo).to.eql(TEST_REPO);
  });

  it('should be able to delete a repo', async function () {
    // _id is autogenerated by the DB so we need to retrieve it before we can use it
    const repo = await db.getRepoByUrl(TEST_REPO.url);
    await db.deleteRepo(repo._id);
    const repos = await db.getRepos();
    const cleanRepos = cleanResponseData(TEST_REPO, repos);
    expect(cleanRepos).to.not.deep.include(TEST_REPO);
  });

  it('should be able to create a repo with a blank project', async function () {
    // test with a null value
    let threwError = false;
    let testRepo = {
      project: null,
      name: TEST_REPO.name,
      url: TEST_REPO.url,
    };
    try {
      const repo = await db.createRepo(testRepo);
      await db.deleteRepo(repo._id, true);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.false;

    // test with an empty string
    threwError = false;
    testRepo = {
      project: '',
      name: TEST_REPO.name,
      url: TEST_REPO.url,
    };
    try {
      const repo = await db.createRepo(testRepo);
      await db.deleteRepo(repo._id, true);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.false;

    // test with an undefined property
    threwError = false;
    testRepo = {
      name: TEST_REPO.name,
      url: TEST_REPO.url,
    };
    try {
      const repo = await db.createRepo(testRepo);
      await db.deleteRepo(repo._id, true);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.false;
  });

  it('should NOT be able to create a repo with blank name or url', async function () {
    // null name
    let threwError = false;
    let testRepo = {
      project: TEST_REPO.project,
      name: null,
      url: TEST_REPO.url,
    };
    try {
      await db.createRepo(testRepo);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.true;

    // blank name
    threwError = false;
    testRepo = {
      project: TEST_REPO.project,
      name: '',
      url: TEST_REPO.url,
    };
    try {
      await db.createRepo(testRepo);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.true;

    // undefined name
    threwError = false;
    testRepo = {
      project: TEST_REPO.project,
      url: TEST_REPO.url,
    };
    try {
      await db.createRepo(testRepo);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.true;

    // null url
    testRepo = {
      project: TEST_REPO.project,
      name: TEST_REPO.name,
      url: null,
    };
    try {
      await db.createRepo(testRepo);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.true;

    // blank url
    testRepo = {
      project: TEST_REPO.project,
      name: TEST_REPO.name,
      url: '',
    };
    try {
      await db.createRepo(testRepo);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.true;

    // undefined url
    testRepo = {
      project: TEST_REPO.project,
      name: TEST_REPO.name,
    };
    try {
      await db.createRepo(testRepo);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.true;
  });

  it('should throw an error when creating a user and username or email is not set', async function () {
    // null username
    let threwError = false;
    let message = null;
    try {
      await db.createUser(
        null,
        TEST_USER.password,
        TEST_USER.email,
        TEST_USER.gitAccount,
        TEST_USER.admin,
      );
    } catch (e) {
      threwError = true;
      message = e.message;
    }
    expect(threwError).to.be.true;
    expect(message).to.equal('username cannot be empty');

    // blank username
    threwError = false;
    try {
      await db.createUser(
        '',
        TEST_USER.password,
        TEST_USER.email,
        TEST_USER.gitAccount,
        TEST_USER.admin,
      );
    } catch (e) {
      threwError = true;
      message = e.message;
    }
    expect(threwError).to.be.true;
    expect(message).to.equal('username cannot be empty');

    // null email
    threwError = false;
    try {
      await db.createUser(
        TEST_USER.username,
        TEST_USER.password,
        null,
        TEST_USER.gitAccount,
        TEST_USER.admin,
      );
    } catch (e) {
      threwError = true;
      message = e.message;
    }
    expect(threwError).to.be.true;
    expect(message).to.equal('email cannot be empty');

    // blank email
    threwError = false;
    try {
      await db.createUser(
        TEST_USER.username,
        TEST_USER.password,
        '',
        TEST_USER.gitAccount,
        TEST_USER.admin,
      );
    } catch (e) {
      threwError = true;
      message = e.message;
    }
    expect(threwError).to.be.true;
    expect(message).to.equal('email cannot be empty');
  });

  it('should be able to create a user', async function () {
    await db.createUser(
      TEST_USER.username,
      TEST_USER.password,
      TEST_USER.email,
      TEST_USER.gitAccount,
      TEST_USER.admin,
    );
    const users = await db.getUsers();
    console.log('TEST USER:', JSON.stringify(TEST_USER, null, 2));
    console.log('USERS:', JSON.stringify(users, null, 2));
    // remove password as it will have been hashed

    const { password: _, ...TEST_USER_CLEAN } = TEST_USER;
    const cleanUsers = cleanResponseData(TEST_USER_CLEAN, users);
    expect(cleanUsers).to.deep.include(TEST_USER_CLEAN);
  });

  it('should throw an error when creating a duplicate username', async function () {
    let threwError = false;
    let message = null;
    try {
      // same username, different email
      await db.createUser(
        TEST_USER.username,
        TEST_USER.password,
        'prefix_' + TEST_USER.email,
        TEST_USER.gitAccount,
        TEST_USER.admin,
      );
    } catch (e) {
      threwError = true;
      message = e.message;
    }
    expect(threwError).to.be.true;
    expect(message).to.equal(`user ${TEST_USER.username} already exists`);
  });

  it('should throw an error when creating a user with a duplicate email', async function () {
    let threwError = false;
    let message = null;
    try {
      // same email, different username
      await db.createUser(
        'prefix_' + TEST_USER.username,
        TEST_USER.password,
        TEST_USER.email,
        TEST_USER.gitAccount,
        TEST_USER.admin,
      );
    } catch (e) {
      threwError = true;
      message = e.message;
    }
    expect(threwError).to.be.true;
    expect(message).to.equal(`A user with email ${TEST_USER.email} already exists`);
  });

  it('should be able to find a user', async function () {
    const user = await db.findUser(TEST_USER.username);

    const { password: _, ...TEST_USER_CLEAN } = TEST_USER;

    const { password: _2, _id: _3, ...DB_USER_CLEAN } = user;

    expect(DB_USER_CLEAN).to.eql(TEST_USER_CLEAN);
  });

  it('should be able to filter getUsers', async function () {
    // uppercase the filter value to confirm db client is lowercasing inputs
    const users = await db.getUsers({ username: TEST_USER.username.toUpperCase() });

    const { password: _, ...TEST_USER_CLEAN } = TEST_USER;
    const cleanUsers = cleanResponseData(TEST_USER_CLEAN, users);
    expect(cleanUsers[0]).to.eql(TEST_USER_CLEAN);

    const users2 = await db.getUsers({ email: TEST_USER.email.toUpperCase() });
    const cleanUsers2 = cleanResponseData(TEST_USER_CLEAN, users2);
    expect(cleanUsers2[0]).to.eql(TEST_USER_CLEAN);
  });

  it('should be able to delete a user', async function () {
    await db.deleteUser(TEST_USER.username);
    const users = await db.getUsers();
    const cleanUsers = cleanResponseData(TEST_USER, users);
    expect(cleanUsers).to.not.deep.include(TEST_USER);
  });

  it('should be able to update a user', async function () {
    await db.createUser(
      TEST_USER.username,
      TEST_USER.password,
      TEST_USER.email,
      TEST_USER.gitAccount,
      TEST_USER.admin,
    );

    // has fewer properties to prove that records are merged
    const updateToApply = {
      username: TEST_USER.username,
      gitAccount: 'updatedGitAccount',
      admin: false,
    };

    const updatedUser = {
      // remove password as it will have been hashed
      username: TEST_USER.username,
      email: TEST_USER.email,
      gitAccount: 'updatedGitAccount',
      admin: false,
    };
    await db.updateUser(updateToApply);

    const users = await db.getUsers();
    const cleanUsers = cleanResponseData(updatedUser, users);
    expect(cleanUsers).to.deep.include(updatedUser);
    await db.deleteUser(TEST_USER.username);
  });

  it('should be able to create a user via updateUser', async function () {
    // updateUser should upsert when the user does not exist yet
    await db.updateUser(TEST_USER);

    const users = await db.getUsers();
    // remove password as it will have been hashed

    const { password: _, ...TEST_USER_CLEAN } = TEST_USER;
    const cleanUsers = cleanResponseData(TEST_USER_CLEAN, users);
    expect(cleanUsers).to.deep.include(TEST_USER_CLEAN);
    // leave user in place for next test(s)
  });

  it('should throw an error when authorising a user to push on non-existent repo', async function () {
    let threwError = false;
    try {
      // uppercase the filter value to confirm db client is lowercasing inputs
      await db.addUserCanPush(TEST_NONEXISTENT_REPO._id, TEST_USER.username);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.true;
  });

  it('should be able to authorise a user to push and confirm that they can', async function () {
    // first create the repo and check that user is not allowed to push
    await db.createRepo(TEST_REPO);

    let allowed = await db.isUserPushAllowed(TEST_REPO.url, TEST_USER.username);
    expect(allowed).to.be.false;

    const repo = await db.getRepoByUrl(TEST_REPO.url);

    // uppercase the filter value to confirm db client is lowercasing inputs
    await db.addUserCanPush(repo._id, TEST_USER.username.toUpperCase());

    // repeat, should not throw an error if already set
    await db.addUserCanPush(repo._id, TEST_USER.username.toUpperCase());

    // confirm the setting exists
    allowed = await db.isUserPushAllowed(TEST_REPO.url, TEST_USER.username);
    expect(allowed).to.be.true;

    // confirm that casing doesn't matter
    allowed = await db.isUserPushAllowed(TEST_REPO.url, TEST_USER.username.toUpperCase());
    expect(allowed).to.be.true;
  });

  it('should throw an error when de-authorising a user to push on non-existent repo', async function () {
    let threwError = false;
    try {
      await db.removeUserCanPush(TEST_NONEXISTENT_REPO._id, TEST_USER.username);
    } catch (e) {
      threwError = true;
    }
    expect(threwError).to.be.true;
  });

  it("should be able to de-authorise a user to push and confirm that they can't", async function () {
    let threwError = false;
    try {
      // repo should already exist with user able to push after previous test
      let allowed = await db.isUserPushAllowed(TEST_REPO.url, TEST_USER.username);
      expect(allowed).to.be.true;

      const repo = await db.getRepoByUrl(TEST_REPO.url);

      // uppercase the filter value to confirm db client is lowercasing inputs
      await db.removeUserCanPush(repo._id, TEST_USER.username.toUpperCase());

      // repeat, should not throw an error if already unset
      await db.removeUserCanPush(repo._id, TEST_USER.username.toUpperCase());

      // confirm the setting exists
      allowed = await db.isUserPushAllowed(TEST_REPO.url, TEST_USER.username);
      expect(allowed).to.be.false;

      // confirm that casing doesn't matter
      allowed = await db.isUserPushAllowed(TEST_REPO.url, TEST_USER.username.toUpperCase());
      expect(allowed).to.be.false;
    } catch (e) {
      console.error('Error thrown at: ' + e.stack, e);
      threwError = true;
    }
    expect(threwError).to.be.false;
  });

  it('should throw an error when authorising a user to authorise on non-existent repo', async function () {
    let threwError = false;
try { + await db.addUserCanAuthorise(TEST_NONEXISTENT_REPO._id, TEST_USER.username); + } catch (e) { + threwError = true; + } + expect(threwError).to.be.true; + }); + + it('should throw an error when de-authorising a user to push on non-existent repo', async function () { + let threwError = false; + try { + // uppercase the filter value to confirm db client is lowercasing inputs + await db.removeUserCanAuthorise(TEST_NONEXISTENT_REPO._id, TEST_USER.username); + } catch (e) { + threwError = true; + } + expect(threwError).to.be.true; + }); + + it('should NOT throw an error when checking whether a user can push on non-existent repo', async function () { + const allowed = await db.isUserPushAllowed(TEST_NONEXISTENT_REPO.url, TEST_USER.username); + expect(allowed).to.be.false; + }); + + it('should be able to create a push', async function () { + await db.writeAudit(TEST_PUSH); + const pushes = await db.getPushes(); + const cleanPushes = cleanResponseData(TEST_PUSH, pushes); + expect(cleanPushes).to.deep.include(TEST_PUSH); + }); + + it('should be able to delete a push', async function () { + await db.deletePush(TEST_PUSH.id); + const pushes = await db.getPushes(); + const cleanPushes = cleanResponseData(TEST_PUSH, pushes); + expect(cleanPushes).to.not.deep.include(TEST_PUSH); + }); + + it('should be able to authorise a push', async function () { + // first create the push + await db.writeAudit(TEST_PUSH); + let threwError = false; + try { + const msg = await db.authorise(TEST_PUSH.id); + expect(msg).to.have.property('message'); + } catch (e) { + console.error('Error: ', e); + threwError = true; + } + expect(threwError).to.be.false; + // clean up + await db.deletePush(TEST_PUSH.id); + }); + + it('should throw an error when authorising a non-existent a push', async function () { + let threwError = false; + try { + await db.authorise(TEST_PUSH.id); + } catch (e) { + threwError = true; + } + expect(threwError).to.be.true; + }); + + it('should be able to reject a push', 
async function () { + // first create the push + await db.writeAudit(TEST_PUSH); + let threwError = false; + try { + const msg = await db.reject(TEST_PUSH.id); + expect(msg).to.have.property('message'); + } catch (e) { + threwError = true; + } + expect(threwError).to.be.false; + // clean up + await db.deletePush(TEST_PUSH.id); + }); + + it('should throw an error when rejecting a non-existent a push', async function () { + let threwError = false; + try { + await db.reject(TEST_PUSH.id); + } catch (e) { + threwError = true; + } + expect(threwError).to.be.true; + }); + + it('should be able to cancel a push', async function () { + // first create the push + await db.writeAudit(TEST_PUSH); + let threwError = false; + try { + const msg = await db.cancel(TEST_PUSH.id); + expect(msg).to.have.property('message'); + } catch (e) { + threwError = true; + } + expect(threwError).to.be.false; + // clean up + await db.deletePush(TEST_PUSH.id); + }); + + it('should throw an error when cancelling a non-existent a push', async function () { + let threwError = false; + try { + await db.cancel(TEST_PUSH.id); + } catch (e) { + threwError = true; + } + expect(threwError).to.be.true; + }); + + it('should be able to check if a user can cancel push', async function () { + let threwError = false; + try { + const repo = await db.getRepoByUrl(TEST_REPO.url); + + // push does not exist yet, should return false + let allowed = await db.canUserCancelPush(TEST_PUSH.id, TEST_USER.username); + expect(allowed).to.be.false; + + // create the push - user should already exist and not authorised to push + await db.writeAudit(TEST_PUSH); + allowed = await db.canUserCancelPush(TEST_PUSH.id, TEST_USER.username); + expect(allowed).to.be.false; + + // authorise user and recheck + await db.addUserCanPush(repo._id, TEST_USER.username); + allowed = await db.canUserCancelPush(TEST_PUSH.id, TEST_USER.username); + expect(allowed).to.be.true; + + // deauthorise user and recheck + await db.removeUserCanPush(repo._id, 
TEST_USER.username); + allowed = await db.canUserCancelPush(TEST_PUSH.id, TEST_USER.username); + expect(allowed).to.be.false; + } catch (e) { + console.error(e); + threwError = true; + } + expect(threwError).to.be.false; + // clean up + await db.deletePush(TEST_PUSH.id); + }); + + it('should be able to check if a user can approve/reject push', async function () { + let allowed = undefined; + + try { + // push does not exist yet, should return false + allowed = await db.canUserApproveRejectPush(TEST_PUSH.id, TEST_USER.username); + expect(allowed).to.be.false; + } catch (e) { + expect.fail(e); + } + + try { + // create the push - user should already exist and not authorised to push + await db.writeAudit(TEST_PUSH); + allowed = await db.canUserApproveRejectPush(TEST_PUSH.id, TEST_USER.username); + expect(allowed).to.be.false; + } catch (e) { + expect.fail(e); + } + + try { + const repo = await db.getRepoByUrl(TEST_REPO.url); + + // authorise user and recheck + await db.addUserCanAuthorise(repo._id, TEST_USER.username); + allowed = await db.canUserApproveRejectPush(TEST_PUSH.id, TEST_USER.username); + expect(allowed).to.be.true; + + // deauthorise user and recheck + await db.removeUserCanAuthorise(repo._id, TEST_USER.username); + allowed = await db.canUserApproveRejectPush(TEST_PUSH.id, TEST_USER.username); + expect(allowed).to.be.false; + } catch (e) { + expect.fail(e); + } + + // clean up + await db.deletePush(TEST_PUSH.id); + }); + + it('should be able to check if a user can approve/reject push including .git within the repo name', async function () { + let allowed = undefined; + const repo = await db.createRepo(TEST_REPO_DOT_GIT); + try { + // push does not exist yet, should return false + allowed = await db.canUserApproveRejectPush(TEST_PUSH_DOT_GIT.id, TEST_USER.username); + expect(allowed).to.be.false; + } catch (e) { + expect.fail(e); + } + + try { + // create the push - user should already exist and not authorised to push + await 
db.writeAudit(TEST_PUSH_DOT_GIT); + allowed = await db.canUserApproveRejectPush(TEST_PUSH_DOT_GIT.id, TEST_USER.username); + expect(allowed).to.be.false; + } catch (e) { + expect.fail(e); + } + + try { + // authorise user and recheck + await db.addUserCanAuthorise(repo._id, TEST_USER.username); + allowed = await db.canUserApproveRejectPush(TEST_PUSH_DOT_GIT.id, TEST_USER.username); + expect(allowed).to.be.true; + } catch (e) { + expect.fail(e); + } + + // clean up + await db.deletePush(TEST_PUSH_DOT_GIT.id); + await db.removeUserCanAuthorise(repo._id, TEST_USER.username); + }); + + after(async function () { + // _id is autogenerated by the DB so we need to retrieve it before we can use it + const repo = await db.getRepoByUrl(TEST_REPO.url); + await db.deleteRepo(repo._id, true); + const repoDotGit = await db.getRepoByUrl(TEST_REPO_DOT_GIT.url); + await db.deleteRepo(repoDotGit._id); + await db.deleteUser(TEST_USER.username); + await db.deletePush(TEST_PUSH.id); + await db.deletePush(TEST_PUSH_DOT_GIT.id); + }); +}); diff --git a/test/testJwtAuthHandler.test.js b/test/testJwtAuthHandler.test.js new file mode 100644 index 000000000..9c7ada52e --- /dev/null +++ b/test/testJwtAuthHandler.test.js @@ -0,0 +1,210 @@ +const { expect } = require('chai'); +const sinon = require('sinon'); +const axios = require('axios'); +const jwt = require('jsonwebtoken'); +const { jwkToBuffer } = require('jwk-to-pem'); + +const { assignRoles, getJwks, validateJwt } = require('../src/service/passport/jwtUtils'); +const jwtAuthHandler = require('../src/service/passport/jwtAuthHandler'); + +describe('getJwks', () => { + it('should fetch JWKS keys from authority', async () => { + const jwksResponse = { keys: [{ kid: 'test-key', kty: 'RSA', n: 'abc', e: 'AQAB' }] }; + + const getStub = sinon.stub(axios, 'get'); + getStub.onFirstCall().resolves({ data: { jwks_uri: 'https://mock.com/jwks' } }); + getStub.onSecondCall().resolves({ data: jwksResponse }); + + const keys = await 
getJwks('https://mock.com'); + expect(keys).to.deep.equal(jwksResponse.keys); + + getStub.restore(); + }); + + it('should throw error if fetch fails', async () => { + const stub = sinon.stub(axios, 'get').rejects(new Error('Network fail')); + try { + await getJwks('https://fail.com'); + } catch (err) { + expect(err.message).to.equal('Failed to fetch JWKS'); + } + stub.restore(); + }); +}); + +describe('validateJwt', () => { + let decodeStub; + let verifyStub; + let pemStub; + let getJwksStub; + + beforeEach(() => { + const jwksResponse = { keys: [{ kid: 'test-key', kty: 'RSA', n: 'abc', e: 'AQAB' }] }; + const getStub = sinon.stub(axios, 'get'); + getStub.onFirstCall().resolves({ data: { jwks_uri: 'https://mock.com/jwks' } }); + getStub.onSecondCall().resolves({ data: jwksResponse }); + + getJwksStub = sinon.stub().resolves(jwksResponse.keys); + decodeStub = sinon.stub(jwt, 'decode'); + verifyStub = sinon.stub(jwt, 'verify'); + pemStub = sinon.stub(jwkToBuffer); + + pemStub.returns('fake-public-key'); + getJwksStub.returns(jwksResponse.keys); + }); + + afterEach(() => sinon.restore()); + + it('should validate a correct JWT', async () => { + const mockJwk = { kid: '123', kty: 'RSA', n: 'abc', e: 'AQAB' }; + const mockPem = 'fake-public-key'; + + decodeStub.returns({ header: { kid: '123' } }); + getJwksStub.resolves([mockJwk]); + pemStub.returns(mockPem); + verifyStub.returns({ azp: 'client-id', sub: 'user123' }); + + const { verifiedPayload } = await validateJwt( + 'fake.token.here', + 'https://issuer.com', + 'client-id', + 'client-id', + getJwksStub, + ); + expect(verifiedPayload.sub).to.equal('user123'); + }); + + it('should return error if JWT invalid', async () => { + decodeStub.returns(null); // Simulate broken token + + const { error } = await validateJwt( + 'bad.token', + 'https://issuer.com', + 'client-id', + 'client-id', + getJwksStub, + ); + expect(error).to.include('Invalid JWT'); + }); +}); + +describe('assignRoles', () => { + it('should assign admin 
role based on claim', () => { + const user = { username: 'admin-user' }; + const payload = { admin: 'admin' }; + const mapping = { admin: { admin: 'admin' } }; + + assignRoles(mapping, payload, user); + expect(user.admin).to.be.true; + }); + + it('should assign multiple roles based on claims', () => { + const user = { username: 'multi-role-user' }; + const payload = { 'custom-claim-admin': 'custom-value', editor: 'editor' }; + const mapping = { + admin: { 'custom-claim-admin': 'custom-value' }, + editor: { editor: 'editor' }, + }; + + assignRoles(mapping, payload, user); + expect(user.admin).to.be.true; + expect(user.editor).to.be.true; + }); + + it('should not assign role if claim mismatch', () => { + const user = { username: 'basic-user' }; + const payload = { admin: 'nope' }; + const mapping = { admin: { admin: 'admin' } }; + + assignRoles(mapping, payload, user); + expect(user.admin).to.be.undefined; + }); + + it('should not assign role if no mapping provided', () => { + const user = { username: 'no-role-user' }; + const payload = { admin: 'admin' }; + + assignRoles(null, payload, user); + expect(user.admin).to.be.undefined; + }); +}); + +describe('jwtAuthHandler', () => { + let req; + let res; + let next; + let jwtConfig; + let validVerifyResponse; + + beforeEach(() => { + req = { header: sinon.stub(), isAuthenticated: sinon.stub(), user: {} }; + res = { status: sinon.stub().returnsThis(), send: sinon.stub() }; + next = sinon.stub(); + + jwtConfig = { + clientID: 'client-id', + authorityURL: 'https://accounts.google.com', + expectedAudience: 'expected-audience', + roleMapping: { admin: { admin: 'admin' } }, + }; + + validVerifyResponse = { + header: { kid: '123' }, + azp: 'client-id', + sub: 'user123', + admin: 'admin', + }; + }); + + afterEach(() => { + sinon.restore(); + }); + + it('should call next if user is authenticated', async () => { + req.isAuthenticated.returns(true); + await jwtAuthHandler()(req, res, next); + expect(next.calledOnce).to.be.true; + 
}); + + it('should return 401 if no token provided', async () => { + req.header.returns(null); + await jwtAuthHandler(jwtConfig)(req, res, next); + + expect(res.status.calledWith(401)).to.be.true; + expect(res.send.calledWith('No token provided\n')).to.be.true; + }); + + it('should return 500 if authorityURL not configured', async () => { + req.header.returns('Bearer fake-token'); + jwtConfig.authorityURL = null; + sinon.stub(jwt, 'verify').returns(validVerifyResponse); + + await jwtAuthHandler(jwtConfig)(req, res, next); + + expect(res.status.calledWith(500)).to.be.true; + expect(res.send.calledWith({ message: 'JWT handler: authority URL is not configured\n' })).to.be + .true; + }); + + it('should return 500 if clientID not configured', async () => { + req.header.returns('Bearer fake-token'); + jwtConfig.clientID = null; + sinon.stub(jwt, 'verify').returns(validVerifyResponse); + + await jwtAuthHandler(jwtConfig)(req, res, next); + + expect(res.status.calledWith(500)).to.be.true; + expect(res.send.calledWith({ message: 'JWT handler: client ID is not configured\n' })).to.be + .true; + }); + + it('should return 401 if JWT validation fails', async () => { + req.header.returns('Bearer fake-token'); + sinon.stub(jwt, 'verify').throws(new Error('Invalid token')); + + await jwtAuthHandler(jwtConfig)(req, res, next); + + expect(res.status.calledWith(401)).to.be.true; + expect(res.send.calledWithMatch(/JWT validation failed:/)).to.be.true; + }); +}); diff --git a/test/testLogin.test.js b/test/testLogin.test.js new file mode 100644 index 000000000..3000f8b74 --- /dev/null +++ b/test/testLogin.test.js @@ -0,0 +1,291 @@ +// Import the dependencies for testing +const chai = require('chai'); +const chaiHttp = require('chai-http'); +const db = require('../src/db'); +const service = require('../src/service'); + +chai.use(chaiHttp); +chai.should(); +const expect = chai.expect; + +describe('auth', async () => { + let app; + let cookie; + + before(async function () { + app = await 
service.start();
+    await db.deleteUser('login-test-user');
+  });
+
+  describe('test login / logout', async function () {
+    // Accessing a protected route before logging in should be rejected
+    it('should get 401 not logged in', async function () {
+      const res = await chai.request(app).get('/api/auth/profile');
+
+      res.should.have.status(401);
+    });
+
+    it('should be able to login', async function () {
+      const res = await chai.request(app).post('/api/auth/login').send({
+        username: 'admin',
+        password: 'admin',
+      });
+
+      expect(res).to.have.cookie('connect.sid');
+      res.should.have.status(200);
+
+      // Get the connect cookie
+      res.headers['set-cookie'].forEach((x) => {
+        if (x.startsWith('connect')) {
+          cookie = x.split(';')[0];
+        }
+      });
+    });
+
+    it('should now be able to access the user login metadata', async function () {
+      const res = await chai.request(app).get('/api/auth/me').set('Cookie', `${cookie}`);
+      res.should.have.status(200);
+    });
+
+    it('should now be able to access the profile', async function () {
+      const res = await chai.request(app).get('/api/auth/profile').set('Cookie', `${cookie}`);
+      res.should.have.status(200);
+    });
+
+    it('should be able to set the git account', async function () {
+      console.log(`cookie: ${cookie}`);
+      const res = await chai
+        .request(app)
+        .post('/api/auth/gitAccount')
+        .set('Cookie', `${cookie}`)
+        .send({
+          username: 'admin',
+          gitAccount: 'new-account',
+        });
+      res.should.have.status(200);
+    });
+
+    it('should throw an error if the username is not provided when setting the git account', async function () {
+      const res = await chai
+        .request(app)
+        .post('/api/auth/gitAccount')
+        .set('Cookie', `${cookie}`)
+        .send({
+          gitAccount: 'new-account',
+        });
+      console.log(`res: ${JSON.stringify(res)}`);
+      res.should.have.status(400);
+    });
+
+    it('should now be able to logout', async function () {
+      const res = await chai.request(app).post('/api/auth/logout').set('Cookie', `${cookie}`);
+      res.should.have.status(200);
+    });
+
+    it('test cannot access profile
page', async function () { + const res = await chai.request(app).get('/api/auth/profile').set('Cookie', `${cookie}`); + + res.should.have.status(401); + }); + + it('should fail to login with invalid username', async function () { + const res = await chai.request(app).post('/api/auth/login').send({ + username: 'invalid', + password: 'admin', + }); + res.should.have.status(401); + }); + + it('should fail to login with invalid password', async function () { + const res = await chai.request(app).post('/api/auth/login').send({ + username: 'admin', + password: 'invalid', + }); + res.should.have.status(401); + }); + + it('should fail to set the git account if the user is not logged in', async function () { + const res = await chai.request(app).post('/api/auth/gitAccount').send({ + username: 'admin', + gitAccount: 'new-account', + }); + res.should.have.status(401); + }); + + it('should fail to get the current user metadata if not logged in', async function () { + const res = await chai.request(app).get('/api/auth/me'); + res.should.have.status(401); + }); + + it('should fail to login with invalid credentials', async function () { + const res = await chai.request(app).post('/api/auth/login').send({ + username: 'admin', + password: 'invalid', + }); + res.should.have.status(401); + }); + }); + + describe('test create user', async function () { + beforeEach(async function () { + await db.deleteUser('newuser'); + await db.deleteUser('nonadmin'); + }); + + it('should fail to create user when not authenticated', async function () { + const res = await chai.request(app).post('/api/auth/create-user').send({ + username: 'newuser', + password: 'newpass', + email: 'new@email.com', + gitAccount: 'newgit', + }); + + res.should.have.status(401); + res.body.should.have + .property('message') + .eql('You are not authorized to perform this action...'); + }); + + it('should fail to create user when not admin', async function () { + await db.deleteUser('nonadmin'); + await 
db.createUser('nonadmin', 'nonadmin', 'nonadmin@test.com', 'nonadmin', false);
+
+      // First login as non-admin user
+      const loginRes = await chai.request(app).post('/api/auth/login').send({
+        username: 'nonadmin',
+        password: 'nonadmin',
+      });
+
+      loginRes.should.have.status(200);
+
+      let nonAdminCookie;
+      // Get the connect cookie
+      loginRes.headers['set-cookie'].forEach((x) => {
+        if (x.startsWith('connect')) {
+          nonAdminCookie = x.split(';')[0];
+        }
+      });
+
+      console.log('nonAdminCookie', nonAdminCookie);
+
+      const res = await chai
+        .request(app)
+        .post('/api/auth/create-user')
+        .set('Cookie', nonAdminCookie)
+        .send({
+          username: 'newuser',
+          password: 'newpass',
+          email: 'new@email.com',
+          gitAccount: 'newgit',
+        });
+
+      res.should.have.status(401);
+      res.body.should.have
+        .property('message')
+        .eql('You are not authorized to perform this action...');
+    });
+
+    it('should fail to create user with missing required fields', async function () {
+      // First login as admin
+      const loginRes = await chai.request(app).post('/api/auth/login').send({
+        username: 'admin',
+        password: 'admin',
+      });
+
+      const adminCookie = loginRes.headers['set-cookie'][0].split(';')[0];
+
+      const res = await chai
+        .request(app)
+        .post('/api/auth/create-user')
+        .set('Cookie', adminCookie)
+        .send({
+          username: 'newuser',
+          // missing password
+          email: 'new@email.com',
+          gitAccount: 'newgit',
+        });
+
+      res.should.have.status(400);
+      res.body.should.have
+        .property('message')
+        .eql('Missing required fields: username, password, email, and gitAccount are required');
+    });
+
+    it('should successfully create a new user', async function () {
+      // First login as admin
+      const loginRes = await chai.request(app).post('/api/auth/login').send({
+        username: 'admin',
+        password: 'admin',
+      });
+
+      const adminCookie = loginRes.headers['set-cookie'][0].split(';')[0];
+
+      const res = await chai
+        .request(app)
+        .post('/api/auth/create-user')
+        .set('Cookie', adminCookie)
+        .send({
+          username:
'newuser',
+          password: 'newpass',
+          email: 'new@email.com',
+          gitAccount: 'newgit',
+          admin: false,
+        });
+
+      res.should.have.status(201);
+      res.body.should.have.property('message').eql('User created successfully');
+      res.body.should.have.property('username').eql('newuser');
+
+      // Verify we can login with the new user
+      const newUserLoginRes = await chai.request(app).post('/api/auth/login').send({
+        username: 'newuser',
+        password: 'newpass',
+      });
+
+      newUserLoginRes.should.have.status(200);
+    });
+
+    it('should fail to create user when username already exists', async function () {
+      // First login as admin
+      const loginRes = await chai.request(app).post('/api/auth/login').send({
+        username: 'admin',
+        password: 'admin',
+      });
+
+      const adminCookie = loginRes.headers['set-cookie'][0].split(';')[0];
+
+      const res = await chai
+        .request(app)
+        .post('/api/auth/create-user')
+        .set('Cookie', adminCookie)
+        .send({
+          username: 'newuser',
+          password: 'newpass',
+          email: 'new@email.com',
+          gitAccount: 'newgit',
+          admin: false,
+        });
+
+      res.should.have.status(201);
+
+      // Creating the same username a second time should be rejected
+      const failCreateRes = await chai
+        .request(app)
+        .post('/api/auth/create-user')
+        .set('Cookie', adminCookie)
+        .send({
+          username: 'newuser',
+          password: 'newpass',
+          email: 'new@email.com',
+          gitAccount: 'newgit',
+          admin: false,
+        });
+
+      failCreateRes.should.have.status(400);
+    });
+  });
+
+  after(async function () {
+    await service.httpServer.close();
+  });
+});
diff --git a/test/testParseAction.test.js b/test/testParseAction.test.js
new file mode 100644
index 000000000..02686fc1d
--- /dev/null
+++ b/test/testParseAction.test.js
@@ -0,0 +1,83 @@
+// Import the dependencies for testing
+const chai = require('chai');
+chai.should();
+const expect = chai.expect;
+const preprocessor = require('../src/proxy/processors/pre-processor/parseAction');
+const db = require('../src/db');
+let testRepo = null;
+
+const TEST_REPO = {
+  url:
'https://github.com/finos/git-proxy.git', + name: 'git-proxy', + project: 'finos', +}; + +describe('Pre-processor: parseAction', async () => { + before(async function () { + // make sure the test repo exists as the presence of the repo makes a difference to handling of urls + testRepo = await db.getRepoByUrl(TEST_REPO.url); + if (!testRepo) { + testRepo = await db.createRepo(TEST_REPO); + } + }); + after(async function () { + // clean up test DB + await db.deleteRepo(testRepo._id); + }); + + it('should be able to parse a pull request into an action', async function () { + const req = { + originalUrl: '/github.com/finos/git-proxy.git/git-upload-pack', + method: 'GET', + headers: {}, + }; + + const action = await preprocessor.exec(req); + expect(action.timestamp).is.greaterThan(0); + expect(action.id).to.not.be.false; + expect(action.type).to.equal('pull'); + expect(action.url).to.equal('https://github.com/finos/git-proxy.git'); + }); + + it('should be able to parse a pull request with a legacy path into an action', async function () { + const req = { + originalUrl: '/finos/git-proxy.git/git-upload-pack', + method: 'GET', + headers: {}, + }; + + const action = await preprocessor.exec(req); + expect(action.timestamp).is.greaterThan(0); + expect(action.id).to.not.be.false; + expect(action.type).to.equal('pull'); + expect(action.url).to.equal('https://github.com/finos/git-proxy.git'); + }); + + it('should be able to parse a push request into an action', async function () { + const req = { + originalUrl: '/github.com/finos/git-proxy.git/git-receive-pack', + method: 'POST', + headers: { 'content-type': 'application/x-git-receive-pack-request' }, + }; + + const action = await preprocessor.exec(req); + expect(action.timestamp).is.greaterThan(0); + expect(action.id).to.not.be.false; + expect(action.type).to.equal('push'); + expect(action.url).to.equal('https://github.com/finos/git-proxy.git'); + }); + + it('should be able to parse a push request with a legacy path into an 
action', async function () { + const req = { + originalUrl: '/finos/git-proxy.git/git-receive-pack', + method: 'POST', + headers: { 'content-type': 'application/x-git-receive-pack-request' }, + }; + + const action = await preprocessor.exec(req); + expect(action.timestamp).is.greaterThan(0); + expect(action.id).to.not.be.false; + expect(action.type).to.equal('push'); + expect(action.url).to.equal('https://github.com/finos/git-proxy.git'); + }); +}); diff --git a/test/testParsePush.test.js b/test/testParsePush.test.js new file mode 100644 index 000000000..334aa005e --- /dev/null +++ b/test/testParsePush.test.js @@ -0,0 +1,1065 @@ +const { expect } = require('chai'); +const sinon = require('sinon'); +const zlib = require('zlib'); + +const { + exec, + getCommitData, + getPackMeta, + parsePacketLines, + parseTag, + unpack, +} = require('../src/proxy/processors/push-action/parsePush'); + +import { EMPTY_COMMIT_HASH, FLUSH_PACKET, PACK_SIGNATURE } from '../src/proxy/processors/constants'; + +/** + * Creates a simplified sample PACK buffer for testing. + * @param {number} numEntries - Number of entries in the PACK file. + * @param {string} commitContent - Content of the commit object. + * @param {number} type - Type of the object (1 for commit). + * @return {Buffer} - The generated PACK buffer. 
+ */ +function createSamplePackBuffer( + numEntries = 1, + commitContent = 'tree 123\nparent 456\nauthor A 123 +0000\ncommitter C 456 +0000\n\nmessage', + type = 1, +) { + const header = Buffer.alloc(12); + header.write(PACK_SIGNATURE, 0, 4, 'utf-8'); // Signature + header.writeUInt32BE(2, 4); // Version + header.writeUInt32BE(numEntries, 8); // Number of entries + + const originalContent = Buffer.from(commitContent, 'utf8'); + const compressedContent = zlib.deflateSync(originalContent); // actual zlib for setup + + // Basic type/size encoding (assumes small sizes for simplicity) + // Real PACK files use variable-length encoding for size + let typeAndSize = (type << 4) | (compressedContent.length & 0x0f); // Lower 4 bits of size + if (compressedContent.length >= 16) { + typeAndSize |= 0x80; + } + const objectHeader = Buffer.from([typeAndSize]); // Placeholder, actual size encoding is complex + + // Combine parts and append checksum + const packContent = Buffer.concat([objectHeader, compressedContent]); + const checksum = Buffer.alloc(20); + + return Buffer.concat([header, packContent, checksum]); +} + +/** + * Creates a packet line buffer from an array of lines. + * Each line is prefixed with its length in hex format, and the last line is a flush packet. + * @param {string[]} lines - Array of lines to be included in the buffer. + * @return {Buffer} - The generated buffer containing the packet lines. + */ +function createPacketLineBuffer(lines) { + let buffer = Buffer.alloc(0); + lines.forEach((line) => { + const lengthInHex = (line.length + 4).toString(16).padStart(4, '0'); + buffer = Buffer.concat([buffer, Buffer.from(lengthInHex, 'ascii'), Buffer.from(line, 'ascii')]); + }); + buffer = Buffer.concat([buffer, Buffer.from(FLUSH_PACKET, 'ascii')]); + + return buffer; +} + +/** + * Creates a simplified sample PACK buffer for tag objects. + * @param {string} tagContent - Content of the tag object. + * @param {number} type - Type of the object (4 for tag). 
+ * @return {Buffer} - The generated PACK buffer. + */ +function createSampleTagPackBuffer( + tagContent = 'object 1234567890abcdef1234567890abcdef12345678\ntype commit\ntag v1.0.0\ntagger Test Tagger 1234567890 +0000\n\nTag message', + type = 4, +) { + const header = Buffer.alloc(12); + header.write(PACK_SIGNATURE, 0, 4, 'utf-8'); // Signature + header.writeUInt32BE(2, 4); // Version + header.writeUInt32BE(1, 8); // Number of entries (1 tag) + + const originalContent = Buffer.from(tagContent, 'utf8'); + const compressedContent = zlib.deflateSync(originalContent); + + // Basic type/size encoding for tag objects + let typeAndSize = (type << 4) | (compressedContent.length & 0x0f); + if (compressedContent.length >= 16) { + typeAndSize |= 0x80; + } + const objectHeader = Buffer.from([typeAndSize]); + + // Combine parts and append checksum + const packContent = Buffer.concat([objectHeader, compressedContent]); + const checksum = Buffer.alloc(20); + + return Buffer.concat([header, packContent, checksum]); +} + +/** + * Creates an empty PACK buffer for testing. + * @return {Buffer} - The generated buffer containing the PACK header and checksum. 
+ */ +function createEmptyPackBuffer() { + const header = Buffer.alloc(12); + header.write(PACK_SIGNATURE, 0, 4, 'utf-8'); // signature + header.writeUInt32BE(2, 4); // version + header.writeUInt32BE(0, 8); // number of entries + + const checksum = Buffer.alloc(20); // fake checksum (all zeros) + return Buffer.concat([header, checksum]); +} + +describe('parsePackFile', () => { + let action; + let req; + let sandbox; + let zlibInflateStub; // No deflate stub used due to complexity of PACK encoding + + beforeEach(() => { + sandbox = sinon.createSandbox(); + + // Mock Action and Step and spy on methods + action = { + branch: null, + commitFrom: null, + commitTo: null, + commitData: [], + user: null, + steps: [], + addStep: sandbox.spy(function (step) { + this.steps.push(step); + }), + setCommit: sandbox.spy(function (from, to) { + this.commitFrom = from; + this.commitTo = to; + }), + }; + + req = { + body: null, + }; + + zlibInflateStub = sandbox.stub(zlib, 'inflateSync'); + }); + + afterEach(() => { + sandbox.restore(); + }); + + describe('exec', () => { + it('should add error step if req.body is missing', async () => { + req.body = undefined; + const result = await exec(req, action); + + expect(result).to.equal(action); + const step = action.steps[0]; + expect(step.stepName).to.equal('parsePackFile'); + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('No body found in request'); + }); + + it('should add error step if req.body is empty', async () => { + req.body = Buffer.alloc(0); + const result = await exec(req, action); + + expect(result).to.equal(action); + const step = action.steps[0]; + expect(step.stepName).to.equal('parsePackFile'); + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('No body found in request'); + }); + + it('should add error step if no ref updates found', async () => { + const packetLines = ['some other line\n', 'another line\n']; + req.body = createPacketLineBuffer(packetLines); // We don't include 
PACK data (only testing ref updates) + const result = await exec(req, action); + + expect(result).to.equal(action); + const step = action.steps[0]; + expect(step.stepName).to.equal('parsePackFile'); + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('push one ref at a time'); + expect(step.logs[0]).to.include('Invalid number of ref updates'); + }); + + it('should add error step if multiple ref updates found', async () => { + const packetLines = [ + 'oldhash1 newhash1 refs/heads/main\0caps\n', + 'oldhash2 newhash2 refs/heads/develop\0caps\n', + ]; + req.body = createPacketLineBuffer(packetLines); + const result = await exec(req, action); + + expect(result).to.equal(action); + const step = action.steps[0]; + expect(step.stepName).to.equal('parsePackFile'); + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('push one ref at a time'); + expect(step.logs[0]).to.include('Invalid number of ref updates'); + expect(step.logs[1]).to.include('Expected 1, but got 2'); + }); + + it('should add error step if PACK data is missing', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/heads/feature/test'; + const packetLines = [`${oldCommit} ${newCommit} ${ref}\0capa\n`]; + + req.body = createPacketLineBuffer(packetLines); + + const result = await exec(req, action); + + expect(result).to.equal(action); + const step = action.steps[0]; + expect(step.stepName).to.equal('parsePackFile'); + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('PACK data is missing'); + + expect(action.branch).to.equal(ref); + expect(action.setCommit.calledOnceWith(oldCommit, newCommit)).to.be.true; + }); + + it('should successfully parse a valid push request', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/heads/main'; + const packetLine = `${oldCommit} ${newCommit} ${ref}\0capabilities\n`; + + const commitContent = + 'tree 
1234567890abcdef1234567890abcdef12345678\n' + + 'parent abcdef1234567890abcdef1234567890abcdef12\n' + + 'author Test Author 1234567890 +0000\n' + + 'committer Test Committer 1234567890 +0000\n\n' + + 'feat: Add new feature\n\n' + + 'This is the commit body.'; + const commitContentBuffer = Buffer.from(commitContent, 'utf8'); + + zlibInflateStub.returns(commitContentBuffer); + + const numEntries = 1; + const packBuffer = createSamplePackBuffer(numEntries, commitContent, 1); // Use real zlib + req.body = Buffer.concat([createPacketLineBuffer([packetLine]), packBuffer]); + + const result = await exec(req, action); + expect(result).to.equal(action); + + // Check step and action properties + const step = action.steps.find((s) => s.stepName === 'parsePackFile'); + expect(step).to.exist; + expect(step.error).to.be.false; + expect(step.errorMessage).to.be.null; + + expect(action.branch).to.equal(ref); + expect(action.setCommit.calledOnceWith(oldCommit, newCommit)).to.be.true; + expect(action.commitFrom).to.equal(oldCommit); + expect(action.commitTo).to.equal(newCommit); + expect(action.user).to.equal('Test Committer'); + + // Check parsed commit data + const commitMessages = action.commitData.map((commit) => commit.message); + expect(action.commitData).to.be.an('array').with.lengthOf(1); + expect(commitMessages[0]).to.equal('feat: Add new feature\n\nThis is the commit body.'); + + const parsedCommit = action.commitData[0]; + expect(parsedCommit.tree).to.equal('1234567890abcdef1234567890abcdef12345678'); + expect(parsedCommit.parent).to.equal('abcdef1234567890abcdef1234567890abcdef12'); + expect(parsedCommit.author).to.equal('Test Author'); + expect(parsedCommit.committer).to.equal('Test Committer'); + expect(parsedCommit.commitTimestamp).to.equal('1234567890'); + expect(parsedCommit.message).to.equal('feat: Add new feature\n\nThis is the commit body.'); + expect(parsedCommit.authorEmail).to.equal('author@example.com'); + + expect(step.content.meta).to.deep.equal({ + sig: 
PACK_SIGNATURE, + version: 2, + entries: numEntries, + }); + }); + + it('should handle initial commit (zero hash oldCommit)', async () => { + const oldCommit = '0'.repeat(40); // Zero hash + const newCommit = 'b'.repeat(40); + const ref = 'refs/heads/main'; + const packetLine = `${oldCommit} ${newCommit} ${ref}\0capabilities\n`; + + // Commit content without a parent line + const commitContent = + 'tree 1234567890abcdef1234567890abcdef12345678\n' + + 'author Test Author 1234567890 +0000\n' + + 'committer Test Committer 1234567890 +0100\n\n' + + 'feat: Initial commit'; + const parentFromCommit = '0'.repeat(40); // Expected parent hash + + const commitContentBuffer = Buffer.from(commitContent, 'utf8'); + zlibInflateStub.returns(commitContentBuffer); + + const packBuffer = createSamplePackBuffer(1, commitContent, 1); // Use real zlib + req.body = Buffer.concat([createPacketLineBuffer([packetLine]), packBuffer]); + + const result = await exec(req, action); + + expect(result).to.equal(action); + const step = action.steps.find((s) => s.stepName === 'parsePackFile'); + expect(step).to.exist; + expect(step.error).to.be.false; + + expect(action.branch).to.equal(ref); + expect(action.setCommit.calledOnceWith(oldCommit, newCommit)).to.be.true; + + // commitFrom should still be the zero hash + expect(action.commitFrom).to.equal(oldCommit); + expect(action.commitTo).to.equal(newCommit); + expect(action.user).to.equal('Test Committer'); + + // Check parsed commit data reflects no parent (zero hash) + expect(action.commitData[0].parent).to.equal(parentFromCommit); + }); + + it('should handle commit with multiple parents (merge commit)', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'c'.repeat(40); // Merge commit hash + const ref = 'refs/heads/main'; + const packetLine = `${oldCommit} ${newCommit} ${ref}\0capabilities\n`; + + const parent1 = 'b1'.repeat(20); + const parent2 = 'b2'.repeat(20); + const commitContent = + 'tree 
1234567890abcdef1234567890abcdef12345678\n' + + `parent ${parent1}\n` + + `parent ${parent2}\n` + + 'author Test Author 1234567890 +0000\n' + + 'committer Test Committer 1234567890 +0100\n\n' + + "Merge branch 'feature'"; + + const commitContentBuffer = Buffer.from(commitContent, 'utf8'); + zlibInflateStub.returns(commitContentBuffer); + + const packBuffer = createSamplePackBuffer(1, commitContent, 1); // Use real zlib + req.body = Buffer.concat([createPacketLineBuffer([packetLine]), packBuffer]); + + const result = await exec(req, action); + expect(result).to.equal(action); + + // Check step and action properties + const step = action.steps.find((s) => s.stepName === 'parsePackFile'); + expect(step).to.exist; + expect(step.error).to.be.false; + + expect(action.branch).to.equal(ref); + expect(action.setCommit.calledOnceWith(oldCommit, newCommit)).to.be.true; + expect(action.commitFrom).to.equal(oldCommit); + expect(action.commitTo).to.equal(newCommit); + + // Parent should be the FIRST parent in the commit content + expect(action.commitData[0].parent).to.equal(parent1); + }); + + it('should add error step if getCommitData throws error', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/heads/main'; + const packetLine = `${oldCommit} ${newCommit} ${ref}\0capabilities\n`; + + // Malformed commit content - missing tree line + const commitContent = + 'parent abcdef1234567890abcdef1234567890abcdef12\n' + + 'author Test Author 1678886400 +0000\n' + + 'committer Test Committer 1678886460 +0100\n\n' + + 'feat: Missing tree'; + const commitContentBuffer = Buffer.from(commitContent, 'utf8'); + zlibInflateStub.returns(commitContentBuffer); + + const packBuffer = createSamplePackBuffer(1, commitContent, 1); + req.body = Buffer.concat([createPacketLineBuffer([packetLine]), packBuffer]); + + const result = await exec(req, action); + expect(result).to.equal(action); + + const step = action.steps.find((s) => s.stepName === 
'parsePackFile'); + expect(step).to.exist; + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('Invalid commit data: Missing tree'); + }); + + it('should add error step if data after flush packet does not start with "PACK"', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/heads/main'; + const packetLines = [`${oldCommit} ${newCommit} ${ref}\0capa\n`]; + + const packetLineBuffer = createPacketLineBuffer(packetLines); + const garbageData = Buffer.from('NOT PACK DATA'); + req.body = Buffer.concat([packetLineBuffer, garbageData]); + + const result = await exec(req, action); + expect(result).to.equal(action); + + const step = action.steps[0]; + expect(step.stepName).to.equal('parsePackFile'); + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('Invalid PACK data structure'); + expect(step.errorMessage).to.not.include('PACK data is missing'); + + expect(action.branch).to.equal(ref); + expect(action.setCommit.calledOnceWith(oldCommit, newCommit)).to.be.true; + }); + + it('should correctly identify PACK data even if "PACK" appears in packet lines', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/heads/develop'; + const packetLines = [ + `${oldCommit} ${newCommit} ${ref}\0capa\n`, + 'some other data containing PACK keyword', // Include "PACK" within a packet line's content + ]; + + const commitContent = + 'tree 1234567890abcdef1234567890abcdef12345678\n' + + `parent ${oldCommit}\n` + + 'author Test Author 1234567890 +0000\n' + + 'committer Test Committer 1234567890 +0000\n\n' + + 'Test commit message with PACK inside'; + const samplePackBuffer = createSamplePackBuffer(1, commitContent, 1); + + zlibInflateStub.returns(Buffer.from(commitContent, 'utf8')); + + const packetLineBuffer = createPacketLineBuffer(packetLines); + req.body = Buffer.concat([packetLineBuffer, samplePackBuffer]); + + const result = await exec(req, 
action); + expect(result).to.equal(action); + expect(action.steps.length).to.equal(1); + + // Check that the step was added correctly, and no error present + const step = action.steps[0]; + expect(step.stepName).to.equal('parsePackFile'); + expect(step.error).to.be.false; + expect(step.errorMessage).to.be.null; + + // Verify action properties were parsed correctly + expect(action.branch).to.equal(ref); + expect(action.setCommit.calledOnceWith(oldCommit, newCommit)).to.be.true; + expect(action.commitFrom).to.equal(oldCommit); + expect(action.commitTo).to.equal(newCommit); + expect(action.commitData).to.be.an('array').with.lengthOf(1); + expect(action.commitData[0].message).to.equal('Test commit message with PACK inside'); + expect(action.commitData[0].committer).to.equal('Test Committer'); + expect(action.user).to.equal('Test Committer'); + }); + + it('should handle PACK data starting immediately after flush packet', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/heads/master'; + const packetLines = [`${oldCommit} ${newCommit} ${ref}\0`]; + + const commitContent = + 'tree 1234567890abcdef1234567890abcdef12345678\n' + + `parent ${oldCommit}\n` + + 'author Test Author 1234567890 +0000\n' + + 'committer Test Committer 1234567890 +0000\n\n' + + 'Commit A'; + const samplePackBuffer = createSamplePackBuffer(1, commitContent, 1); + zlibInflateStub.returns(Buffer.from(commitContent, 'utf8')); + + const packetLineBuffer = createPacketLineBuffer(packetLines); + req.body = Buffer.concat([packetLineBuffer, samplePackBuffer]); + + const result = await exec(req, action); + + expect(result).to.equal(action); + const step = action.steps[0]; + expect(step.error).to.be.false; + expect(action.commitData[0].message).to.equal('Commit A'); + }); + + it('should add error step if PACK header parsing fails (getPackMeta with wrong signature)', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + 
const ref = 'refs/heads/fix'; + const packetLines = [`${oldCommit} ${newCommit} ${ref}\0capa\n`]; + + const packetLineBuffer = createPacketLineBuffer(packetLines); + const badPackBuffer = createSamplePackBuffer(); + badPackBuffer.write('AAAA', 0, 4, 'utf-8'); // Invalid signature, should be 'PACK' + + req.body = Buffer.concat([packetLineBuffer, badPackBuffer]); + + const result = await exec(req, action); + expect(result).to.equal(action); + + const step = action.steps[0]; + expect(step.stepName).to.equal('parsePackFile'); + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('Invalid PACK data structure'); + }); + + it('should return empty commitData on empty branch push', async () => { + const emptyPackBuffer = createEmptyPackBuffer(); + + const newCommit = 'b'.repeat(40); + const ref = 'refs/heads/feature/emptybranch'; + const packetLine = `${EMPTY_COMMIT_HASH} ${newCommit} ${ref}\0capabilities\n`; + + req.body = Buffer.concat([createPacketLineBuffer([packetLine]), emptyPackBuffer]); + + const result = await exec(req, action); + + expect(result).to.equal(action); + + const step = action.steps.find((s) => s.stepName === 'parsePackFile'); + expect(step).to.exist; + expect(step.error).to.be.false; + expect(action.branch).to.equal(ref); + expect(action.setCommit.calledOnceWith(EMPTY_COMMIT_HASH, newCommit)).to.be.true; + + expect(action.commitData).to.be.an('array').with.lengthOf(0); + }); + }); + + describe('Tag Push Tests', () => { + it('should successfully parse a valid tag push request', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/tags/v1.0.0'; + const packetLine = `${oldCommit} ${newCommit} ${ref}\0capabilities\n`; + + const tagContent = + 'object 1234567890abcdef1234567890abcdef12345678\ntype commit\ntag v1.0.0\ntagger Test Tagger 1234567890 +0000\n\nThis is a test tag message'; + const tagContentBuffer = Buffer.from(tagContent, 'utf8'); + + 
zlibInflateStub.returns(tagContentBuffer); + + const packBuffer = createSampleTagPackBuffer(tagContent, 4); // Type 4 = tag + req.body = Buffer.concat([createPacketLineBuffer([packetLine]), packBuffer]); + + const result = await exec(req, action); + expect(result).to.equal(action); + + // Check step and action properties + const step = action.steps.find((s) => s.stepName === 'parsePackFile'); + expect(step).to.exist; + expect(step.error).to.be.false; + expect(step.errorMessage).to.be.null; + + expect(action.tag).to.equal(ref); + expect(action.branch).to.be.undefined; + expect(action.actionType).to.equal('tag'); // ActionType.TAG enum value + expect(action.setCommit.calledOnceWith(oldCommit, newCommit)).to.be.true; + expect(action.commitFrom).to.equal(oldCommit); + expect(action.commitTo).to.equal(newCommit); + + // Check parsed tag data + expect(action.tagData).to.be.an('array').with.lengthOf(1); + const parsedTag = action.tagData[0]; + expect(parsedTag.object).to.equal('1234567890abcdef1234567890abcdef12345678'); + expect(parsedTag.type).to.equal('commit'); + expect(parsedTag.tagName).to.equal('v1.0.0'); + expect(parsedTag.tagger).to.equal('Test Tagger'); + expect(parsedTag.message).to.equal('This is a test tag message'); + + expect(action.user).to.equal('Test Tagger'); + }); + + it('should handle tag with missing tagger line with error', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/tags/v1.0.0'; + const packetLine = `${oldCommit} ${newCommit} ${ref}\0capabilities\n`; + + // Tag content without tagger line + const malformedTagContent = + 'object 1234567890abcdef1234567890abcdef12345678\ntype commit\ntag v1.0.0\n\nTag without tagger'; + const tagContentBuffer = Buffer.from(malformedTagContent, 'utf8'); + + zlibInflateStub.returns(tagContentBuffer); + + const packBuffer = createSampleTagPackBuffer(malformedTagContent, 4); + req.body = Buffer.concat([createPacketLineBuffer([packetLine]), packBuffer]); + + 
const result = await exec(req, action); + expect(result).to.equal(action); + + // Should set tag name from packet line + expect(action.tag).to.equal(ref); + expect(action.branch).to.be.undefined; + expect(action.actionType).to.equal('tag'); + + // Should have error due to parsing failure + const step = action.steps.find((s) => s.stepName === 'parsePackFile'); + expect(step).to.exist; + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('Invalid tag object: no tagger line'); + }); + + it('should handle tag with incomplete data with error', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/tags/v2.0.0'; + const packetLine = `${oldCommit} ${newCommit} ${ref}\0capabilities\n`; + + // Tag content missing object field + const incompleteTagContent = + 'type commit\ntag v2.0.0\ntagger Test Tagger 1234567890 +0000\n\nIncomplete tag'; + const tagContentBuffer = Buffer.from(incompleteTagContent, 'utf8'); + + zlibInflateStub.returns(tagContentBuffer); + + const packBuffer = createSampleTagPackBuffer(incompleteTagContent, 4); + req.body = Buffer.concat([createPacketLineBuffer([packetLine]), packBuffer]); + + const result = await exec(req, action); + expect(result).to.equal(action); + + expect(action.tag).to.equal(ref); + expect(action.actionType).to.equal('tag'); + + // Should have error due to parsing failure + const step = action.steps.find((s) => s.stepName === 'parsePackFile'); + expect(step).to.exist; + expect(step.error).to.be.true; + expect(step.errorMessage).to.include('Invalid tag object'); + }); + + it('should handle annotated tag with complex message', async () => { + const oldCommit = 'a'.repeat(40); + const newCommit = 'b'.repeat(40); + const ref = 'refs/tags/v3.0.0-beta1'; + const packetLine = `${oldCommit} ${newCommit} ${ref}\0capabilities\n`; + + const complexMessage = + 'Release v3.0.0-beta1\n\nThis is a major release with:\n- Feature A\n- Feature B\n\nBreaking changes:\n- API change in 
module X'; + const tagContent = `object 1234567890abcdef1234567890abcdef12345678\ntype commit\ntag v3.0.0-beta1\ntagger Release Bot 1678886400 +0000\n\n${complexMessage}`; + const tagContentBuffer = Buffer.from(tagContent, 'utf8'); + + zlibInflateStub.returns(tagContentBuffer); + + const packBuffer = createSampleTagPackBuffer(tagContent, 4); + req.body = Buffer.concat([createPacketLineBuffer([packetLine]), packBuffer]); + + const result = await exec(req, action); + expect(result).to.equal(action); + + const step = action.steps.find((s) => s.stepName === 'parsePackFile'); + expect(step.error).to.be.false; + + expect(action.tag).to.equal(ref); + expect(action.tagData).to.be.an('array').with.lengthOf(1); + + const parsedTag = action.tagData[0]; + expect(parsedTag.tagName).to.equal('v3.0.0-beta1'); + expect(parsedTag.tagger).to.equal('Release Bot'); + expect(parsedTag.message).to.equal(complexMessage); + expect(action.user).to.equal('Release Bot'); + }); + }); + + describe('getPackMeta', () => { + it('should correctly parse PACK header', () => { + const buffer = createSamplePackBuffer(5); // 5 entries + const [meta, contentBuff] = getPackMeta(buffer); + + expect(meta).to.deep.equal({ + sig: PACK_SIGNATURE, + version: 2, + entries: 5, + }); + expect(contentBuff).to.be.instanceOf(Buffer); + expect(contentBuff.length).to.equal(buffer.length - 12); // Remaining buffer after header + }); + + it('should handle buffer exactly 12 bytes long', () => { + const buffer = createSamplePackBuffer(1).slice(0, 12); // Only header + const [meta, contentBuff] = getPackMeta(buffer); + + expect(meta).to.deep.equal({ + sig: PACK_SIGNATURE, + version: 2, + entries: 1, + }); + expect(contentBuff.length).to.equal(0); // No content left + }); + }); + + describe('unpack', () => { + let deflateStub; + + beforeEach(() => { + // Need to stub deflate for unpack tests + deflateStub = sandbox.stub(zlib, 'deflateSync'); + }); + + it('should call zlib.inflateSync and zlib.deflateSync', () => { + const 
inputBuf = Buffer.from('compressed data'); + const inflatedBuffer = Buffer.from('uncompressed data', 'utf8'); + const deflatedResult = Buffer.from('re-deflated'); // Mock deflated buffer + + zlibInflateStub.withArgs(inputBuf).returns(inflatedBuffer); + deflateStub.withArgs(inflatedBuffer).returns(deflatedResult); + + const [resultString, resultLength] = unpack(inputBuf); + + expect(zlibInflateStub.calledOnceWith(inputBuf)).to.be.true; + expect(deflateStub.calledOnceWith(inflatedBuffer)).to.be.true; // Check local stub + expect(resultString).to.equal(inflatedBuffer.toString('utf8')); + expect(resultLength).to.equal(deflatedResult.length); // unpack returns length of the deflated buffer + }); + + it('should return inflated string and deflated length', () => { + const inputBuf = Buffer.from('dummy compressed'); + const inflatedBuffer = Buffer.from('real uncompressed text', 'utf8'); + const deflatedResult = Buffer.from('tiny'); // Different length + + zlibInflateStub.withArgs(inputBuf).returns(inflatedBuffer); + deflateStub.withArgs(inflatedBuffer).returns(deflatedResult); + + const [content, size] = unpack(inputBuf); + + expect(content).to.equal(inflatedBuffer.toString('utf8')); + expect(size).to.equal(deflatedResult.length); + }); + }); + + describe('getCommitData', () => { + it('should return empty array if no type 1 contents', () => { + const contents = [ + { type: 2, content: 'blob' }, + { type: 3, content: 'tree' }, + ]; + expect(getCommitData(contents)).to.deep.equal([]); + }); + + it('should parse a single valid commit object', () => { + const commitContent = `tree 123\nparent 456\nauthor Au Thor 111 +0000\ncommitter Com Itter 222 +0100\n\nCommit message here`; + const contents = [{ type: 1, content: commitContent }]; + const result = getCommitData(contents); + + expect(result).to.be.an('array').with.lengthOf(1); + expect(result[0]).to.deep.equal({ + tree: '123', + parent: '456', + author: 'Au Thor', + committer: 'Com Itter', + committerEmail: 'c@e.com', + 
commitTimestamp: '222', + message: 'Commit message here', + authorEmail: 'a@e.com', + }); + }); + + it('should parse multiple valid commit objects', () => { + const commit1 = `tree 111\nparent 000\nauthor A1 1678880001 +0000\ncommitter C1 1678880002 +0000\n\nMsg1`; + const commit2 = `tree 222\nparent 111\nauthor A2 1678880003 +0100\ncommitter C2 1678880004 +0100\n\nMsg2`; + const contents = [ + { type: 1, content: commit1 }, + { type: 3, content: 'tree data' }, // non-commit types must be ignored + { type: 1, content: commit2 }, + ]; + + const result = getCommitData(contents); + expect(result).to.be.an('array').with.lengthOf(2); + + // Check first commit data + expect(result[0].message).to.equal('Msg1'); + expect(result[0].parent).to.equal('000'); + expect(result[0].author).to.equal('A1'); + expect(result[0].committer).to.equal('C1'); + expect(result[0].authorEmail).to.equal('a1@e.com'); + expect(result[0].commitTimestamp).to.equal('1678880002'); + + // Check second commit data + expect(result[1].message).to.equal('Msg2'); + expect(result[1].parent).to.equal('111'); + expect(result[1].author).to.equal('A2'); + expect(result[1].committer).to.equal('C2'); + expect(result[1].authorEmail).to.equal('a2@e.com'); + expect(result[1].commitTimestamp).to.equal('1678880004'); + }); + + it('should default parent to zero hash if not present', () => { + const commitContent = `tree 123\nauthor Au Thor 111 +0000\ncommitter Com Itter 222 +0100\n\nCommit message here`; + const contents = [{ type: 1, content: commitContent }]; + const result = getCommitData(contents); + expect(result[0].parent).to.equal('0'.repeat(40)); + }); + + it('should handle commit messages with multiple lines', () => { + const commitContent = `tree 123\nparent 456\nauthor A 111 +0000\ncommitter C 222 +0100\n\nLine one\nLine two\n\nLine four`; + const contents = [{ type: 1, content: commitContent }]; + const result = getCommitData(contents); + expect(result[0].message).to.equal('Line one\nLine two\n\nLine 
four'); + }); + + it('should handle commits without a message body', () => { + const commitContent = `tree 123\nparent 456\nauthor A 111 +0000\ncommitter C 222 +0100\n`; + const contents = [{ type: 1, content: commitContent }]; + const result = getCommitData(contents); + expect(result[0].message).to.equal(''); + }); + + it('should throw error for invalid commit data (missing tree)', () => { + const commitContent = `parent 456\nauthor A 1234567890 +0000\ncommitter C 1234567890 +0000\n\nMsg`; + const contents = [{ type: 1, content: commitContent }]; + expect(() => getCommitData(contents)).to.throw('Invalid commit data: Missing tree'); + }); + + it('should throw error for invalid commit data (missing author)', () => { + const commitContent = `tree 123\nparent 456\ncommitter C 1234567890 +0000\n\nMsg`; + const contents = [{ type: 1, content: commitContent }]; + expect(() => getCommitData(contents)).to.throw('Invalid commit data: Missing author'); + }); + + it('should throw error for invalid commit data (missing committer)', () => { + const commitContent = `tree 123\nparent 456\nauthor A 1234567890 +0000\n\nMsg`; + const contents = [{ type: 1, content: commitContent }]; + expect(() => getCommitData(contents)).to.throw('Invalid commit data: Missing committer'); + }); + + it('should throw error for invalid author line (missing timezone offset)', () => { + const commitContent = `tree 123\nparent 456\nauthor A 1234567890\ncommitter C 1234567890 +0000\n\nMsg`; + const contents = [{ type: 1, content: commitContent }]; + expect(() => getCommitData(contents)).to.throw('Failed to parse person line'); + }); + + it('should correctly parse a commit with a GPG signature header', () => { + const gpgSignedCommit = + 'tree b4d3c0ffee1234567890abcdef1234567890aabbcc\n' + + 'parent 01dbeef9876543210fedcba9876543210fedcba\n' + + 'author Test Author 1744814600 +0100\n' + + 'committer Test Committer 1744814610 +0200\n' + + 'gpgsig -----BEGIN PGP SIGNATURE-----\n \n' + + ' 
wsFcBAABCAAQBQJn/8ISCRC1aQ7uu5UhlAAAntAQACeyQd6IykNXiN6m9DfVp8DJ\n' + + ' UsY64ws+Td0inrEee+cHXVI9uJn15RJYQkICwlM4TZsVGav7nYaVqO+gfAg2ORAH\n' + + ' ghUnwSFFs7ucN/p0a47ItkJmt04+jQIFlZIC+wy1u2H3aKJwqaF+kGP5SA33ahgV\n' + + ' ZWviKodXFki8/G+sKB63q1qrDw6aELtftEgeAPQUcuLzj+vu/m3dWrDbatfUXMkC\n' + + ' JC6PbFajqrJ5pEtFwBqqRE+oIsOM9gkNAti1yDD5eoS+bNXACe0hT0+UoIzn5a34\n' + + ' xcElXTSdAK/MRjGiLN91G2nWvlbpM5wAEqr5Bl5ealCc6BbWfPxbP46slaE5DfkD\n' + + ' u0+RkVX06MSSPqzOmEV14ZWKap5C19FpF9o/rY8vtLlCxjWMhtUvvdR4OQfQpEDY\n' + + ' eTqzCHRnM3+7r3ABAWt9v7cG99bIMEs3sGcMy11HMeaoBpye6vCIP4ghNnoB1hUJ\n' + + ' D7MD77jzk4Kbf4IzS5omExyMu3AiNZecZX4+1w/527yPhv3s/HB1Gfz0oCUned+6\n' + + ' b9Kkle+krsQ/EK/4gPcb/Kb1cTcm3HhjaOSYwA+JpApJQ0mrduH34AT5MZJuIPFe\n' + + ' QheLzQI1d2jmFs11GRC5hc0HBk1WmGm6U8+FBuxCX0ECZPdYeQJjUeWjnNeUoE6a\n' + + ' 5lytZU4Onk57nUhIMSrx\n' + + ' =IxZr\n' + + ' -----END PGP SIGNATURE-----\n\n' + + 'This is the commit message.\n' + + 'It can span multiple lines.\n\n' + + 'And include blank lines internally.'; + + const contents = [ + { type: 1, content: gpgSignedCommit }, + { + type: 1, + content: `tree 111\nparent 000\nauthor A1 1744814600 +0200\ncommitter C1 1744814610 +0200\n\nMsg1`, + }, + ]; + + const result = getCommitData(contents); + expect(result).to.be.an('array').with.lengthOf(2); + + // Check the GPG signed commit data + const gpgResult = result[0]; + expect(gpgResult.tree).to.equal('b4d3c0ffee1234567890abcdef1234567890aabbcc'); + expect(gpgResult.parent).to.equal('01dbeef9876543210fedcba9876543210fedcba'); + expect(gpgResult.author).to.equal('Test Author'); + expect(gpgResult.committer).to.equal('Test Committer'); + expect(gpgResult.authorEmail).to.equal('test.author@example.com'); + expect(gpgResult.commitTimestamp).to.equal('1744814610'); + expect(gpgResult.message).to.equal( + `This is the commit message.\nIt can span multiple lines.\n\nAnd include blank lines internally.`, + ); + + // Sanity check: the second commit should be the simple commit + const simpleResult 
= result[1]; + expect(simpleResult.message).to.equal('Msg1'); + expect(simpleResult.parent).to.equal('000'); + expect(simpleResult.author).to.equal('A1'); + expect(simpleResult.committer).to.equal('C1'); + expect(simpleResult.authorEmail).to.equal('a1@e.com'); + expect(simpleResult.commitTimestamp).to.equal('1744814610'); + }); + }); + + describe('parseTag', () => { + it('should parse a valid tag object correctly', () => { + const tagContent = + 'object 1234567890abcdef1234567890abcdef12345678\ntype commit\ntag v1.0.0\ntagger John Doe 1678886400 +0000\n\nFirst stable release'; + const tagObject = { content: tagContent }; + + const result = parseTag(tagObject); + + expect(result).to.deep.equal({ + object: '1234567890abcdef1234567890abcdef12345678', + type: 'commit', + tagName: 'v1.0.0', + tagger: 'John Doe', + taggerEmail: 'john@example.com', + timestamp: '1678886400', + message: 'First stable release', + }); + }); + + it('should parse tag with multi-line message correctly', () => { + const complexMessage = + 'Release v2.0.0\n\nMajor release with:\n- Feature A\n- Feature B\n\nBreaking changes included.'; + const tagContent = `object abcdef1234567890abcdef1234567890abcdef12\ntype commit\ntag v2.0.0\ntagger Release Bot 1678972800 +0000\n\n${complexMessage}`; + const tagObject = { content: tagContent }; + + const result = parseTag(tagObject); + + expect(result.object).to.equal('abcdef1234567890abcdef1234567890abcdef12'); + expect(result.type).to.equal('commit'); + expect(result.tagName).to.equal('v2.0.0'); + expect(result.tagger).to.equal('Release Bot'); + expect(result.message).to.equal(complexMessage); + }); + + it('should handle tag with empty message', () => { + const tagContent = + 'object 1234567890abcdef1234567890abcdef12345678\ntype commit\ntag v1.0.0\ntagger Jane Doe 1678886400 +0000\n\n'; + const tagObject = { content: tagContent }; + + const result = parseTag(tagObject); + + expect(result.message).to.equal(''); + expect(result.tagName).to.equal('v1.0.0'); + 
expect(result.tagger).to.equal('Jane Doe'); + }); + + it('should throw error when tagger line is missing', () => { + const tagContent = + 'object 1234567890abcdef1234567890abcdef12345678\ntype commit\ntag v1.0.0\n\nTag without tagger'; + const tagObject = { content: tagContent }; + + expect(() => parseTag(tagObject)).to.throw('Invalid tag object: no tagger line'); + }); + + it('should throw error when object line is missing', () => { + const tagContent = + 'type commit\ntag v1.0.0\ntagger John Doe 1678886400 +0000\n\nTag without object'; + const tagObject = { content: tagContent }; + + expect(() => parseTag(tagObject)).to.throw('Invalid tag object'); + }); + + it('should throw error when type line is missing', () => { + const tagContent = + 'object 1234567890abcdef1234567890abcdef12345678\ntag v1.0.0\ntagger John Doe 1678886400 +0000\n\nTag without type'; + const tagObject = { content: tagContent }; + + expect(() => parseTag(tagObject)).to.throw('Invalid tag object'); + }); + + it('should throw error when tag name is missing', () => { + const tagContent = + 'object 1234567890abcdef1234567890abcdef12345678\ntype commit\ntagger John Doe 1678886400 +0000\n\nTag without name'; + const tagObject = { content: tagContent }; + + expect(() => parseTag(tagObject)).to.throw('Invalid tag object'); + }); + + it('should handle tagger with complex email format', () => { + const tagContent = + 'object 1234567890abcdef1234567890abcdef12345678\ntype commit\ntag v1.0.0\ntagger John Doe (Developer) 1678886400 +0100\n\nTag with complex tagger'; + const tagObject = { content: tagContent }; + + const result = parseTag(tagObject); + + expect(result.tagger).to.equal('John Doe (Developer)'); + expect(result.tagName).to.equal('v1.0.0'); + expect(result.message).to.equal('Tag with complex tagger'); + }); + + it('should handle tag pointing to different object types', () => { + const tagContent = + 'object 1234567890abcdef1234567890abcdef12345678\ntype tree\ntag tree-tag\ntagger Tree Tagger 
1678886400 +0000\n\nTag pointing to tree object'; + const tagObject = { content: tagContent }; + + const result = parseTag(tagObject); + + expect(result.type).to.equal('tree'); + expect(result.tagName).to.equal('tree-tag'); + expect(result.tagger).to.equal('Tree Tagger'); + }); + }); + + describe('parsePacketLines', () => { + it('should parse multiple valid packet lines correctly and return the correct offset', () => { + const lines = ['line1 content', 'line2 more content\nwith newline', 'line3']; + const buffer = createPacketLineBuffer(lines); // Helper adds "0000" at the end + const expectedOffset = buffer.length; // Should indicate the end of the buffer after flush packet + const [parsedLines, offset] = parsePacketLines(buffer); + + expect(parsedLines).to.deep.equal(lines); + expect(offset).to.equal(expectedOffset); + }); + + it('should handle an empty input buffer', () => { + const buffer = Buffer.alloc(0); + const [parsedLines, offset] = parsePacketLines(buffer); + + expect(parsedLines).to.deep.equal([]); + expect(offset).to.equal(0); + }); + + it('should handle a buffer only with a flush packet', () => { + const buffer = Buffer.from(FLUSH_PACKET); + const [parsedLines, offset] = parsePacketLines(buffer); + + expect(parsedLines).to.deep.equal([]); + expect(offset).to.equal(4); + }); + + it('should handle lines with null characters correctly', () => { + const lines = ['line1\0capability=value', 'line2']; + const buffer = createPacketLineBuffer(lines); + const expectedOffset = buffer.length; + const [parsedLines, offset] = parsePacketLines(buffer); + + expect(parsedLines).to.deep.equal(lines); + expect(offset).to.equal(expectedOffset); + }); + + it('should stop parsing at the first flush packet', () => { + const lines = ['line1', 'line2']; + let buffer = createPacketLineBuffer(lines); + + // Add extra data after the flush packet + const extraData = Buffer.from('extradataafterflush'); + buffer = Buffer.concat([buffer, extraData]); + + const expectedOffset = 
buffer.length - extraData.length; + const [parsedLines, offset] = parsePacketLines(buffer); + + expect(parsedLines).to.deep.equal(lines); + expect(offset).to.equal(expectedOffset); + }); + + it('should throw an error if a packet line length exceeds buffer bounds', () => { + // 000A -> length 10, but actual line length is only 3 bytes + const invalidLengthBuffer = Buffer.from('000Aabc'); + expect(() => parsePacketLines(invalidLengthBuffer)).to.throw( + /Invalid packet line length 000A/, + ); + }); + + it('should throw an error for non-hex length prefix (all non-hex)', () => { + const invalidHexBuffer = Buffer.from('XXXXline'); + expect(() => parsePacketLines(invalidHexBuffer)).to.throw(/Invalid packet line length XXXX/); + }); + + it('should throw an error for non-hex length prefix (non-hex at the end)', () => { + // Cover the quirk of parseInt returning 0 instead of NaN + const invalidHexBuffer = Buffer.from('000zline'); + expect(() => parsePacketLines(invalidHexBuffer)).to.throw(/Invalid packet line length 000z/); + }); + + it('should handle buffer ending exactly after a valid line length without content', () => { + // 0008 -> length 8, but buffer ends after header (no content) + const incompleteBuffer = Buffer.from('0008'); + expect(() => parsePacketLines(incompleteBuffer)).to.throw(/Invalid packet line length 0008/); + }); + }); +}); diff --git a/test/testProxy.test.js b/test/testProxy.test.js new file mode 100644 index 000000000..99508b982 --- /dev/null +++ b/test/testProxy.test.js @@ -0,0 +1,308 @@ +const chai = require('chai'); +const sinon = require('sinon'); +const http = require('http'); +const https = require('https'); +const proxyquire = require('proxyquire'); + +const expect = chai.expect; + +describe('Proxy', () => { + let sandbox; + let Proxy; + let mockHttpServer; + let mockHttpsServer; + + beforeEach(() => { + sandbox = sinon.createSandbox(); + + mockHttpServer = { + listen: sandbox.stub().callsFake((port, callback) => { + if (callback) 
setImmediate(callback); + return mockHttpServer; + }), + close: sandbox.stub().callsFake((callback) => { + if (callback) setImmediate(callback); + return mockHttpServer; + }), + }; + + mockHttpsServer = { + listen: sandbox.stub().callsFake((port, callback) => { + if (callback) setImmediate(callback); + return mockHttpsServer; + }), + close: sandbox.stub().callsFake((callback) => { + if (callback) setImmediate(callback); + return mockHttpsServer; + }), + }; + + sandbox.stub(http, 'createServer').returns(mockHttpServer); + sandbox.stub(https, 'createServer').returns(mockHttpsServer); + + // deep mocking for express router + const mockRouter = sandbox.stub(); + mockRouter.use = sandbox.stub(); + mockRouter.get = sandbox.stub(); + mockRouter.post = sandbox.stub(); + mockRouter.stack = []; + + Proxy = proxyquire('../src/proxy/index', { + './routes': { + getRouter: sandbox.stub().resolves(mockRouter), + }, + '../config': { + getTLSEnabled: sandbox.stub().returns(false), + getTLSKeyPemPath: sandbox.stub().returns('/tmp/key.pem'), + getTLSCertPemPath: sandbox.stub().returns('/tmp/cert.pem'), + getPlugins: sandbox.stub().returns(['mock-plugin']), + getAuthorisedList: sandbox.stub().returns([{ project: 'test-proj', name: 'test-repo' }]), + }, + '../db': { + getRepos: sandbox.stub().resolves([]), + createRepo: sandbox.stub().resolves({ _id: 'mock-repo-id' }), + addUserCanPush: sandbox.stub().resolves(), + addUserCanAuthorise: sandbox.stub().resolves(), + }, + '../plugin': { + PluginLoader: sandbox.stub().returns({ + load: sandbox.stub().resolves(), + }), + }, + './chain': { + default: {}, + }, + '../config/env': { + serverConfig: { + GIT_PROXY_SERVER_PORT: 3000, + GIT_PROXY_HTTPS_SERVER_PORT: 3001, + }, + }, + fs: { + readFileSync: sandbox.stub().returns(Buffer.from('mock-cert')), + }, + }).default; + }); + + afterEach(() => { + sandbox.restore(); + }); + + describe('start()', () => { + it('should start HTTP server when TLS is disabled', async () => { + const proxy = new 
Proxy(); + + await proxy.start(); + + expect(http.createServer.calledOnce).to.be.true; + expect(https.createServer.called).to.be.false; + expect(mockHttpServer.listen.calledWith(3000)).to.be.true; + + await proxy.stop(); + }); + + it('should start both HTTP and HTTPS servers when TLS is enabled', async () => { + const mockRouterTLS = sandbox.stub(); + mockRouterTLS.use = sandbox.stub(); + mockRouterTLS.get = sandbox.stub(); + mockRouterTLS.post = sandbox.stub(); + mockRouterTLS.stack = []; + + const ProxyWithTLS = proxyquire('../src/proxy/index', { + './routes': { + getRouter: sandbox.stub().resolves(mockRouterTLS), + }, + '../config': { + getTLSEnabled: sandbox.stub().returns(true), // TLS enabled + getTLSKeyPemPath: sandbox.stub().returns('/tmp/key.pem'), + getTLSCertPemPath: sandbox.stub().returns('/tmp/cert.pem'), + getPlugins: sandbox.stub().returns(['mock-plugin']), + getAuthorisedList: sandbox.stub().returns([]), + }, + '../db': { + getRepos: sandbox.stub().resolves([]), + createRepo: sandbox.stub().resolves({ _id: 'mock-repo-id' }), + addUserCanPush: sandbox.stub().resolves(), + addUserCanAuthorise: sandbox.stub().resolves(), + }, + '../plugin': { + PluginLoader: sandbox.stub().returns({ + load: sandbox.stub().resolves(), + }), + }, + './chain': { + default: {}, + }, + '../config/env': { + serverConfig: { + GIT_PROXY_SERVER_PORT: 3000, + GIT_PROXY_HTTPS_SERVER_PORT: 3001, + }, + }, + fs: { + readFileSync: sandbox.stub().returns(Buffer.from('mock-cert')), + }, + }).default; + + const proxy = new ProxyWithTLS(); + + await proxy.start(); + + expect(http.createServer.calledOnce).to.be.true; + expect(https.createServer.calledOnce).to.be.true; + expect(mockHttpServer.listen.calledWith(3000)).to.be.true; + expect(mockHttpsServer.listen.calledWith(3001)).to.be.true; + + await proxy.stop(); + }); + + it('should set up express app after starting', async () => { + const proxy = new Proxy(); + expect(proxy.getExpressApp()).to.be.null; + + await proxy.start(); + + 
expect(proxy.getExpressApp()).to.not.be.null; + expect(proxy.getExpressApp()).to.be.a('function'); + + await proxy.stop(); + }); + }); + + describe('getExpressApp()', () => { + it('should return null before start() is called', () => { + const proxy = new Proxy(); + + expect(proxy.getExpressApp()).to.be.null; + }); + + it('should return express app after start() is called', async () => { + const proxy = new Proxy(); + + await proxy.start(); + + const app = proxy.getExpressApp(); + expect(app).to.not.be.null; + expect(app).to.be.a('function'); + expect(app.use).to.be.a('function'); + + await proxy.stop(); + }); + }); + + describe('stop()', () => { + it('should close HTTP server when running', async () => { + const proxy = new Proxy(); + await proxy.start(); + await proxy.stop(); + + expect(mockHttpServer.close.calledOnce).to.be.true; + }); + + it('should close both HTTP and HTTPS servers when both are running', async () => { + const mockRouterStop = sandbox.stub(); + mockRouterStop.use = sandbox.stub(); + mockRouterStop.get = sandbox.stub(); + mockRouterStop.post = sandbox.stub(); + mockRouterStop.stack = []; + + const ProxyWithTLS = proxyquire('../src/proxy/index', { + './routes': { + getRouter: sandbox.stub().resolves(mockRouterStop), + }, + '../config': { + getTLSEnabled: sandbox.stub().returns(true), + getTLSKeyPemPath: sandbox.stub().returns('/tmp/key.pem'), + getTLSCertPemPath: sandbox.stub().returns('/tmp/cert.pem'), + getPlugins: sandbox.stub().returns([]), + getAuthorisedList: sandbox.stub().returns([]), + }, + '../db': { + getRepos: sandbox.stub().resolves([]), + createRepo: sandbox.stub().resolves({ _id: 'mock-repo-id' }), + addUserCanPush: sandbox.stub().resolves(), + addUserCanAuthorise: sandbox.stub().resolves(), + }, + '../plugin': { + PluginLoader: sandbox.stub().returns({ + load: sandbox.stub().resolves(), + }), + }, + './chain': { + default: {}, + }, + '../config/env': { + serverConfig: { + GIT_PROXY_SERVER_PORT: 3000, + GIT_PROXY_HTTPS_SERVER_PORT: 
3001, + }, + }, + fs: { + readFileSync: sandbox.stub().returns(Buffer.from('mock-cert')), + }, + }).default; + + const proxy = new ProxyWithTLS(); + await proxy.start(); + await proxy.stop(); + + expect(mockHttpServer.close.calledOnce).to.be.true; + expect(mockHttpsServer.close.calledOnce).to.be.true; + }); + + it('should resolve successfully when no servers are running', async () => { + const proxy = new Proxy(); + + await proxy.stop(); + + expect(mockHttpServer.close.called).to.be.false; + expect(mockHttpsServer.close.called).to.be.false; + }); + + it('should handle errors gracefully', async () => { + const proxy = new Proxy(); + await proxy.start(); + + // simulate error in server close + mockHttpServer.close.callsFake((callback) => { + throw new Error('Server close error'); + }); + + try { + await proxy.stop(); + expect.fail('Expected stop() to reject'); + } catch (error) { + expect(error.message).to.equal('Server close error'); + } + }); + }); + + describe('full lifecycle', () => { + it('should start and stop successfully', async () => { + const proxy = new Proxy(); + + await proxy.start(); + expect(proxy.getExpressApp()).to.not.be.null; + expect(mockHttpServer.listen.calledOnce).to.be.true; + + await proxy.stop(); + expect(mockHttpServer.close.calledOnce).to.be.true; + }); + + it('should handle multiple start/stop cycles', async () => { + const proxy = new Proxy(); + + await proxy.start(); + await proxy.stop(); + + mockHttpServer.listen.resetHistory(); + mockHttpServer.close.resetHistory(); + + await proxy.start(); + await proxy.stop(); + + expect(mockHttpServer.listen.calledOnce).to.be.true; + expect(mockHttpServer.close.calledOnce).to.be.true; + }); + }); +}); diff --git a/test/testProxyRoute.test.js b/test/testProxyRoute.test.js new file mode 100644 index 000000000..39ee5e7fc --- /dev/null +++ b/test/testProxyRoute.test.js @@ -0,0 +1,504 @@ +const { handleMessage, validGitRequest } = require('../src/proxy/routes'); +const chai = require('chai'); +const 
chaiHttp = require('chai-http'); +chai.use(chaiHttp); +chai.should(); +const expect = chai.expect; +const sinon = require('sinon'); +const express = require('express'); +const getRouter = require('../src/proxy/routes').getRouter; +const chain = require('../src/proxy/chain'); +const proxyquire = require('proxyquire'); +const { Action, Step } = require('../src/proxy/actions'); +const service = require('../src/service'); +const db = require('../src/db'); + +import Proxy from '../src/proxy'; + +const TEST_DEFAULT_REPO = { + url: 'https://github.com/finos/git-proxy.git', + name: 'git-proxy', + project: 'finos/git-proxy', + host: 'github.com', + proxyUrlPrefix: '/github.com/finos/git-proxy.git', +}; + +const TEST_GITLAB_REPO = { + url: 'https://gitlab.com/gitlab-community/meta.git', + name: 'gitlab', + project: 'gitlab-community/meta', + host: 'gitlab.com', + proxyUrlPrefix: '/gitlab.com/gitlab-community/meta.git', +}; + +const TEST_UNKNOWN_REPO = { + url: 'https://github.com/finos/fdc3.git', + name: 'fdc3', + project: 'finos/fdc3', + host: 'github.com', + proxyUrlPrefix: '/github.com/finos/fdc3.git', + fallbackUrlPrefix: '/finos/fdc3.git', +}; + +describe('proxy route filter middleware', () => { + let app; + + beforeEach(async () => { + app = express(); + app.use('/', await getRouter()); + }); + + afterEach(() => { + sinon.restore(); + }); + + after(() => { + sinon.restore(); + }); + + it('should reject invalid git requests with 400', async () => { + const res = await chai + .request(app) + .get('/owner/repo.git/invalid/path') + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request'); + + expect(res).to.have.status(200); // status 200 is used to ensure error message is rendered by git client + expect(res.text).to.contain('Invalid request received'); + }); + + it('should handle blocked requests and return custom packet message', async () => { + sinon.stub(chain, 'executeChain').resolves({ + blocked: true, + blockedMessage: 'You shall 
not push!', + error: true, + }); + + const res = await chai + .request(app) + .post('/owner/repo.git/git-upload-pack') + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request') + .send(Buffer.from('0000')) + .buffer(); + + expect(res.status).to.equal(200); // status 200 is used to ensure error message is rendered by git client + expect(res.text).to.contain('You shall not push!'); + expect(res.headers['content-type']).to.include('application/x-git-receive-pack-result'); + expect(res.headers['x-frame-options']).to.equal('DENY'); + }); + + describe('when request is valid and not blocked', () => { + it('should return error if repo is not found', async () => { + sinon.stub(chain, 'executeChain').resolves({ + blocked: false, + blockedMessage: '', + error: false, + }); + + const res = await chai + .request(app) + .get('/owner/repo.git/info/refs?service=git-upload-pack') + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request') + .buffer(); + + expect(res.status).to.equal(401); + expect(res.text).to.equal('Repository not found.'); + }); + + it('should pass through if repo is found', async () => { + sinon.stub(chain, 'executeChain').resolves({ + blocked: false, + blockedMessage: '', + error: false, + }); + + const res = await chai + .request(app) + .get('/finos/git-proxy.git/info/refs?service=git-upload-pack') + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request') + .buffer(); + + expect(res.status).to.equal(200); + expect(res.text).to.contain('git-upload-pack'); + }); + }); +}); + +describe('proxy route helpers', () => { + describe('handleMessage', async () => { + it('should handle short messages', async function () { + const res = await handleMessage('one'); + expect(res).to.contain('one'); + }); + + it('should handle emoji messages', async function () { + const res = await handleMessage('❌ push failed: too many errors'); + expect(res).to.contain('❌'); + }); + }); + 
+ describe('validGitRequest', () => { + it('should return true for /info/refs?service=git-upload-pack with valid user-agent', () => { + const res = validGitRequest('/info/refs?service=git-upload-pack', { + 'user-agent': 'git/2.30.1', + }); + expect(res).to.be.true; + }); + + it('should return true for /info/refs?service=git-receive-pack with valid user-agent', () => { + const res = validGitRequest('/info/refs?service=git-receive-pack', { + 'user-agent': 'git/1.9.1', + }); + expect(res).to.be.true; + }); + + it('should return false for /info/refs?service=git-upload-pack with missing user-agent', () => { + const res = validGitRequest('/info/refs?service=git-upload-pack', {}); + expect(res).to.be.false; + }); + + it('should return false for /info/refs?service=git-upload-pack with non-git user-agent', () => { + const res = validGitRequest('/info/refs?service=git-upload-pack', { + 'user-agent': 'curl/7.79.1', + }); + expect(res).to.be.false; + }); + + it('should return true for /git-upload-pack with valid user-agent and accept', () => { + const res = validGitRequest('/git-upload-pack', { + 'user-agent': 'git/2.40.0', + accept: 'application/x-git-upload-pack-request', + }); + expect(res).to.be.true; + }); + + it('should return false for /git-upload-pack with missing accept header', () => { + const res = validGitRequest('/git-upload-pack', { + 'user-agent': 'git/2.40.0', + }); + expect(res).to.be.false; + }); + + it('should return false for /git-upload-pack with wrong accept header', () => { + const res = validGitRequest('/git-upload-pack', { + 'user-agent': 'git/2.40.0', + accept: 'application/json', + }); + expect(res).to.be.false; + }); + + it('should return false for unknown paths', () => { + const res = validGitRequest('/not-a-valid-git-path', { + 'user-agent': 'git/2.40.0', + accept: 'application/x-git-upload-pack-request', + }); + expect(res).to.be.false; + }); + }); +}); + +describe('proxyFilter function', async () => { + let proxyRoutes; + let req; + let res; + 
let actionToReturn; + let executeChainStub; + + beforeEach(async () => { + executeChainStub = sinon.stub(); + + // Re-import the proxy routes module and stub executeChain + proxyRoutes = proxyquire('../src/proxy/routes', { + '../chain': { executeChain: executeChainStub }, + }); + + req = { + url: '/github.com/finos/git-proxy.git/info/refs?service=git-receive-pack', + headers: { + host: 'dummyHost', + 'user-agent': 'git/dummy-git-client', + accept: 'application/x-git-receive-pack-request', + }, + }; + res = { + set: () => {}, + status: () => { + return { + send: () => {}, + }; + }, + }; + }); + + afterEach(() => { + sinon.restore(); + }); + + it('should return false for push requests that should be blocked', async function () { + // mock the executeChain function + actionToReturn = new Action( + 1234, + 'dummy', + 'dummy', + Date.now(), + '/github.com/finos/git-proxy.git', + ); + const step = new Step('dummy', false, null, true, 'test block', null); + actionToReturn.addStep(step); + executeChainStub.returns(actionToReturn); + const result = await proxyRoutes.proxyFilter(req, res); + expect(result).to.be.false; + }); + + it('should return false for push requests that produced errors', async function () { + // mock the executeChain function + actionToReturn = new Action( + 1234, + 'dummy', + 'dummy', + Date.now(), + '/github.com/finos/git-proxy.git', + ); + const step = new Step('dummy', true, 'test error', false, null, null); + actionToReturn.addStep(step); + executeChainStub.returns(actionToReturn); + const result = await proxyRoutes.proxyFilter(req, res); + expect(result).to.be.false; + }); + + it('should return false for invalid push requests', async function () { + // mock the executeChain function + actionToReturn = new Action( + 1234, + 'dummy', + 'dummy', + Date.now(), + '/github.com/finos/git-proxy.git', + ); + const step = new Step('dummy', true, 'test error', false, null, null); + actionToReturn.addStep(step); + executeChainStub.returns(actionToReturn); + + 
// create an invalid request + req = { + url: '/github.com/finos/git-proxy.git/invalidPath', + headers: { + host: 'dummyHost', + 'user-agent': 'git/dummy-git-client', + accept: 'application/x-git-receive-pack-request', + }, + }; + + const result = await proxyRoutes.proxyFilter(req, res); + expect(result).to.be.false; + }); + + it('should return true for push requests that are valid and pass the chain', async function () { + // mock the executeChain function + actionToReturn = new Action( + 1234, + 'dummy', + 'dummy', + Date.now(), + '/github.com/finos/git-proxy.git', + ); + const step = new Step('dummy', false, null, false, null, null); + actionToReturn.addStep(step); + executeChainStub.returns(actionToReturn); + const result = await proxyRoutes.proxyFilter(req, res); + expect(result).to.be.true; + }); +}); + +describe('proxy express application', async () => { + let apiApp; + let cookie; + let proxy; + + const setCookie = function (res) { + res.headers['set-cookie'].forEach((x) => { + if (x.startsWith('connect')) { + const value = x.split(';')[0]; + cookie = value; + } + }); + }; + + const cleanupRepo = async (url) => { + const repo = await db.getRepoByUrl(url); + if (repo) { + await db.deleteRepo(repo._id); + } + }; + + before(async () => { + // start the API and proxy + proxy = new Proxy(); + apiApp = await service.start(proxy); + await proxy.start(); + + const res = await chai.request(apiApp).post('/api/auth/login').send({ + username: 'admin', + password: 'admin', + }); + expect(res).to.have.cookie('connect.sid'); + setCookie(res); + + // if our default repo is not set-up, create it + const repo = await db.getRepoByUrl(TEST_DEFAULT_REPO.url); + if (!repo) { + const res2 = await chai + .request(apiApp) + .post('/api/v1/repo') + .set('Cookie', `${cookie}`) + .send(TEST_DEFAULT_REPO); + res2.should.have.status(200); + } + }); + + after(async () => { + sinon.restore(); + await service.stop(); + await proxy.stop(); + await cleanupRepo(TEST_DEFAULT_REPO.url); + await 
cleanupRepo(TEST_GITLAB_REPO.url); + }); + + it('should proxy requests for the default GitHub repository', async function () { + // proxy a fetch request + const res = await chai + .request(proxy.getExpressApp()) + .get(`${TEST_DEFAULT_REPO.proxyUrlPrefix}/info/refs?service=git-upload-pack`) + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request') + .buffer(); + + expect(res.status).to.equal(200); + expect(res.text).to.contain('git-upload-pack'); + }); + + it('should proxy requests for the default GitHub repository using the fallback URL', async function () { + // proxy a fetch request using a fallback URL + const res = await chai + .request(proxy.getExpressApp()) + .get(`${TEST_DEFAULT_REPO.proxyUrlPrefix}/info/refs?service=git-upload-pack`) + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request') + .buffer(); + + expect(res.status).to.equal(200); + expect(res.text).to.contain('git-upload-pack'); + }); + + it('should be restarted by the api and proxy requests for a new host (e.g. 
gitlab.com) when a project at that host is ADDED via the API', async function () { + // Tests that the proxy restarts properly after a project with a URL at a new host is added + + // check that we don't have *any* repos at gitlab.com setup + const numExistingGitlabRepos = (await db.getRepos({ url: /https:\/\/gitlab\.com/ })).length; + expect( + numExistingGitlabRepos, + 'There is a GitLab that exists in the database already, which is NOT expected when running this test', + ).to.be.equal(0); + + // create the repo through the API, which should force the proxy to restart to handle the new domain + const res = await chai + .request(apiApp) + .post('/api/v1/repo') + .set('Cookie', `${cookie}`) + .send(TEST_GITLAB_REPO); + res.should.have.status(200); + + // confirm that the repo was created in the DB + const repo = await db.getRepoByUrl(TEST_GITLAB_REPO.url); + expect(repo).to.not.be.null; + + // and that our initial query for repos would have picked it up + const numCurrentGitlabRepos = (await db.getRepos({ url: /https:\/\/gitlab\.com/ })).length; + expect(numCurrentGitlabRepos).to.be.equal(1); + + // proxy a request to the new repo + const res2 = await chai + .request(proxy.getExpressApp()) + .get(`${TEST_GITLAB_REPO.proxyUrlPrefix}/info/refs?service=git-upload-pack`) + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request') + .buffer(); + + res2.should.have.status(200); + expect(res2.text).to.contain('git-upload-pack'); + }).timeout(5000); + + it('should be restarted by the api and stop proxying requests for a host (e.g. gitlab.com) when the last project at that host is DELETED via the API', async function () { + // We are testing that the proxy stops proxying requests for a particular origin + // The chain is stubbed and will always passthrough requests, hence, we are only checking what hosts are proxied. 
+ + // the gitlab test repo should already exist + let repo = await db.getRepoByUrl(TEST_GITLAB_REPO.url); + expect(repo).to.not.be.null; + + // delete the gitlab test repo, which should force the proxy to restart and stop proxying gitlab.com + // We assume that there are no other gitlab.com repos present + const res = await chai + .request(apiApp) + .delete('/api/v1/repo/' + repo._id + '/delete') + .set('Cookie', `${cookie}`) + .send(); + res.should.have.status(200); + + // confirm that its gone from the DB + repo = await db.getRepoByUrl(TEST_GITLAB_REPO.url); + expect( + repo, + 'The GitLab repo still existed in the database after it should have been deleted...', + ).to.be.null; + + // give the proxy half a second to restart + await new Promise((resolve) => setTimeout(resolve, 500)); + + // try (and fail) to proxy a request to gitlab.com + const res2 = await chai + .request(proxy.getExpressApp()) + .get(`${TEST_GITLAB_REPO.proxyUrlPrefix}/info/refs?service=git-upload-pack`) + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request') + .buffer(); + + res2.should.have.status(200); // status 200 is used to ensure error message is rendered by git client + expect(res2.text).to.contain('Rejecting repo'); + }).timeout(5000); + + it('should not proxy requests for an unknown project', async function () { + // We are testing that the proxy stops proxying requests for a particular origin + // The chain is stubbed and will always passthrough requests, hence, we are only checking what hosts are proxied. 
+ + // the gitlab test repo should already exist + const repo = await db.getRepoByUrl(TEST_UNKNOWN_REPO.url); + expect( + repo, + 'The unknown (but real) repo existed in the database which is not expected for this test', + ).to.be.null; + + // try (and fail) to proxy a request to the repo directly + const res = await chai + .request(proxy.getExpressApp()) + .get(`${TEST_UNKNOWN_REPO.proxyUrlPrefix}/info/refs?service=git-upload-pack`) + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request') + .buffer(); + res.should.have.status(200); // status 200 is used to ensure error message is rendered by git client + expect(res.text).to.contain('Rejecting repo'); + + // try (and fail) to proxy a request to the repo via the fallback URL directly + const res2 = await chai + .request(proxy.getExpressApp()) + .get(`${TEST_UNKNOWN_REPO.fallbackUrlPrefix}/info/refs?service=git-upload-pack`) + .set('user-agent', 'git/2.42.0') + .set('accept', 'application/x-git-upload-pack-request') + .buffer(); + res2.should.have.status(200); + expect(res2.text).to.contain('Rejecting repo'); + }).timeout(5000); +}); diff --git a/test/testPush.test.js b/test/testPush.test.js new file mode 100644 index 000000000..62836b3a5 --- /dev/null +++ b/test/testPush.test.js @@ -0,0 +1,330 @@ +// Import the dependencies for testing +const chai = require('chai'); +const chaiHttp = require('chai-http'); +const db = require('../src/db'); +const service = require('../src/service'); + +chai.use(chaiHttp); +chai.should(); +const expect = chai.expect; + +// dummy repo +const TEST_ORG = 'finos'; +const TEST_REPO = 'test-push'; +const TEST_URL = 'https://github.com/finos/test-push.git'; +// approver user +const TEST_USERNAME_1 = 'push-test'; +const TEST_EMAIL_1 = 'push-test@test.com'; +const TEST_PASSWORD_1 = 'test1234'; +// committer user +const TEST_USERNAME_2 = 'push-test-2'; +const TEST_EMAIL_2 = 'push-test-2@test.com'; +const TEST_PASSWORD_2 = 'test5678'; +// unknown user +const 
TEST_USERNAME_3 = 'push-test-3'; +const TEST_EMAIL_3 = 'push-test-3@test.com'; + +const TEST_PUSH = { + steps: [], + error: false, + blocked: false, + allowPush: false, + authorised: false, + canceled: false, + rejected: false, + autoApproved: false, + autoRejected: false, + commitData: [], + id: '0000000000000000000000000000000000000000__1744380874110', + type: 'push', + method: 'get', + timestamp: 1744380903338, + project: TEST_ORG, + repoName: TEST_REPO + '.git', + url: TEST_URL, + repo: TEST_ORG + '/' + TEST_REPO + '.git', + user: TEST_USERNAME_2, + userEmail: TEST_EMAIL_2, + lastStep: null, + blockedMessage: + '\n\n\nGitProxy has received your push:\n\nhttp://localhost:8080/requests/0000000000000000000000000000000000000000__1744380874110\n\n\n', + _id: 'GIMEz8tU2KScZiTz', + attestation: null, +}; + +describe('auth', async () => { + let app; + let cookie; + let testRepo; + + const setCookie = function (res) { + res.headers['set-cookie'].forEach((x) => { + if (x.startsWith('connect')) { + const value = x.split(';')[0]; + cookie = value; + } + }); + }; + + const login = async function (username, password) { + console.log(`logging in as ${username}...`); + const res = await chai.request(app).post('/api/auth/login').send({ + username: username, + password: password, + }); + res.should.have.status(200); + expect(res).to.have.cookie('connect.sid'); + setCookie(res); + }; + + const loginAsApprover = () => login(TEST_USERNAME_1, TEST_PASSWORD_1); + const loginAsCommitter = () => login(TEST_USERNAME_2, TEST_PASSWORD_2); + const loginAsAdmin = () => login('admin', 'admin'); + + const logout = async function () { + const res = await chai.request(app).post('/api/auth/logout').set('Cookie', `${cookie}`); + res.should.have.status(200); + cookie = null; + }; + + before(async function () { + // remove existing repo and users if any + const oldRepo = await db.getRepoByUrl(TEST_URL); + if (oldRepo) { + await db.deleteRepo(oldRepo._id); + } + await db.deleteUser(TEST_USERNAME_1); 
+ await db.deleteUser(TEST_USERNAME_2); + + app = await service.start(); + await loginAsAdmin(); + + // set up a repo, user and push to test against + testRepo = await db.createRepo({ + project: TEST_ORG, + name: TEST_REPO, + url: TEST_URL, + }); + + // Create a new user for the approver + console.log('creating approver'); + await db.createUser(TEST_USERNAME_1, TEST_PASSWORD_1, TEST_EMAIL_1, TEST_USERNAME_1, false); + await db.addUserCanAuthorise(testRepo._id, TEST_USERNAME_1); + + // create a new user for the committer + console.log('creating committer'); + await db.createUser(TEST_USERNAME_2, TEST_PASSWORD_2, TEST_EMAIL_2, TEST_USERNAME_2, false); + await db.addUserCanPush(testRepo._id, TEST_USERNAME_2); + + // logout of admin account + await logout(); + }); + + after(async function () { + await db.deleteRepo(testRepo._id); + await db.deleteUser(TEST_USERNAME_1); + await db.deleteUser(TEST_USERNAME_2); + }); + + describe('test push API', async function () { + afterEach(async function () { + await db.deletePush(TEST_PUSH.id); + await logout(); + }); + + it('should get 404 for unknown push', async function () { + await loginAsApprover(); + + const commitId = + '0000000000000000000000000000000000000000__79b4d8953cbc324bcc1eb53d6412ff89666c241f'; + const res = await chai + .request(app) + .get(`/api/v1/push/${commitId}`) + .set('Cookie', `${cookie}`); + res.should.have.status(404); + }); + + it('should allow an authorizer to approve a push', async function () { + await db.writeAudit(TEST_PUSH); + await loginAsApprover(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/authorise`) + .set('Cookie', `${cookie}`) + .set('content-type', 'application/x-www-form-urlencoded') + .send({ + params: { + attestation: [ + { + label: 'I am happy for this to be pushed to the upstream repository', + tooltip: { + text: 'Are you happy for this contribution to be pushed upstream?', + links: [], + }, + checked: true, + }, + ], + }, + }); + 
res.should.have.status(200); + }); + + it('should NOT allow an authorizer to approve if attestation is incomplete', async function () { + // make the approver also the committer + const testPush = { ...TEST_PUSH }; + testPush.user = TEST_USERNAME_1; + testPush.userEmail = TEST_EMAIL_1; + await db.writeAudit(testPush); + await loginAsApprover(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/authorise`) + .set('Cookie', `${cookie}`) + .set('content-type', 'application/x-www-form-urlencoded') + .send({ + params: { + attestation: [ + { + label: 'I am happy for this to be pushed to the upstream repository', + tooltip: { + text: 'Are you happy for this contribution to be pushed upstream?', + links: [], + }, + checked: false, + }, + ], + }, + }); + res.should.have.status(401); + }); + + it('should NOT allow an authorizer to approve if committer is unknown', async function () { + // make the approver also the committer + const testPush = { ...TEST_PUSH }; + testPush.user = TEST_USERNAME_3; + testPush.userEmail = TEST_EMAIL_3; + await db.writeAudit(testPush); + await loginAsApprover(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/authorise`) + .set('Cookie', `${cookie}`) + .set('content-type', 'application/x-www-form-urlencoded') + .send({ + params: { + attestation: [ + { + label: 'I am happy for this to be pushed to the upstream repository', + tooltip: { + text: 'Are you happy for this contribution to be pushed upstream?', + links: [], + }, + checked: true, + }, + ], + }, + }); + res.should.have.status(401); + }); + + it('should NOT allow an authorizer to approve their own push', async function () { + // make the approver also the committer + const testPush = { ...TEST_PUSH }; + testPush.user = TEST_USERNAME_1; + testPush.userEmail = TEST_EMAIL_1; + await db.writeAudit(testPush); + await loginAsApprover(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/authorise`) + 
.set('Cookie', `${cookie}`) + .set('content-type', 'application/x-www-form-urlencoded') + .send({ + params: { + attestation: [ + { + label: 'I am happy for this to be pushed to the upstream repository', + tooltip: { + text: 'Are you happy for this contribution to be pushed upstream?', + links: [], + }, + checked: true, + }, + ], + }, + }); + res.should.have.status(401); + }); + + it('should NOT allow a non-authorizer to approve a push', async function () { + await db.writeAudit(TEST_PUSH); + await loginAsCommitter(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/authorise`) + .set('Cookie', `${cookie}`) + .set('content-type', 'application/x-www-form-urlencoded') + .send({ + params: { + attestation: [ + { + label: 'I am happy for this to be pushed to the upstream repository', + tooltip: { + text: 'Are you happy for this contribution to be pushed upstream?', + links: [], + }, + checked: true, + }, + ], + }, + }); + res.should.have.status(401); + }); + + it('should allow an authorizer to reject a push', async function () { + await db.writeAudit(TEST_PUSH); + await loginAsApprover(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/reject`) + .set('Cookie', `${cookie}`); + res.should.have.status(200); + }); + + it('should NOT allow an authorizer to reject their own push', async function () { + // make the approver also the committer + const testPush = { ...TEST_PUSH }; + testPush.user = TEST_USERNAME_1; + testPush.userEmail = TEST_EMAIL_1; + await db.writeAudit(testPush); + await loginAsApprover(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/reject`) + .set('Cookie', `${cookie}`); + res.should.have.status(401); + }); + + it('should NOT allow a non-authorizer to reject a push', async function () { + await db.writeAudit(TEST_PUSH); + await loginAsCommitter(); + const res = await chai + .request(app) + .post(`/api/v1/push/${TEST_PUSH.id}/reject`) + .set('Cookie', 
`${cookie}`); + res.should.have.status(401); + }); + }); + + after(async function () { + const res = await chai.request(app).post('/api/auth/logout').set('Cookie', `${cookie}`); + res.should.have.status(200); + + await service.httpServer.close(); + + await db.deleteRepo(TEST_REPO); + await db.deleteUser(TEST_USERNAME_1); + await db.deleteUser(TEST_USERNAME_2); + await db.deletePush(TEST_PUSH.id); + }); +}); diff --git a/test/testRepoApi.test.js b/test/testRepoApi.test.js new file mode 100644 index 000000000..23dc40bac --- /dev/null +++ b/test/testRepoApi.test.js @@ -0,0 +1,340 @@ +// Import the dependencies for testing +const chai = require('chai'); +const chaiHttp = require('chai-http'); +const db = require('../src/db'); +const service = require('../src/service'); +const { getAllProxiedHosts } = require('../src/proxy/routes/helper'); + +import Proxy from '../src/proxy'; + +chai.use(chaiHttp); +chai.should(); +const expect = chai.expect; + +const TEST_REPO = { + url: 'https://github.com/finos/test-repo.git', + name: 'test-repo', + project: 'finos', + host: 'github.com', +}; + +const TEST_REPO_NON_GITHUB = { + url: 'https://gitlab.com/org/sub-org/test-repo2.git', + name: 'test-repo2', + project: 'org/sub-org', + host: 'gitlab.com', +}; + +const TEST_REPO_NAKED = { + url: 'https://123.456.789:80/test-repo3.git', + name: 'test-repo3', + project: '', + host: '123.456.789:80', +}; + +const cleanupRepo = async (url) => { + const repo = await db.getRepoByUrl(url); + if (repo) { + await db.deleteRepo(repo._id); + } +}; + +describe('add new repo', async () => { + let app; + let proxy; + let cookie; + const repoIds = []; + + const setCookie = function (res) { + res.headers['set-cookie'].forEach((x) => { + if (x.startsWith('connect')) { + const value = x.split(';')[0]; + cookie = value; + } + }); + }; + + before(async function () { + proxy = new Proxy(); + app = await service.start(proxy); + // Prepare the data. 
+ // _id is autogenerated by the DB so we need to retrieve it before we can use it + cleanupRepo(TEST_REPO.url); + cleanupRepo(TEST_REPO_NON_GITHUB.url); + cleanupRepo(TEST_REPO_NAKED.url); + + await db.deleteUser('u1'); + await db.deleteUser('u2'); + await db.createUser('u1', 'abc', 'test@test.com', 'test', true); + await db.createUser('u2', 'abc', 'test2@test.com', 'test', true); + }); + + it('login', async function () { + const res = await chai.request(app).post('/api/auth/login').send({ + username: 'admin', + password: 'admin', + }); + expect(res).to.have.cookie('connect.sid'); + setCookie(res); + }); + + it('create a new repo', async function () { + const res = await chai + .request(app) + .post('/api/v1/repo') + .set('Cookie', `${cookie}`) + .send(TEST_REPO); + res.should.have.status(200); + + const repo = await db.getRepoByUrl(TEST_REPO.url); + // save repo id for use in subsequent tests + repoIds[0] = repo._id; + + repo.project.should.equal(TEST_REPO.project); + repo.name.should.equal(TEST_REPO.name); + repo.url.should.equal(TEST_REPO.url); + repo.users.canPush.length.should.equal(0); + repo.users.canAuthorise.length.should.equal(0); + }); + + it('get a repo', async function () { + const res = await chai + .request(app) + .get('/api/v1/repo/' + repoIds[0]) + .set('Cookie', `${cookie}`) + .send(); + res.should.have.status(200); + + expect(res.body.url).to.equal(TEST_REPO.url); + expect(res.body.name).to.equal(TEST_REPO.name); + expect(res.body.project).to.equal(TEST_REPO.project); + }); + + it('return a 409 error if the repo already exists', async function () { + const res = await chai + .request(app) + .post('/api/v1/repo') + .set('Cookie', `${cookie}`) + .send(TEST_REPO); + res.should.have.status(409); + res.body.message.should.equal('Repository ' + TEST_REPO.url + ' already exists!'); + }); + + it('filter repos', async function () { + const res = await chai + .request(app) + .get('/api/v1/repo') + .set('Cookie', `${cookie}`) + .query({ url: TEST_REPO.url 
}); + res.should.have.status(200); + res.body[0].project.should.equal(TEST_REPO.project); + res.body[0].name.should.equal(TEST_REPO.name); + res.body[0].url.should.equal(TEST_REPO.url); + }); + + it('add 1st can push user', async function () { + const res = await chai + .request(app) + .patch(`/api/v1/repo/${repoIds[0]}/user/push`) + .set('Cookie', `${cookie}`) + .send({ + username: 'u1', + }); + + res.should.have.status(200); + const repo = await db.getRepoById(repoIds[0]); + repo.users.canPush.length.should.equal(1); + repo.users.canPush[0].should.equal('u1'); + }); + + it('add 2nd can push user', async function () { + const res = await chai + .request(app) + .patch(`/api/v1/repo/${repoIds[0]}/user/push`) + .set('Cookie', `${cookie}`) + .send({ + username: 'u2', + }); + + res.should.have.status(200); + const repo = await db.getRepoById(repoIds[0]); + repo.users.canPush.length.should.equal(2); + repo.users.canPush[1].should.equal('u2'); + }); + + it('add push user that does not exist', async function () { + const res = await chai + .request(app) + .patch(`/api/v1/repo/${repoIds[0]}/user/push`) + .set('Cookie', `${cookie}`) + .send({ + username: 'u3', + }); + + res.should.have.status(400); + const repo = await db.getRepoById(repoIds[0]); + repo.users.canPush.length.should.equal(2); + }); + + it('delete user u2 from push', async function () { + const res = await chai + .request(app) + .delete(`/api/v1/repo/${repoIds[0]}/user/push/u2`) + .set('Cookie', `${cookie}`) + .send({}); + + res.should.have.status(200); + const repo = await db.getRepoById(repoIds[0]); + repo.users.canPush.length.should.equal(1); + }); + + it('add 1st can authorise user', async function () { + const res = await chai + .request(app) + .patch(`/api/v1/repo/${repoIds[0]}/user/authorise`) + .set('Cookie', `${cookie}`) + .send({ + username: 'u1', + }); + + res.should.have.status(200); + const repo = await db.getRepoById(repoIds[0]); + repo.users.canAuthorise.length.should.equal(1); + 
repo.users.canAuthorise[0].should.equal('u1'); + }); + + it('add 2nd can authorise user', async function () { + const res = await chai + .request(app) + .patch(`/api/v1/repo/${repoIds[0]}/user/authorise`) + .set('Cookie', `${cookie}`) + .send({ + username: 'u2', + }); + + res.should.have.status(200); + const repo = await db.getRepoById(repoIds[0]); + repo.users.canAuthorise.length.should.equal(2); + repo.users.canAuthorise[1].should.equal('u2'); + }); + + it('add authorise user that does not exist', async function () { + const res = await chai + .request(app) + .patch(`/api/v1/repo/${repoIds[0]}/user/authorise`) + .set('Cookie', `${cookie}`) + .send({ + username: 'u3', + }); + + res.should.have.status(400); + const repo = await db.getRepoById(repoIds[0]); + repo.users.canAuthorise.length.should.equal(2); + }); + + it('Can delete u2 user', async function () { + const res = await chai + .request(app) + .delete(`/api/v1/repo/${repoIds[0]}/user/authorise/u2`) + .set('Cookie', `${cookie}`) + .send({}); + + res.should.have.status(200); + const repo = await db.getRepoById(repoIds[0]); + repo.users.canAuthorise.length.should.equal(1); + }); + + it('Valid user push permission on repo', async function () { + const res = await chai + .request(app) + .patch(`/api/v1/repo/${repoIds[0]}/user/authorise`) + .set('Cookie', `${cookie}`) + .send({ username: 'u2' }); + + res.should.have.status(200); + const isAllowed = await db.isUserPushAllowed(TEST_REPO.url, 'u2'); + expect(isAllowed).to.be.true; + }); + + it('Invalid user push permission on repo', async function () { + const isAllowed = await db.isUserPushAllowed(TEST_REPO.url, 'test1234'); + expect(isAllowed).to.be.false; + }); + + it('Proxy route helpers should return the proxied origin', async function () { + const origins = await getAllProxiedHosts(); + expect(origins).to.eql([TEST_REPO.host]); + }); + + it('Proxy route helpers should return the new proxied origins when new repos are added', async function () { + const res = 
await chai + .request(app) + .post('/api/v1/repo') + .set('Cookie', `${cookie}`) + .send(TEST_REPO_NON_GITHUB); + res.should.have.status(200); + + const repo = await db.getRepoByUrl(TEST_REPO_NON_GITHUB.url); + // save repo id for use in subsequent tests + repoIds[1] = repo._id; + + repo.project.should.equal(TEST_REPO_NON_GITHUB.project); + repo.name.should.equal(TEST_REPO_NON_GITHUB.name); + repo.url.should.equal(TEST_REPO_NON_GITHUB.url); + repo.users.canPush.length.should.equal(0); + repo.users.canAuthorise.length.should.equal(0); + + const origins = await getAllProxiedHosts(); + expect(origins).to.have.members([TEST_REPO.host, TEST_REPO_NON_GITHUB.host]); + + const res2 = await chai + .request(app) + .post('/api/v1/repo') + .set('Cookie', `${cookie}`) + .send(TEST_REPO_NAKED); + res2.should.have.status(200); + const repo2 = await db.getRepoByUrl(TEST_REPO_NAKED.url); + repoIds[2] = repo2._id; + + const origins2 = await getAllProxiedHosts(); + expect(origins2).to.have.members([ + TEST_REPO.host, + TEST_REPO_NON_GITHUB.host, + TEST_REPO_NAKED.host, + ]); + }); + + it('delete a repo', async function () { + const res = await chai + .request(app) + .delete('/api/v1/repo/' + repoIds[1] + '/delete') + .set('Cookie', `${cookie}`) + .send(); + res.should.have.status(200); + + const repo = await db.getRepoByUrl(TEST_REPO_NON_GITHUB.url); + expect(repo).to.be.null; + + const res2 = await chai + .request(app) + .delete('/api/v1/repo/' + repoIds[2] + '/delete') + .set('Cookie', `${cookie}`) + .send(); + res2.should.have.status(200); + + const repo2 = await db.getRepoByUrl(TEST_REPO_NAKED.url); + expect(repo2).to.be.null; + }); + + after(async function () { + await service.httpServer.close(); + + // don't clean up data as cypress tests rely on it being present + // await cleanupRepo(TEST_REPO.url); + // await db.deleteUser('u1'); + // await db.deleteUser('u2'); + + await cleanupRepo(TEST_REPO_NON_GITHUB.url); + await cleanupRepo(TEST_REPO_NAKED.url); + }); +}); diff --git 
import * as chai from 'chai';
import {
  validGitRequest,
  processUrlPath,
  processGitUrl,
  processGitURLForNameAndOrg,
} from '../src/proxy/routes/helper';

chai.should();

const expect = chai.expect;

// Oversized path used to exercise the parsers' length/ReDoS guards.
const VERY_LONG_PATH =
  '/a/very/very/very/very/very//very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/very/long/path';

describe('url helpers and filter functions used in the proxy', function () {
  it('processUrlPath should return breakdown of a proxied path, separating the path to repository from the git operation path', function () {
    expect(
      processUrlPath('/github.com/octocat/hello-world.git/info/refs?service=git-upload-pack'),
    ).to.deep.eq({
      repoPath: '/github.com/octocat/hello-world.git',
      gitPath: '/info/refs?service=git-upload-pack',
    });

    expect(
      processUrlPath('/gitlab.com/org/sub-org/hello-world.git/info/refs?service=git-upload-pack'),
    ).to.deep.eq({
      repoPath: '/gitlab.com/org/sub-org/hello-world.git',
      gitPath: '/info/refs?service=git-upload-pack',
    });

    expect(
      processUrlPath('/123.456.789/hello-world.git/info/refs?service=git-upload-pack'),
    ).to.deep.eq({
      repoPath: '/123.456.789/hello-world.git',
      gitPath: '/info/refs?service=git-upload-pack',
    });
  });

  it('processUrlPath should return breakdown of a legacy proxy path, separating the path to repository from the git operation path', function () {
    expect(processUrlPath('/octocat/hello-world.git/info/refs?service=git-upload-pack')).to.deep.eq(
      { repoPath: '/octocat/hello-world.git', gitPath: '/info/refs?service=git-upload-pack' },
    );
  });

  it('processUrlPath should return breakdown of a legacy proxy path, separating the path to repository when git path is just /', function () {
    expect(processUrlPath('/octocat/hello-world.git/')).to.deep.eq({
      repoPath: '/octocat/hello-world.git',
      gitPath: '/',
    });
  });

  it('processUrlPath should return breakdown of a legacy proxy path, separating the path to repository when no path is present', function () {
    expect(processUrlPath('/octocat/hello-world.git')).to.deep.eq({
      repoPath: '/octocat/hello-world.git',
      gitPath: '/',
    });
  });

  it("processUrlPath should return null if the url couldn't be parsed", function () {
    expect(processUrlPath('/octocat/hello-world')).to.be.null;
    expect(processUrlPath(VERY_LONG_PATH)).to.be.null;
  });

  it('processGitUrl should return breakdown of a git URL separating out the protocol, host and repository path', function () {
    expect(processGitUrl('https://somegithost.com/octocat/hello-world.git')).to.deep.eq({
      protocol: 'https://',
      host: 'somegithost.com',
      repoPath: '/octocat/hello-world.git',
    });

    expect(processGitUrl('https://123.456.789:1234/hello-world.git')).to.deep.eq({
      protocol: 'https://',
      host: '123.456.789:1234',
      repoPath: '/hello-world.git',
    });
  });

  it('processGitUrl should return breakdown of a git URL separating out the protocol, host and repository path and discard any git operation path', function () {
    expect(
      processGitUrl(
        'https://somegithost.com:1234/octocat/hello-world.git/info/refs?service=git-upload-pack',
      ),
    ).to.deep.eq({
      protocol: 'https://',
      host: 'somegithost.com:1234',
      repoPath: '/octocat/hello-world.git',
    });

    expect(
      processGitUrl('https://123.456.789/hello-world.git/info/refs?service=git-upload-pack'),
    ).to.deep.eq({
      protocol: 'https://',
      host: '123.456.789',
      repoPath: '/hello-world.git',
    });
  });

  it('processGitUrl should return null for a url it cannot parse', function () {
    expect(processGitUrl('somegithost.com:1234/octocat/hello-world.git')).to.be.null;
    // BUG FIX: the second assertion previously called processUrlPath, so the
    // oversized-URL case of processGitUrl was never actually exercised.
    expect(processGitUrl('somegithost.com:1234' + VERY_LONG_PATH + '.git')).to.be.null;
  });

  it('processGitURLForNameAndOrg should return breakdown of a host-prefixed git URL separating out the project (organisation) and repository name', function () {
    expect(processGitURLForNameAndOrg('github.com/octocat/hello-world.git')).to.deep.eq({
      project: 'octocat',
      repoName: 'hello-world.git',
    });
  });

  it('processGitURLForNameAndOrg should return breakdown of a git repository URL separating out the project (organisation) and repository name', function () {
    expect(processGitURLForNameAndOrg('https://github.com:80/octocat/hello-world.git')).to.deep.eq({
      project: 'octocat',
      repoName: 'hello-world.git',
    });
  });

  it("processGitURLForNameAndOrg should return null for a git repository URL it can't parse", function () {
    expect(processGitURLForNameAndOrg('someGitHost.com/repo')).to.be.null;
    expect(processGitURLForNameAndOrg('https://someGitHost.com/repo')).to.be.null;
    expect(processGitURLForNameAndOrg('https://somegithost.com:1234' + VERY_LONG_PATH + '.git')).to
      .be.null;
  });

  it('validGitRequest should return true for safe requests on expected URLs', function () {
    [
      '/info/refs?service=git-upload-pack',
      '/info/refs?service=git-receive-pack',
      '/git-upload-pack',
      '/git-receive-pack',
    ].forEach((url) => {
      expect(
        validGitRequest(url, {
          'user-agent': 'git/2.30.0',
          accept: 'application/x-git-upload-pack-request',
        }),
      ).true;
    });
  });

  it('validGitRequest should return false for unsafe URLs', function () {
    ['/', '/foo'].forEach((url) => {
      expect(
        validGitRequest(url, {
          'user-agent': 'git/2.30.0',
          accept: 'application/x-git-upload-pack-request',
        }),
      ).false;
    });
  });

  it('validGitRequest should return false for a browser request', function () {
    expect(
      validGitRequest('/', {
        'user-agent': 'Mozilla/5.0',
        accept: '*/*',
      }),
    ).false;
  });

  it('validGitRequest should return false for unexpected combinations of headers & URLs', function () {
    // expected Accept=application/x-git-upload-pack
    expect(
      validGitRequest('/git-upload-pack', {
        'user-agent': 'git/2.30.0',
        accept: '*/*',
      }),
    ).false;

    // expected User-Agent=git/*
    expect(
      validGitRequest('/info/refs?service=git-upload-pack', {
        'user-agent': 'Mozilla/5.0',
        accept: '*/*',
      }),
    ).false;
  });

  it('validGitRequest should return false for unexpected content-type on certain URLs', function () {
    // forEach, not map: this loop is purely for side effects (assertions).
    ['application/json', 'text/html', '*/*'].forEach((accept) => {
      expect(
        validGitRequest('/git-upload-pack', {
          'user-agent': 'git/2.30.0',
          accept: accept,
        }),
      ).false;
    });
  });
});