4 changes: 2 additions & 2 deletions .github/workflows/ci-performance.yml
@@ -70,7 +70,7 @@ jobs:
env:
NODE_ENV: production
run: |
echo "Running baseline benchmarks with CPU affinity (using PR's benchmark script)..."
echo "Running baseline benchmarks..."
if [ ! -f "benchmark/performance.js" ]; then
echo "⚠️ Benchmark script not found - this is expected for new features"
echo "Skipping baseline benchmark"
@@ -135,7 +135,7 @@ jobs:
env:
NODE_ENV: production
run: |
echo "Running PR benchmarks with CPU affinity..."
echo "Running PR benchmarks..."
taskset -c 0 npm run benchmark > pr-output.txt 2>&1 || npm run benchmark > pr-output.txt 2>&1 || true
echo "Benchmark command completed with exit code: $?"
echo "Output file size: $(wc -c < pr-output.txt) bytes"
70 changes: 70 additions & 0 deletions benchmark/db-proxy.js
@@ -0,0 +1,70 @@
/**
 * Simple TCP proxy to add artificial latency to MongoDB connections
 * This helps make benchmark measurements more stable by simulating network conditions
 */

/* eslint-disable no-console */

const net = require('net');

const PROXY_PORT = parseInt(process.env.PROXY_PORT || '27018', 10);
const TARGET_HOST = process.env.TARGET_HOST || 'localhost';
const TARGET_PORT = parseInt(process.env.TARGET_PORT || '27017', 10);
const LATENCY_MS = parseInt(process.env.LATENCY_MS || '10', 10);

const server = net.createServer((clientSocket) => {
  const serverSocket = net.createConnection({
    host: TARGET_HOST,
    port: TARGET_PORT,
  });

  // Add latency to data flowing from client to MongoDB
  clientSocket.on('data', (data) => {
    setTimeout(() => {
      if (!serverSocket.destroyed) {
        serverSocket.write(data);
      }
    }, LATENCY_MS);
  });

  // Add latency to data flowing from MongoDB to client
  serverSocket.on('data', (data) => {
    setTimeout(() => {
      if (!clientSocket.destroyed) {
        clientSocket.write(data);
      }
    }, LATENCY_MS);
  });

  clientSocket.on('error', () => {
    serverSocket.destroy();
  });

  serverSocket.on('error', () => {
    clientSocket.destroy();
  });

  clientSocket.on('close', () => {
    serverSocket.destroy();
  });

  serverSocket.on('close', () => {
    clientSocket.destroy();
  });
});

server.listen(PROXY_PORT, () => {
  console.log(`MongoDB proxy listening on port ${PROXY_PORT} forwarding to ${TARGET_PORT} with ${LATENCY_MS}ms latency`);
});

process.on('SIGTERM', () => {
  server.close(() => {
    process.exit(0);
  });
});

process.on('SIGINT', () => {
  server.close(() => {
    process.exit(0);
  });
});
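For reference, a minimal way to run the proxy standalone might look like this (a sketch, assuming a local MongoDB is already listening on 27017; the variables mirror the defaults parsed at the top of the file):

LATENCY_MS=10 PROXY_PORT=27018 TARGET_PORT=27017 node benchmark/db-proxy.js
# Point any MongoDB client at mongodb://localhost:27018/<db> to pick up the added per-chunk latency.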
193 changes: 165 additions & 28 deletions benchmark/performance.js
@@ -24,6 +24,35 @@ const ITERATIONS = parseInt(process.env.BENCHMARK_ITERATIONS || '10000', 10);
// Parse Server instance
let parseServer;
let mongoClient;
let proxyProcess;
let proxyServerCleanup;

/**
 * Start MongoDB proxy with artificial latency
 */
async function startProxy() {
  const { spawn } = require('child_process');

  proxyProcess = spawn('node', ['benchmark/db-proxy.js'], {
    env: { ...process.env, PROXY_PORT: '27018', TARGET_PORT: '27017', LATENCY_MS: '10000' },
    stdio: 'inherit',
  });

  // Wait for proxy to start
  await new Promise(resolve => setTimeout(resolve, 2000));
  console.log('MongoDB proxy started on port 27018 with 10ms latency');
}
Comment on lines +37 to +44

⚠️ Potential issue | 🔴 Critical

Restore intended 10 ms latency constant

startProxy exports LATENCY_MS: '10000', i.e. 10 seconds per packet. With 10 k iterations the include benchmark would take many hours and CI will time out. The surrounding comments and log lines say “10 ms”, so this is a regression. Please drop the extra zeros (or plumb through the intended value).

-    env: { ...process.env, PROXY_PORT: '27018', TARGET_PORT: '27017', LATENCY_MS: '10000' },
+    env: { ...process.env, PROXY_PORT: '27018', TARGET_PORT: '27017', LATENCY_MS: '10' },
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
-    env: { ...process.env, PROXY_PORT: '27018', TARGET_PORT: '27017', LATENCY_MS: '10000' },
+    env: { ...process.env, PROXY_PORT: '27018', TARGET_PORT: '27017', LATENCY_MS: '10' },
    stdio: 'inherit',
  });

  // Wait for proxy to start
  await new Promise(resolve => setTimeout(resolve, 2000));
  console.log('MongoDB proxy started on port 27018 with 10ms latency');
}
🤖 Prompt for AI Agents
In benchmark/performance.js around lines 37 to 44, the proxy is being started
with env LATENCY_MS set to '10000' (10 seconds) which is a regression; change
the LATENCY_MS value to '10' (10 milliseconds) so the benchmark runs as intended
and update any related string/constant if necessary to reflect 10ms.


/**
 * Stop MongoDB proxy
 */
async function stopProxy() {
  if (proxyProcess) {
    proxyProcess.kill();
    await new Promise(resolve => setTimeout(resolve, 500));
    console.log('MongoDB proxy stopped');
  }
}

/**
 * Initialize Parse Server for benchmarking
@@ -86,6 +115,66 @@ async function cleanupDatabase() {
}
}

/**
 * Reset Parse SDK to use the default server
 */
function resetParseServer() {
  Parse.serverURL = SERVER_URL;
}

/**
 * Start a Parse Server instance using the DB proxy for latency simulation
 * Stores cleanup function globally for later use
 */
async function useProxyServer() {
  const express = require('express');
  const { default: ParseServer } = require('../lib/index.js');

  // Create a new Parse Server instance using the proxy
  const app = express();
  const proxyParseServer = new ParseServer({
    databaseURI: 'mongodb://localhost:27018/parse_benchmark_test',
    appId: APP_ID,
    masterKey: MASTER_KEY,
    serverURL: 'http://localhost:1338/parse',
    silent: true,
    allowClientClassCreation: true,
    logLevel: 'error',
    verbose: false,
  });

  app.use('/parse', proxyParseServer.app);

  const server = await new Promise((resolve, reject) => {
    const s = app.listen(1338, (err) => {
      if (err) {
        reject(err);
      } else {
        resolve(s);
      }
    });
  });

  // Configure Parse SDK to use the proxy server
  Parse.serverURL = 'http://localhost:1338/parse';

  // Store cleanup function globally
  proxyServerCleanup = async () => {
    server.close();
    await new Promise(resolve => setTimeout(resolve, 500));
    proxyServerCleanup = null;
  };
}

/**
 * Clean up proxy server if it's running
 */
async function cleanupProxyServer() {
  if (proxyServerCleanup) {
    await proxyServerCleanup();
  }
}

/**
 * Measure average time for an async operation over multiple iterations
 * Uses warmup iterations, median metric, and outlier filtering for robustness
@@ -293,6 +382,58 @@ async function benchmarkUserLogin() {
});
}

/**
 * Benchmark: Query with Include (Parallel Include Pointers)
 * This test uses the TCP proxy (port 27018) to simulate 10ms database latency for more realistic measurements
 */
async function benchmarkQueryWithInclude() {
  // Start proxy server
  await useProxyServer();

  // Setup: Create nested object hierarchy
  const Level2Class = Parse.Object.extend('Level2');
  const Level1Class = Parse.Object.extend('Level1');
  const RootClass = Parse.Object.extend('Root');

  // Create 10 Level2 objects
  const level2Objects = [];
  for (let i = 0; i < 10; i++) {
    const obj = new Level2Class();
    obj.set('name', `level2-${i}`);
    obj.set('value', i);
    level2Objects.push(obj);
  }
  await Parse.Object.saveAll(level2Objects);

  // Create 10 Level1 objects, each pointing to a Level2 object
  const level1Objects = [];
  for (let i = 0; i < 10; i++) {
    const obj = new Level1Class();
    obj.set('name', `level1-${i}`);
    obj.set('level2', level2Objects[i % level2Objects.length]);
    level1Objects.push(obj);
  }
  await Parse.Object.saveAll(level1Objects);

  // Create 10 Root objects, each pointing to a Level1 object
  const rootObjects = [];
  for (let i = 0; i < 10; i++) {
    const obj = new RootClass();
    obj.set('name', `root-${i}`);
    obj.set('level1', level1Objects[i % level1Objects.length]);
    rootObjects.push(obj);
  }
  await Parse.Object.saveAll(rootObjects);

  const result = await measureOperation('Query with Include (2 levels)', async () => {
    const query = new Parse.Query('Root');
    query.include('level1.level2');
    await query.find();
  });

  return result;
}

/**
 * Run all benchmarks
 */
@@ -303,6 +444,9 @@ async function runBenchmarks() {
  let server;

  try {
    // Start MongoDB proxy
    await startProxy();

    // Initialize Parse Server
    console.log('Initializing Parse Server...');
    server = await initializeParseServer();
@@ -312,34 +456,26 @@

    const results = [];

    // Run each benchmark with database cleanup
    console.log('Running Object Create benchmark...');
    await cleanupDatabase();
    results.push(await benchmarkObjectCreate());

    console.log('Running Object Read benchmark...');
    await cleanupDatabase();
    results.push(await benchmarkObjectRead());

    console.log('Running Object Update benchmark...');
    await cleanupDatabase();
    results.push(await benchmarkObjectUpdate());

    console.log('Running Simple Query benchmark...');
    await cleanupDatabase();
    results.push(await benchmarkSimpleQuery());
    // Define all benchmarks to run
    const benchmarks = [
      { name: 'Object Create', fn: benchmarkObjectCreate },
      { name: 'Object Read', fn: benchmarkObjectRead },
      { name: 'Object Update', fn: benchmarkObjectUpdate },
      { name: 'Simple Query', fn: benchmarkSimpleQuery },
      { name: 'Batch Save', fn: benchmarkBatchSave },
      { name: 'User Signup', fn: benchmarkUserSignup },
      { name: 'User Login', fn: benchmarkUserLogin },
      { name: 'Query with Include', fn: benchmarkQueryWithInclude },
    ];

    console.log('Running Batch Save benchmark...');
    await cleanupDatabase();
    results.push(await benchmarkBatchSave());

    console.log('Running User Signup benchmark...');
    await cleanupDatabase();
    results.push(await benchmarkUserSignup());

    console.log('Running User Login benchmark...');
    await cleanupDatabase();
    results.push(await benchmarkUserLogin());
    // Run each benchmark with database cleanup
    for (const benchmark of benchmarks) {
      console.log(`Running ${benchmark.name} benchmark...`);
      resetParseServer();
      await cleanupDatabase();
      results.push(await benchmark.fn());
      await cleanupProxyServer();
    }

    // Output results in github-action-benchmark format (stdout)
    console.log(JSON.stringify(results, null, 2));
@@ -362,8 +498,9 @@ async function runBenchmarks() {
    if (server) {
      server.close();
    }
    await stopProxy();
    // Give some time for cleanup
    setTimeout(() => process.exit(0), 10000);
    setTimeout(() => process.exit(0), 1000);
  }
}
