Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/workflows/regression.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,11 @@ jobs:
architecture: "arm64"
- platform: aws
language: nodejs
version: "16"
version: "24"
architecture: "x64"
- platform: aws
language: nodejs
version: "16"
version: "24"
architecture: "arm64"
- platform: aws
language: cpp
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,5 @@ const timer = ms => new Promise( res => setTimeout(res, ms));
exports.handler = async function(event) {
var sleep = event.sleep;
timer(sleep*1000);
return sleep;
return {result: sleep};
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Return the benchmark value directly to avoid result.result.

Line 7 makes the wrapper response contain result: { result: sleep }. The regression path extracts the wrapper-level result, so validation receives an object instead of the numeric sleep value.

🐛 Proposed fix
-  return {result: sleep};
+  return sleep;
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
return {result: sleep};
return sleep;
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@benchmarks/000.microbenchmarks/010.sleep/nodejs/function.js` at line 7, The
wrapper currently returns an object ({ result: sleep }) which makes callers read
result.result; change the function to return the numeric benchmark value
directly by returning the sleep variable instead of an object so the regression
path/validation receives the numeric sleep value (replace the return in the
function that currently returns {result: sleep} to return sleep).

};
2 changes: 1 addition & 1 deletion benchmarks/100.webapps/110.dynamic-html/nodejs/function.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ exports.handler = async function(event) {
fs.readFile(file, "utf-8",
function(err, data) {
if(err) reject(err);
resolve(Mustache.render(data, input));
resolve({result: Mustache.render(data, input)});
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Avoid nesting result inside the wrapper-level result.

Line 26 returns { result: html }, but the Node.js wrappers already place the handler return value under result. This makes validators receive { result: html } instead of the rendered HTML string.

🐛 Proposed fix
-        resolve({result: Mustache.render(data, input)});
+        resolve(Mustache.render(data, input));
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@benchmarks/100.webapps/110.dynamic-html/nodejs/function.js` at line 26, The
Promise is resolving an object with a nested result ({ result:
Mustache.render(...) }) causing the runtime wrapper to double-wrap the response;
change the resolver in this handler so it resolves the rendered HTML directly
(use Mustache.render(data, input) as the resolved value instead of wrapping it
in { result: ... }) — locate the call to resolve(...) that contains
Mustache.render and return the rendered string directly.

}
);
});
Expand Down
78 changes: 78 additions & 0 deletions benchmarks/100.webapps/130.crud-api/nodejs/function.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
const nosql = require('./nosql');

const nosqlClient = nosql.nosql.get_instance();
const nosqlTableName = "shopping_cart";
Comment thread
mcopik marked this conversation as resolved.

// Persist a single line item for the given cart in the shopping-cart table.
// The composite key is (cart_id, product_id); price/quantity/name are payload.
async function addProduct(cartId, productId, productName, price, quantity) {
  const attributes = { price: price, quantity: quantity, name: productName };
  await nosqlClient.insert(
    nosqlTableName,
    ["cart_id", cartId],
    ["product_id", productId],
    attributes
  );
}

// Fetch exactly one cart entry identified by the composite key
// (cart_id, product_id); resolves to whatever the NoSQL wrapper returns.
async function getProducts(cartId, productId) {
  const primaryKey = ["cart_id", cartId];
  const secondaryKey = ["product_id", productId];
  return await nosqlClient.get(nosqlTableName, primaryKey, secondaryKey);
}

// List every product in a cart and aggregate cost statistics:
// product names, total cost (price * quantity summed), and the
// quantity-weighted average unit price (0.0 for an empty cart).
async function queryProducts(cartId) {
  const rows = await nosqlClient.query(
    nosqlTableName,
    ["cart_id", cartId],
    "product_id"
  );

  const names = [];
  let totalCost = 0;
  let totalQuantity = 0;
  for (const row of rows) {
    names.push(row.name);
    totalCost += row.price * row.quantity;
    totalQuantity += row.quantity;
  }

  // Guard against division by zero when the cart has no items.
  const avgPrice = totalQuantity > 0 ? totalCost / totalQuantity : 0.0;

  return {
    products: names,
    total_cost: totalCost,
    avg_price: avgPrice
  };
}

exports.handler = async function(event) {
const results = [];

for (const request of event.requests) {
const route = request.route;
const body = request.body;
let res;

if (route === "PUT /cart") {
await addProduct(
body.cart,
body.product_id,
body.name,
body.price,
body.quantity
);
res = {};
} else if (route === "GET /cart/{id}") {
res = await getProducts(body.cart, request.path.id);
} else if (route === "GET /cart") {
res = await queryProducts(body.cart);
} else {
throw new Error(`Unknown request route: ${route}`);
}

results.push(res);
}

return results;
};
9 changes: 9 additions & 0 deletions benchmarks/100.webapps/130.crud-api/nodejs/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"name": "crud-api",
"version": "1.0.0",
"description": "CRUD API benchmark",
"author": "",
"license": "",
"dependencies": {
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,5 +25,5 @@ exports.handler = async function(event) {
}
);
await promise;
return {bucket: output_prefix, key: uploadName}
return {result: {bucket: output_prefix, key: uploadName}}
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Keep the thumbnail result flat for the provider wrappers.

Line 28 causes wrapper output to become result: { result: { bucket, key } }. Return the object directly unless every validator was updated to unwrap the extra layer.

🐛 Proposed fix
-  return {result: {bucket: output_prefix, key: uploadName}}
+  return {bucket: output_prefix, key: uploadName};
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
return {result: {bucket: output_prefix, key: uploadName}}
return {bucket: output_prefix, key: uploadName};
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@benchmarks/200.multimedia/210.thumbnailer/nodejs/function.js` at line 28, The
function currently returns an extra nested result object (return {result:
{bucket: output_prefix, key: uploadName}}), which makes provider wrappers see
result.result; change the return to return the thumbnail info directly by
returning an object with bucket and key (use output_prefix and uploadName) so
callers receive {bucket, key} instead of {result: {bucket, key}}; update any
local references expecting the nested layer if present.

};
10 changes: 10 additions & 0 deletions benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json.22
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{
"name": "",
"version": "1.0.0",
"description": "",
"author": "",
"license": "",
"dependencies": {
"sharp": "^0.33"
}
}
10 changes: 10 additions & 0 deletions benchmarks/200.multimedia/210.thumbnailer/nodejs/package.json.24
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{
"name": "",
"version": "1.0.0",
"description": "",
"author": "",
"license": "",
"dependencies": {
"sharp": "^0.33"
}
}
124 changes: 124 additions & 0 deletions benchmarks/300.utilities/311.compression/nodejs/function.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
const fs = require('fs');
const path = require('path');
const archiver = require('archiver');
const { v4: uuidv4 } = require('uuid');
const storage = require('./storage');

let storage_handler = new storage.storage();

/**
* Calculate total size of a directory recursively
* @param {string} directory - Path to directory
* @returns {number} Total size in bytes
*/
function parseDirectory(directory) {
  // Iterative traversal: `pending` holds directories still to visit,
  // which avoids recursion while summing sizes of regular files only
  // (directory entries themselves contribute nothing).
  let size = 0;
  const pending = [directory];

  while (pending.length > 0) {
    const dir = pending.pop();
    for (const entry of fs.readdirSync(dir)) {
      const entryPath = path.join(dir, entry);
      const stat = fs.statSync(entryPath);
      if (stat.isDirectory()) {
        pending.push(entryPath);
      } else {
        size += stat.size;
      }
    }
  }

  return size;
}

/**
* Create a zip archive from a directory using archiver
* @param {string} sourceDir - Directory to compress
* @param {string} outputPath - Path for the output archive file
* @returns {Promise<void>}
*/
async function createZipArchive(sourceDir, outputPath) {
  return new Promise((resolve, reject) => {
    const output = fs.createWriteStream(outputPath);
    const archive = archiver('zip', {
      zlib: { level: 9 } // Maximum compression
    });

    // The archive is complete only once the file stream has flushed.
    output.on('close', () => {
      resolve();
    });

    // Without this handler a failing write stream (bad path, disk full)
    // would leave the promise pending forever instead of rejecting.
    output.on('error', (err) => {
      reject(err);
    });

    archive.on('error', (err) => {
      reject(err);
    });

    archive.pipe(output);

    // Add all files from the directory, excluding the archive itself
    archive.glob('**/*', {
      cwd: sourceDir,
      ignore: [path.basename(outputPath)]
    });

    archive.finalize();
  });
}

exports.handler = async function(event) {
const bucket = event.bucket.bucket;
const input_prefix = event.bucket.input;
const output_prefix = event.bucket.output;
const key = event.object.key;

// Create unique download path
const download_path = path.join('/tmp', `${key}-${uuidv4()}`);
fs.mkdirSync(download_path, { recursive: true });

// Download directory from storage
const s3_download_begin = Date.now();
await storage_handler.downloadDirectory(bucket, path.join(input_prefix, key), download_path);
const s3_download_stop = Date.now();
Comment on lines +77 to +80
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
# Verify whether download_directory is defined in any Node.js storage wrapper
fd -e js storage.js benchmarks/wrappers | xargs -I{} sh -c 'echo "=== {} ==="; grep -nE "download_directory|downloadDirectory" "{}" || echo "NOT FOUND"'

Repository: spcl/serverless-benchmarks

Length of output: 379


🏁 Script executed:

# Check what methods are actually defined in one storage wrapper (AWS as representative)
echo "=== AWS storage.js exports ===" && grep -nE "^(exports\.|module\.exports|async function|function )" benchmarks/wrappers/aws/nodejs/storage.js | head -20

echo -e "\n=== Check function.js around lines 100-103 ===" && sed -n '95,115p' benchmarks/300.utilities/311.compression/nodejs/function.js

echo -e "\n=== Search for download_directory usage in entire codebase ===" && rg "download_directory|downloadDirectory" --type js

Repository: spcl/serverless-benchmarks

Length of output: 1139


🏁 Script executed:

# Get the full AWS storage.js to see what methods are defined
echo "=== AWS storage.js full content ===" && cat benchmarks/wrappers/aws/nodejs/storage.js

echo -e "\n=== Verify exact line numbers in function.js ===" && cat -n benchmarks/300.utilities/311.compression/nodejs/function.js | sed -n '95,120p'

Repository: spcl/serverless-benchmarks

Length of output: 3243


storage_handler.download_directory is not defined on any storage wrapper.

benchmarks/wrappers/aws/nodejs/storage.js (and all other providers: Azure, GCP, local, OpenWhisk) only expose upload, download, uploadStream, downloadStream, and unique_name. The call at line 102 will throw TypeError: storage_handler.download_directory is not a function at runtime.

Either add download_directory to each provider's storage wrapper or list all S3 objects under path.join(input_prefix, key) and download them individually via storage_handler.download.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@benchmarks/300.utilities/311.compression/nodejs/function.js` around lines 100
- 103, The call to storage_handler.download_directory (in function.js) is
invalid because storage wrappers only implement upload, download, uploadStream,
downloadStream, and unique_name; replace the download_directory usage by listing
all objects under path.join(input_prefix, key) (using the provider-specific
listing utility or S3 list API) and loop over each object calling
storage_handler.download for each target path, or alternatively add a
download_directory method to every storage wrapper (wrappers like
benchmarks/wrappers/aws/nodejs/storage.js, azure, gcp, local, openwhisk)
implementing recursive listing+download so storage_handler.download_directory
exists; update references to storage_handler.download_directory or add the
method consistently across all wrappers so runtime TypeError is avoided.


// Calculate size of downloaded files
const size = parseDirectory(download_path);

// Compress directory
const compress_begin = Date.now();
const archive_name = `${key}.zip`;
const archive_path = path.join(download_path, archive_name);
await createZipArchive(download_path, archive_path);
Comment on lines +73 to +89
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Do not use the storage key as a local filesystem path.

Lines 74 and 87 use key directly for local paths. A key like foo/bar makes archive_path point to a nested directory that may not exist, causing archive creation to fail.

🐛 Proposed fix
   // Create unique download path
-  const download_path = path.join('/tmp', `${key}-${uuidv4()}`);
+  const run_id = uuidv4();
+  const download_path = path.join('/tmp', `compression-${run_id}`);
   fs.mkdirSync(download_path, { recursive: true });
 
@@
   // Compress directory
   const compress_begin = Date.now();
   const archive_name = `${key}.zip`;
-  const archive_path = path.join(download_path, archive_name);
+  const local_archive_name = `${path.basename(key) || 'archive'}.${run_id}.zip`;
+  const archive_path = path.join(download_path, local_archive_name);
   await createZipArchive(download_path, archive_path);
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@benchmarks/300.utilities/311.compression/nodejs/function.js` around lines 73
- 89, The code uses the raw storage key when building local paths
(archive_name/archive_path), which breaks for keys containing slashes; change
usages so local filenames are derived from a safe/sanitized form of key (e.g.,
use path.basename(key) or replace path separators with an underscore) and ensure
archive_path is always created under download_path before calling
createZipArchive; update the archive_name and archive_path construction
(references: archive_name, archive_path, key, download_path) and keep
storage_handler.downloadDirectory and parseDirectory behavior the same so
downloads still land in download_path.

const compress_end = Date.now();

// Get archive size
const archive_size = fs.statSync(archive_path).size;

// Upload compressed archive
const s3_upload_begin = Date.now();
const [key_name, uploadPromise] = storage_handler.upload(
bucket,
path.join(output_prefix, archive_name),
archive_path
);
await uploadPromise;
const s3_upload_stop = Date.now();

// Calculate times in microseconds
const download_time = (s3_download_stop - s3_download_begin) * 1000;
const upload_time = (s3_upload_stop - s3_upload_begin) * 1000;
const process_time = (compress_end - compress_begin) * 1000;

return {
result: {
bucket: bucket,
key: key_name
},
measurement: {
download_time: download_time,
download_size: size,
upload_time: upload_time,
upload_size: archive_size,
compute_time: process_time
}
};
};
Comment thread
mcopik marked this conversation as resolved.

9 changes: 9 additions & 0 deletions benchmarks/300.utilities/311.compression/nodejs/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"name": "compression-benchmark",
"version": "1.0.0",
"description": "Compression benchmark for serverless platforms",
"dependencies": {
"uuid": "^10.0.0",
"archiver": "^7.0.0"
}
}
2 changes: 1 addition & 1 deletion benchmarks/wrappers/aws/nodejs/handler.js
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ exports.handler = async function(event, context) {
end: end,
compute_time: micro,
results_time: 0,
result: {result: result},
result: result,
is_cold: is_cold,
request_id: context.awsRequestId
}, http_trigger)
Expand Down
104 changes: 104 additions & 0 deletions benchmarks/wrappers/aws/nodejs/nosql.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
// Copyright 2020-2025 ETH Zurich and the SeBS authors. All rights reserved.
//
// This is pretty much a Node.js rewrite of our Python wrapper.



const { DynamoDBDocument } = require("@aws-sdk/lib-dynamodb");
const { DynamoDB } = require("@aws-sdk/client-dynamodb");

// Thin DynamoDB wrapper mirroring the Python NoSQL interface: all methods
// take (table_name, [pk_name, pk_value], [sk_name, sk_value], ...) and work
// with plain JS objects (DynamoDBDocument handles attribute marshalling).
class nosql {
  constructor() {
    this.client = DynamoDBDocument.from(new DynamoDB());
    // Cache of logical table name -> deployed physical table name.
    this._tables = {};
  }

  // Resolve the logical benchmark table name to the physical table name
  // published by the deployer via NOSQL_STORAGE_TABLE_<name>; cached.
  // Throws when the environment variable is missing.
  _get_table(table_name) {
    if (!(table_name in this._tables)) {
      const env_name = `NOSQL_STORAGE_TABLE_${table_name}`;
      if (env_name in process.env) {
        this._tables[table_name] = process.env[env_name];
      } else {
        throw new Error(
          `Couldn't find an environment variable ${env_name} for table ${table_name}`
        );
      }
    }
    return this._tables[table_name];
  }

  // Store `data` under the composite key; the stored item carries both key
  // attributes alongside the payload. Copies instead of mutating the
  // caller-owned `data` object.
  async insert(table_name, primary_key, secondary_key, data) {
    const item = {
      ...data,
      [primary_key[0]]: primary_key[1],
      [secondary_key[0]]: secondary_key[1],
    };
    await this.client.put({ TableName: this._get_table(table_name), Item: item });
  }

  // Read a single item by composite key; resolves to the item or undefined.
  async get(table_name, primary_key, secondary_key) {
    const key = {
      [primary_key[0]]: primary_key[1],
      [secondary_key[0]]: secondary_key[1],
    };
    const res = await this.client.get({
      TableName: this._get_table(table_name),
      Key: key,
    });
    return res.Item;
  }

  // Apply attribute updates to an existing item. Attribute names are
  // aliased (#x_name / :x_value) to avoid clashes with DynamoDB
  // reserved words.
  async update(table_name, primary_key, secondary_key, updates) {
    // Guard: an empty update map would otherwise yield the malformed
    // expression "SE" after comma-trimming and DynamoDB would reject the
    // request — mirror the local/OpenWhisk wrappers and do nothing.
    if (Object.keys(updates).length === 0) {
      return;
    }

    const key = {
      [primary_key[0]]: primary_key[1],
      [secondary_key[0]]: secondary_key[1],
    };

    const update_names = {};
    const update_values = {};
    const assignments = [];
    for (const [key_name, value] of Object.entries(updates)) {
      assignments.push(`#${key_name}_name = :${key_name}_value`);
      update_names[`#${key_name}_name`] = key_name;
      update_values[`:${key_name}_value`] = value;
    }

    await this.client.update({
      TableName: this._get_table(table_name),
      Key: key,
      UpdateExpression: `SET ${assignments.join(", ")}`,
      ExpressionAttributeNames: update_names,
      ExpressionAttributeValues: update_values,
    });
  }

  // Return all items sharing the primary key; the third argument (sort key
  // name) is unused because the key condition covers the partition only.
  async query(table_name, primary_key, _) {
    const key_name = primary_key[0];
    const res = await this.client.query({
      TableName: this._get_table(table_name),
      KeyConditionExpression: "#key_name = :keyvalue",
      ExpressionAttributeNames: { "#key_name": key_name },
      ExpressionAttributeValues: { ":keyvalue": primary_key[1] },
    });
    return res.Items;
  }

  // Remove a single item by composite key.
  async delete(table_name, primary_key, secondary_key) {
    const key = {
      [primary_key[0]]: primary_key[1],
      [secondary_key[0]]: secondary_key[1],
    };
    await this.client.delete({ TableName: this._get_table(table_name), Key: key });
  }

  static get_instance() {
    // The instance is independent of the storage table,
    // as we provide table name for each operation.
    if (!nosql.instance) {
      nosql.instance = new nosql();
    }
    return nosql.instance;
  }
}
exports.nosql = nosql;
Loading