
Commit 0665bdf

Merge pull request #122 from SumoLogic/hpal_security_fix
Published zip files and updated test artifacts
2 parents: e4f0492 + dd9fe99 · commit 0665bdf

File tree

7 files changed: +21 -15 lines changed


BlockBlobReader/src/blobreaderzipdeploy.json

Lines changed: 3 additions & 3 deletions
@@ -419,7 +419,7 @@
         "[variables('BlobReader_resourceId')]"
       ],
       "properties": {
-        "packageUri": "https://appdev-cloudformation-templates.s3.amazonaws.com/AzureBlobReader/taskproducer4.1.2.zip",
+        "packageUri": "https://appdev-cloudformation-templates.s3.amazonaws.com/AzureBlobReader/taskproducer4.1.4.zip",
         "appOffline": true
       }
     }
@@ -512,7 +512,7 @@
         "[variables('blobreaderconsumer_resourceId')]"
       ],
       "properties": {
-        "packageUri": "https://appdev-cloudformation-templates.s3.amazonaws.com/AzureBlobReader/taskconsumer4.1.2.zip",
+        "packageUri": "https://appdev-cloudformation-templates.s3.amazonaws.com/AzureBlobReader/taskconsumer4.1.4.zip",
         "appOffline": true
       }
     }
@@ -609,7 +609,7 @@
         "[variables('DLQProcessor_resourceId')]"
      ],
      "properties": {
-        "packageUri": "https://appdev-cloudformation-templates.s3.amazonaws.com/AzureBlobReader/dlqprocessor4.1.2.zip",
+        "packageUri": "https://appdev-cloudformation-templates.s3.amazonaws.com/AzureBlobReader/dlqprocessor4.1.4.zip",
         "appOffline": true
       }
     }

BlockBlobReader/src/consumer.js

Lines changed: 3 additions & 2 deletions
@@ -195,8 +195,9 @@ function getRowKey(metadata) {
 
 async function setAppendBlobOffset(context, serviceBusTask, newOffset) {
 
+    let rowKey = "";
     try {
-        let rowKey = getRowKey(serviceBusTask);
+        rowKey = getRowKey(serviceBusTask);
         // Todo: this should be atomic update if other request decreases offset it shouldn't allow
         context.log.verbose("Attempting to update offset row: %s from: %d to: %d", rowKey, serviceBusTask.startByte, newOffset);
         let entity = {
@@ -208,7 +209,7 @@ async function setAppendBlobOffset(context, serviceBusTask, newOffset) {
             containerName: serviceBusTask.containerName,
             storageName: serviceBusTask.storageName
         }
-        var updateResult = await azureTableClient.updateEntity(entity, "Merge");
+        let updateResult = await azureTableClient.updateEntity(entity, "Merge");
         context.log.verbose("Updated offset result: %s row: %s from: %d to: %d", JSON.stringify(updateResult), rowKey, serviceBusTask.startByte, newOffset);
     } catch (error) {
         context.log.error(`Error - Failed to update OffsetMap table, error: ${JSON.stringify(error)}, rowKey: ${rowKey}, newOffset: ${newOffset}`)
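Why hoist rowKey? In the pre-fix code, `let rowKey` was declared inside the try block, but the catch handler's error message interpolates ${rowKey}; since let is block-scoped in JavaScript, any failure would make the log line itself throw a ReferenceError and mask the original error. Below is a minimal standalone sketch of the pattern; the getRowKey stub, the simulated failure, and the fake context are illustrative stand-ins, not the project's actual implementations:

// Illustrative stand-in for the real getRowKey(serviceBusTask).
function getRowKey(task) {
    if (!task.blobName) {
        throw new Error("missing blobName");
    }
    return `${task.storageName}-${task.containerName}-${task.blobName}`;
}

async function setAppendBlobOffsetSketch(context, serviceBusTask, newOffset) {
    // Declared before the try block so the catch handler can still read it;
    // with `let rowKey` inside the try (the old code), the template literal
    // in the catch would throw a ReferenceError whenever the update failed.
    let rowKey = "";
    try {
        rowKey = getRowKey(serviceBusTask);
        throw new Error("simulated updateEntity failure"); // stand-in for the table update
    } catch (error) {
        context.log.error(`Error - Failed to update OffsetMap table, error: ${JSON.stringify(error)}, rowKey: ${rowKey}, newOffset: ${newOffset}`);
    }
}

// Usage with a fake Azure Functions context whose logger just prints:
setAppendBlobOffsetSketch(
    { log: { error: console.error } },
    { storageName: "st", containerName: "ct", blobName: "blob.log" },
    42
);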

BlockBlobReader/src/create_zip.sh

Lines changed: 1 addition & 1 deletion
@@ -80,7 +80,7 @@ else
 fi
 
 echo "creating zip"
-version="4.1.2"
+version="4.1.4"
 producer_zip_file="taskproducer$version.zip"
 consumer_zip_file="taskconsumer$version.zip"
 dlqprocessor_zip_file="dlqprocessor$version.zip"

BlockBlobReader/target/consumer_build/BlobTaskConsumer/index.js

Lines changed: 3 additions & 2 deletions
@@ -195,8 +195,9 @@ function getRowKey(metadata) {
 
 async function setAppendBlobOffset(context, serviceBusTask, newOffset) {
 
+    let rowKey = "";
     try {
-        let rowKey = getRowKey(serviceBusTask);
+        rowKey = getRowKey(serviceBusTask);
         // Todo: this should be atomic update if other request decreases offset it shouldn't allow
         context.log.verbose("Attempting to update offset row: %s from: %d to: %d", rowKey, serviceBusTask.startByte, newOffset);
         let entity = {
@@ -208,7 +209,7 @@ async function setAppendBlobOffset(context, serviceBusTask, newOffset) {
             containerName: serviceBusTask.containerName,
             storageName: serviceBusTask.storageName
         }
-        var updateResult = await azureTableClient.updateEntity(entity, "Merge");
+        let updateResult = await azureTableClient.updateEntity(entity, "Merge");
         context.log.verbose("Updated offset result: %s row: %s from: %d to: %d", JSON.stringify(updateResult), rowKey, serviceBusTask.startByte, newOffset);
     } catch (error) {
         context.log.error(`Error - Failed to update OffsetMap table, error: ${JSON.stringify(error)}, rowKey: ${rowKey}, newOffset: ${newOffset}`)

BlockBlobReader/target/dlqprocessor_build/DLQTaskConsumer/index.js

Lines changed: 3 additions & 2 deletions
@@ -195,8 +195,9 @@ function getRowKey(metadata) {
 
 async function setAppendBlobOffset(context, serviceBusTask, newOffset) {
 
+    let rowKey = "";
     try {
-        let rowKey = getRowKey(serviceBusTask);
+        rowKey = getRowKey(serviceBusTask);
         // Todo: this should be atomic update if other request decreases offset it shouldn't allow
         context.log.verbose("Attempting to update offset row: %s from: %d to: %d", rowKey, serviceBusTask.startByte, newOffset);
         let entity = {
@@ -208,7 +209,7 @@ async function setAppendBlobOffset(context, serviceBusTask, newOffset) {
             containerName: serviceBusTask.containerName,
             storageName: serviceBusTask.storageName
         }
-        var updateResult = await azureTableClient.updateEntity(entity, "Merge");
+        let updateResult = await azureTableClient.updateEntity(entity, "Merge");
         context.log.verbose("Updated offset result: %s row: %s from: %d to: %d", JSON.stringify(updateResult), rowKey, serviceBusTask.startByte, newOffset);
     } catch (error) {
         context.log.error(`Error - Failed to update OffsetMap table, error: ${JSON.stringify(error)}, rowKey: ${rowKey}, newOffset: ${newOffset}`)

BlockBlobReader/tests/blob_fixtures.json

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

BlockBlobReader/tests/test_blobreader.py

Lines changed: 7 additions & 4 deletions
@@ -98,7 +98,7 @@ def get_full_testlog_file_name(self):
         if len(self.test_filename) > maxMetadataLength:
             expected_filename = self.test_filename[:60] + "..." + self.test_filename[-(60-len(file_ext)):] + file_ext
         else:
-            expected_filename = self.test_filename
+            expected_filename = self.test_filename + file_ext
         return expected_filename
 
     def test_03_func_logs(self):
@@ -146,7 +146,7 @@ def test_03_func_logs(self):
         expected_record_count = {
             "blob": 15,
             "log": 10,
-            "json": 120,
+            "json": 153,
             "csv": 12
         }
         record_count = record_excluded_by_filter_count = record_unsupported_extension_count = None
@@ -184,7 +184,9 @@ def test_03_func_logs(self):
     def upload_message_in_service_bus(self):
         file_ext = f".{self.log_type}"
         test_filename = self.test_filename + file_ext
-        file_size = os.path.getsize(f"blob_fixtures{file_ext}")
+        with open(f"blob_fixtures{file_ext}", "r") as fp:
+            file_size = len(fp.read())
+
         triggerData = {
             "blobName": test_filename,
             "containerName": self.test_container_name,
@@ -329,7 +331,8 @@ def insert_mock_json_in_BlobStorage(self):
             self.test_container_name, test_filename)
         for i, data_block in enumerate(self.get_json_data()):
             block_id = self.get_random_name()
-            file_bytes = json.dumps(data_block)
+            # removing spaces(added by json.loads) using separators
+            file_bytes = json.dumps(data_block, separators=(',', ':'))
             file_bytes = (file_bytes[1:-1] if i ==
                           0 else "," + file_bytes[1:-1]).encode()
             self.block_blob_service.put_block(
