Commit b9c098d

completed testing, added dotnet framework version
1 parent 613a17e commit b9c098d

9 files changed: +192 -18 lines changed

AppendBlobReader/src/appendblobreaderdeploy.json

Lines changed: 3 additions & 0 deletions
@@ -501,6 +501,7 @@
         "http20Enabled": true,
         "minTlsVersion": "1.2",
         "scmMinTlsVersion": "1.2",
+        "netFrameworkVersion": "v6.0",
         "alwaysOn": true,
         "cors": {
           "allowedOrigins": [ "https://portal.azure.com" ]
@@ -595,6 +596,7 @@
         "http20Enabled": true,
         "minTlsVersion": "1.2",
         "scmMinTlsVersion": "1.2",
+        "netFrameworkVersion": "v6.0",
         "alwaysOn": true,
         "cors": {
           "allowedOrigins": [ "https://portal.azure.com" ]
@@ -701,6 +703,7 @@
         "http20Enabled": true,
         "minTlsVersion": "1.2",
         "scmMinTlsVersion": "1.2",
+        "netFrameworkVersion": "v6.0",
         "alwaysOn": true,
         "cors": {
           "allowedOrigins": [ "https://portal.azure.com" ]

AppendBlobReader/tests/test_appendblobreader.py

Lines changed: 3 additions & 3 deletions
@@ -139,7 +139,7 @@ def test_03_func_logs(self):
         self.assertTrue(self.filter_logs(captured_output, 'message', message),
                         f"No '{message}' log line found in '{azurefunction}' function logs")

-        message = "Updated offset result"
+        message = "Successfully updated OffsetMap"
         self.assertTrue(self.filter_logs(captured_output, 'message', message),
                         f"No '{message}' log line found in '{azurefunction}' function logs")

@@ -188,8 +188,8 @@ def test_03_func_logs(self):
             expected_filename = self.test_filename

         # Verify addition of _sourceCategory, _sourceHost, _sourceName and also additional metadata
-        self.assertTrue(source_name == f"{expected_filename}", f"_sourceName {source_name} metadata is incorrect")
-        self.assertTrue(source_host == f"{self.test_storageaccount_name}/{self.test_container_name}", f"_sourceHost {source_host} metadata is incorrect")
+        self.assertTrue(source_name == expected_filename, f"_sourceName: {source_name} expected_filename: {expected_filename} metadata is incorrect")
+        self.assertTrue(source_host == f"{self.test_storageaccount_name}/{self.test_container_name}", f"_sourceHost {source_host} expected_sourcehost: {self.test_storageaccount_name}/{self.test_container_name} metadata is incorrect")

     # def test_05_upload_filename_with_utf16_chars_having_utf16_chars_in_deep_folder():
     #     # Verify with a filename with special characters
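
The assertions in this test rely on a filter_logs helper defined elsewhere in the suite. A minimal sketch of what such a helper might look like, assuming captured_output is a list of dict-like log entries; the real implementation may differ:

# Hypothetical sketch of a filter_logs-style helper; the real one lives in the
# test base class and may differ in shape and behavior.
def filter_logs(captured_output, key, substring):
    """Return True if any captured log entry's `key` field contains `substring`."""
    for entry in captured_output:
        value = entry.get(key, "") if isinstance(entry, dict) else str(entry)
        if substring in value:
            return True
    return False

# Usage mirroring the test: assert that a specific consumer log line was emitted.
assert filter_logs([{"message": "Successfully updated OffsetMap for blob x"}],
                   "message", "Successfully updated OffsetMap")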

BlockBlobReader/src/blobreaderdeploywithPremiumPlan.json

Lines changed: 3 additions & 0 deletions
@@ -364,6 +364,7 @@
         "http20Enabled": true,
         "minTlsVersion": "1.2",
         "scmMinTlsVersion": "1.2",
+        "netFrameworkVersion": "v6.0",
         "appSettings": [
           {
             "name": "FUNCTIONS_EXTENSION_VERSION",
@@ -461,6 +462,7 @@
         "http20Enabled": true,
         "minTlsVersion": "1.2",
         "scmMinTlsVersion": "1.2",
+        "netFrameworkVersion": "v6.0",
         "appSettings": [
           {
             "name": "FUNCTIONS_EXTENSION_VERSION",
@@ -556,6 +558,7 @@
         "http20Enabled": true,
         "minTlsVersion": "1.2",
         "scmMinTlsVersion": "1.2",
+        "netFrameworkVersion": "v6.0",
         "appSettings": [
           {
             "name": "FUNCTIONS_EXTENSION_VERSION",

BlockBlobReader/target/consumer_build/host.json

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@
       "default": "Warning",
       "Host.Aggregator": "Trace",
       "Host.Results": "Information",
-      "Function": "Trace"
+      "Function": "Information"
     }
   }
 }

BlockBlobReader/target/dlqprocessor_build/host.json

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@
       "default": "Warning",
       "Host.Aggregator": "Trace",
       "Host.Results": "Information",
-      "Function": "Trace"
+      "Function": "Information"
     }
   }
 }

BlockBlobReader/target/producer_build/host.json

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@
       "default": "Warning",
       "Host.Aggregator": "Trace",
       "Host.Results": "Information",
-      "Function": "Trace"
+      "Function": "Information"
     }
   }
 }
Lines changed: 56 additions & 1 deletion
@@ -1 +1,56 @@
-{"records":[{"time":"2023-05-15T10:08:26.1964863Z","systemId":"61171fa9-d0e7-44e4-8133-d6f4035bc533","macAddress":"002248340BBC","category":"NetworkSecurityGroupFlowEvent","resourceId":"/SUBSCRIPTIONS/C088DC46-D692-42AD-A4B6-9A542D28AD2A/RESOURCEGROUPS/CON-1774/PROVIDERS/MICROSOFT.NETWORK/NETWORKSECURITYGROUPS/AADDS-NSG","operationName":"NetworkSecurityGroupFlowEvents","properties":{"Version":2,"flows":[{"rule":"DefaultRule_DenyAllInBound","flows":[{"mac":"002248340BBC","flowTuples":["1684145260,198.199.98.123,10.1.1.6,38095,7199,T,I,D,B,,,,","1684145262,162.142.125.180,10.1.1.6,10023,18061,T,I,D,B,,,,","1684145264,162.142.125.249,10.1.1.6,9303,33712,T,I,D,B,,,,","1684145267,146.59.233.75,10.1.1.6,33678,22,T,I,D,B,,,,","1684145275,167.94.145.28,10.1.1.6,39678,13337,T,I,D,B,,,,","1684145281,162.142.125.184,10.1.1.6,56860,9800,T,I,D,B,,,,","1684145290,79.124.62.78,10.1.1.6,53146,46367,T,I,D,B,,,,","1684145297,162.142.125.180,10.1.1.6,16879,10030,T,I,D,B,,,,","1684145297,193.106.192.189,10.1.1.6,35021,23,T,I,D,B,,,,","1684145300,94.102.61.42,10.1.1.6,46560,8426,T,I,D,B,,,,"]}]}]}}]}
+{
+  "records": [
+    {
+      "time": "2017-06-16T20:59:43.7340000Z",
+      "systemId": "abcdef01-2345-6789-0abc-def012345678",
+      "category": "NetworkSecurityGroupFlowEvent",
+      "resourceId": "/SUBSCRIPTIONS/00000000-0000-0000-0000-000000000000/RESOURCEGROUPS/MYRESOURCEGROUP/PROVIDERS/MICROSOFT.NETWORK/NETWORKSECURITYGROUPS/MYNSG",
+      "operationName": "NetworkSecurityGroupFlowEvents",
+      "properties": {
+        "Version": 1,
+        "flows": [
+          {
+            "rule": "DefaultRule_AllowInternetOutBound",
+            "flows": [
+              {
+                "mac": "000D3A18077E",
+                "flowTuples": [
+                  "1497646722,10.0.0.4,168.62.32.14,44904,443,T,O,A",
+                  "1497646722,10.0.0.4,52.240.48.24,45218,443,T,O,A",
+                  "1497646725,10.0.0.4,168.62.32.14,44910,443,T,O,A",
+                  "1497646725,10.0.0.4,52.240.48.24,45224,443,T,O,A",
+                  "1497646728,10.0.0.4,168.62.32.14,44916,443,T,O,A",
+                  "1497646728,10.0.0.4,52.240.48.24,45230,443,T,O,A",
+                  "1497646732,10.0.0.4,168.62.32.14,44922,443,T,O,A",
+                  "1497646732,10.0.0.4,52.240.48.24,45236,443,T,O,A"
+                ]
+              }
+            ]
+          },
+          {
+            "rule": "DefaultRule_DenyAllInBound",
+            "flows": []
+          },
+          {
+            "rule": "UserRule_ssh-rule",
+            "flows": []
+          },
+          {
+            "rule": "UserRule_web-rule",
+            "flows": [
+              {
+                "mac": "000D3A18077E",
+                "flowTuples": [
+                  "1497646738,13.82.225.93,10.0.0.4,1180,80,T,I,A",
+                  "1497646750,13.82.225.93,10.0.0.4,1184,80,T,I,A",
+                  "1497646768,13.82.225.93,10.0.0.4,1181,80,T,I,A",
+                  "1497646780,13.82.225.93,10.0.0.4,1336,80,T,I,A"
+                ]
+              }
+            ]
+          }
+        ]
+      }
+    }
+  ]
+}
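
The mock records follow the documented NSG flow-log tuple layout. For reference, a small illustrative parser (not part of the commit) that splits a Version 1 tuple into its comma-separated fields: timestamp, source IP, destination IP, source port, destination port, protocol, direction, decision.

# Illustrative only: split a Version-1 NSG flow tuple from the mock data above.
FIELDS = ("unix_ts", "src_ip", "dst_ip", "src_port", "dst_port",
          "protocol", "direction", "decision")

def parse_flow_tuple_v1(tuple_str):
    """Map a comma-separated flow tuple onto named fields."""
    return dict(zip(FIELDS, tuple_str.split(",")))

sample = "1497646722,10.0.0.4,168.62.32.14,44904,443,T,O,A"
parsed = parse_flow_tuple_v1(sample)
print(parsed["dst_port"], parsed["decision"])  # -> 443 A  (TCP, outbound, allowed)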

BlockBlobReader/tests/test_blobreader.py

Lines changed: 13 additions & 11 deletions
@@ -30,7 +30,9 @@ def setUpClass(cls):
         cls.test_storageaccount_name = "testsa%s" % (test_datetime_value)
         # Verify when Test Storage Account and template deployment are in different regions
         cls.test_storageAccountRegion = "Central US"
-        cls.test_container_name = "testcontainer-%s" % (datetime_value)
+        cls.log_type = os.environ.get("LOG_TYPE", "blob")
+
+        cls.test_container_name = "testcontainer-%s" % (datetime_value) if cls.log_type != "json" else "insights-logs-networksecuritygroupflowevent"
         cls.test_filename_excluded_by_filter = "blockblob_test_filename_excluded_by_filter.blob"
         cls.test_filename_unsupported_extension = "blockblob_test.xml"
         # https://learn.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata
@@ -87,10 +89,9 @@ def upload_file_of_unknown_extension(self):
                                             data_block, [])

     def test_03_func_logs(self):
-        log_type = os.environ.get("LOG_TYPE", "log")
-        self.logger.info("inserting mock %s data in BlobStorage" % log_type)
-        if log_type in ("csv", "log", "blob"):
-            self.insert_mock_logs_in_BlobStorage(log_type)
+        self.logger.info("inserting mock %s data in BlobStorage" % self.log_type)
+        if self.log_type in ("csv", "log", "blob"):
+            self.insert_mock_logs_in_BlobStorage(self.log_type)
         else:
             self.insert_mock_json_in_BlobStorage()

@@ -126,7 +127,7 @@ def test_03_func_logs(self):
                         f"Warning messages found in {azurefunction} azure function logs")

        self.logger.info("fetching mock data count from sumo")
-        log_type = os.environ.get("LOG_TYPE", "json")
+
         query = f'_sourceCategory="{self.source_category}" | count by _sourceName, _sourceHost'
         relative_time_in_minutes = 30
         expected_record_count = {
@@ -148,8 +149,8 @@ def test_03_func_logs(self):
         except Exception as err:
             self.logger.info(f"Error in fetching sumo query results {err}")

-        self.assertTrue(record_count == expected_record_count.get(log_type),
-                        f"block blob file's record count: {record_count} differs from expected count {expected_record_count.get(log_type)} in sumo '{self.source_category}'")
+        self.assertTrue(record_count == expected_record_count.get(self.log_type),
+                        f"block blob file's record count: {record_count} differs from expected count {expected_record_count.get(self.log_type)} in sumo '{self.source_category}'")

         # Verify Filter Prefix field
         self.assertTrue(record_excluded_by_filter_count == 0,
@@ -159,14 +160,15 @@ def test_03_func_logs(self):
                         f"block blob file's record count: {record_unsupported_extension_count}, logs with unsupported blob extension should not be ingested")

         # Verify with a very long append blob filename (1024 characters)
+        file_ext = f".{self.log_type}"
         if len(self.test_filename) > 128:
-            expected_filename = self.test_filename[:60] + "..." + self.test_filename[-60:]
+            expected_filename = self.test_filename[:60] + "..." + self.test_filename[-(60-len(file_ext)):] + file_ext
         else:
             expected_filename = self.test_filename

         # Verify addition of _sourceCategory, _sourceHost, _sourceName and also additional metadata
-        self.assertTrue(source_name == f"{expected_filename}", f"_sourceName {source_name} metadata is incorrect")
-        self.assertTrue(source_host == f"{self.test_storageaccount_name}/{self.test_container_name}", f"_sourceHost {source_host} metadata is incorrect")
+        self.assertTrue(source_name == expected_filename, f"_sourceName: {source_name} expected_filename: {expected_filename} metadata is incorrect")
+        self.assertTrue(source_host == f"{self.test_storageaccount_name}/{self.test_container_name}", f"_sourceHost {source_host} expected_sourcehost: {self.test_storageaccount_name}/{self.test_container_name} metadata is incorrect")

     def get_random_name(self, length=32):
         return str(uuid.uuid4())
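
For clarity, the truncation rule the test now expects can be exercised on its own. A standalone sketch with a hypothetical 1024-character blob name and an assumed LOG_TYPE of "blob" (illustration only, not code from the repository):

# Illustrative reconstruction of the _sourceName truncation the test now expects.
# The filename and LOG_TYPE value here are assumptions for the example only.
log_type = "blob"
file_ext = f".{log_type}"                  # ".blob", 5 characters
test_filename = "a" * 1024                 # stand-in for a 1024-character blob name

if len(test_filename) > 128:
    expected_filename = (test_filename[:60] + "..."
                         + test_filename[-(60 - len(file_ext)):] + file_ext)
else:
    expected_filename = test_filename

# 60 (head) + 3 ("...") + 55 (tail) + 5 (extension) = 123 characters
print(len(expected_filename))  # 123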

deletetestresourcegroups.py

Lines changed: 111 additions & 0 deletions
@@ -0,0 +1,111 @@
+# from azure.common.credentials import ServicePrincipalCredentials
+from azure.identity import ClientSecretCredential
+import json
+import os
+from azure.mgmt.resource import ResourceManagementClient
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from sumologic import SumoLogic
+
+config = {
+    "AZURE_SUBSCRIPTION_ID": "",
+    "AZURE_CLIENT_ID": "",
+    "AZURE_CLIENT_SECRET": "",
+    "AZURE_TENANT_ID": "",
+    "AZURE_DEFAULT_REGION": "",
+    "SUMO_ACCESS_ID": "",
+    "SUMO_ACCESS_KEY": "",
+    "SUMO_DEPLOYMENT": "us1"
+}
+
+# config_file = os.path.expanduser("~/.azure/azure_credentials.json")
+# config = json.load(open(config_file))
+subscription_id = str(config['AZURE_SUBSCRIPTION_ID'])
+credentials = ClientSecretCredential(
+    client_id=config['AZURE_CLIENT_ID'],
+    client_secret=config['AZURE_CLIENT_SECRET'],
+    tenant_id=config['AZURE_TENANT_ID']
+)
+location = str(config['AZURE_DEFAULT_REGION'])
+
+print("creating credentials", subscription_id)
+
+resource_client = ResourceManagementClient(credentials, subscription_id)
+
+
+def delete_resource_group(resource_group_name):
+    print("Found %s " % resource_group_name)
+    resp = resource_client.resource_groups.begin_delete(resource_group_name)
+    resp.wait()
+    print('Deleted {}'.format(resource_group_name), resp.status())
+
+
+groups = resource_client.resource_groups.list()
+future_to_group = {}
+with ThreadPoolExecutor(max_workers=10) as executor:
+    for group in groups:
+        group_name = group.name
+        if (group_name.startswith("TBL") or group_name.startswith("TABR") or group_name.startswith("testsumosa")):
+            print(f"scheduling {group_name}")
+            future_to_group[executor.submit(delete_resource_group, group_name)] = group_name
+
+if future_to_group:
+    for future in as_completed(future_to_group):
+        group_name = future_to_group[future]
+        try:
+            future.result()
+            print(f"Task completed for {group_name}")
+        except Exception as exc:
+            print('%r generated an exception: %s' % (group_name, exc))
+else:
+    print("No resource group found")
+
+
+def api_endpoint(sumo_deployment):
+    if sumo_deployment == "us1":
+        return "https://api.sumologic.com/api"
+    elif sumo_deployment in ["ca", "au", "de", "eu", "jp", "us2", "fed", "in"]:
+        return "https://api.%s.sumologic.com/api" % sumo_deployment
+    else:
+        return 'https://%s-api.sumologic.net/api' % sumo_deployment
+
+
+sumologic_cli = SumoLogic(config["SUMO_ACCESS_ID"], config["SUMO_ACCESS_KEY"], api_endpoint(config["SUMO_DEPLOYMENT"]))
+
+
+def delete_collector(collector_id, collector_name):
+    print("Found %s " % collector_name)
+    resp = sumologic_cli.delete_collector({"collector": {"id": collector_id}})
+    print('Deleted {}'.format(collector_name), resp)
+
+
+future_to_group = {}
+offset = 0
+with ThreadPoolExecutor(max_workers=5) as executor:
+    while True:
+        collectors = sumologic_cli.collectors(limit=1000, offset=offset, filter_type="hosted")
+        if not collectors:
+            break
+        for collector in collectors:
+            collector_name = collector["name"]
+            collector_id = collector["id"]
+            if (collector_name.startswith("azure_appendblob_unittest") or collector_name.startswith("azure_blockblob_unittest")):
+                print(f"scheduling {collector_name}")
+                future_to_group[executor.submit(delete_collector, collector_id, collector_name)] = collector_name
+        offset += 100
+
+if future_to_group:
+    for future in as_completed(future_to_group):
+        collector_name = future_to_group[future]
+        try:
+            future.result()
+            print(f"Task completed for {collector_name}")
+        except Exception as exc:
+            print('%r generated an exception: %s' % (collector_name, exc))
+else:
+    print("No collector found")
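The cleanup script above reads credentials from a hard-coded config dict. One possible variation, sketched here and not part of the commit, is to populate the same keys from environment variables so secrets stay out of the source:

# Hypothetical alternative to the hard-coded config dict: read the same keys
# from the environment before creating the Azure and Sumo Logic clients.
import os

REQUIRED_KEYS = [
    "AZURE_SUBSCRIPTION_ID", "AZURE_CLIENT_ID", "AZURE_CLIENT_SECRET",
    "AZURE_TENANT_ID", "AZURE_DEFAULT_REGION",
    "SUMO_ACCESS_ID", "SUMO_ACCESS_KEY", "SUMO_DEPLOYMENT",
]

config = {key: os.environ.get(key, "") for key in REQUIRED_KEYS}
missing = [key for key, value in config.items() if not value]
if missing:
    raise SystemExit(f"Missing environment variables: {', '.join(missing)}")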