11import os
22import sys
33import json
4- import time
5- from time import sleep
6- from requests import Session
7- from datetime import datetime
8- from azure .servicebus import ServiceBusService
4+ from datetime import timedelta
95from azure .mgmt .resource import ResourceManagementClient
106from azure .mgmt .eventhub import EventHubManagementClient
7+ from azure .eventhub import EventHubProducerClient
8+ from azure .eventhub .exceptions import EventHubError
9+ from azure .monitor .query import LogsQueryClient , LogsQueryStatus
10+ from azure .mgmt .loganalytics import LogAnalyticsManagementClient
1111
1212sys .path .insert (0 , '../../test_utils' )
1313from basetest import BaseTest
1414
15+
1516class BaseEventHubTest (BaseTest ):
1617
1718 def setUp (self ):
@@ -41,69 +42,6 @@ def get_resource(self, restype):
4142 return item
4243 raise Exception ("%s Resource Not Found" % (restype ))
4344
    def get_row_count(self, query):
        """Return the number of rows in the log table's 'R2' partition.

        NOTE(review): the ``query`` argument is accepted but never used --
        the filter is hard-coded to "PartitionKey eq 'R2'". Confirm whether
        callers expect their query to be honoured here.
        """
        # Only the PartitionKey column is selected; we just need the count.
        rows = self.table_service.query_entities(
            self.log_table_name, filter="PartitionKey eq 'R2'", select='PartitionKey')

        return len(rows.items)
49-
50- def wait_for_table_results (self , query ):
51- max_retries = 50
52- while (max_retries > 0 and (not (self .table_service .exists (
53- self .log_table_name ) and self .get_row_count (query ) > 0 ))):
54- print ("waiting for logs creation..." , max_retries )
55- sleep (15 )
56- max_retries -= 1
57-
58- def insert_mock_logs_in_EventHub (self , filename ):
59- print ("Inserting fake logs in EventHub" )
60- namespace_name = self .get_resource_name (self .event_hub_namespace_prefix , "Microsoft.EventHub/namespaces" )
61-
62- defaultauthorule_name = "RootManageSharedAccessKey"
63-
64- eventhub_client = EventHubManagementClient (self .credentials ,
65- self .subscription_id )
66-
67- ehkeys = eventhub_client .namespaces .list_keys (
68- self .RESOURCE_GROUP_NAME , namespace_name , defaultauthorule_name )
69-
70- sbs = ServiceBusService (
71- namespace_name ,
72- shared_access_key_name = defaultauthorule_name ,
73- shared_access_key_value = ehkeys .primary_key ,
74- request_session = Session ()
75- )
76- mock_logs = json .load (open (filename ))
77- print ("inserting %s" % (mock_logs ))
78- sbs .send_event (self .eventhub_name , json .dumps (mock_logs ))
79-
80- print ("Event inserted" )
81-
82- def insert_mock_metrics_in_EventHub (self , filename ):
83- print ("Inserting fake metrics in EventHub" )
84-
85- defaultauthorule_name = "RootManageSharedAccessKey"
86- namespace_name = self .get_resource_name (self .event_hub_namespace_prefix , "Microsoft.EventHub/namespaces" )
87- eventhub_client = EventHubManagementClient (self .azure_credential , self .subscription_id )
88- eventhub_keys = eventhub_client .namespaces .list_keys (self .RESOURCE_GROUP_NAME , namespace_name , defaultauthorule_name )
89-
90- sbs = ServiceBusService (
91- namespace_name ,
92- shared_access_key_name = defaultauthorule_name ,
93- shared_access_key_value = eventhub_keys .primary_key ,
94- request_session = Session ()
95- )
96-
97- with open (filename , 'r' ) as template_file_fd :
98- mock_logs = json .load (template_file_fd )
99- mock_logs = json .dumps (mock_logs )
100- mock_logs = mock_logs .replace ("2018-03-07T14:23:51.991Z" , datetime .utcnow ().strftime ("%Y-%m-%dT%H:%M:%S.%fZ" ))
101- mock_logs = mock_logs .replace ("C088DC46" , "%d-%s" % (1 , str (int (time .time ()))))
102-
103- # print("inserting %s" % (mock_logs))
104- sbs .send_event (self .eventhub_name , mock_logs )
105- print ("Event inserted" )
106-
10745 def _parse_template (self ):
10846 template_path = os .path .join (os .path .abspath ('..' ), 'src' ,
10947 self .template_name )
@@ -118,3 +56,83 @@ def _parse_template(self):
11856 template_data ["parameters" ]["location" ]["defaultValue" ] = self .resourcegroup_location
11957
12058 return template_data
59+
60+ def send_event_data_list (self , event_hub_namespace_prefix , event_hub_name , event_data_list ):
61+
62+ defaultauthorule_name = "RootManageSharedAccessKey"
63+ namespace_name = self .get_resource_name (event_hub_namespace_prefix , "Microsoft.EventHub/namespaces" )
64+ eventhub_client = EventHubManagementClient (self .azure_credential , self .subscription_id )
65+ eventhub_keys = eventhub_client .namespaces .list_keys (self .RESOURCE_GROUP_NAME , namespace_name , defaultauthorule_name )
66+
67+ producer = EventHubProducerClient .from_connection_string (
68+ conn_str = eventhub_keys .primary_connection_string ,
69+ eventhub_name = event_hub_name
70+ )
71+
72+ with producer :
73+ try :
74+ producer .send_batch (event_data_list )
75+ except ValueError : # Size exceeds limit. This shouldn't happen if you make sure before hand.
76+ print ("Size of the event data list exceeds the size limit of a single send" )
77+ except EventHubError as eh_err :
78+ print ("Sending error: " , eh_err )
79+
80+ print ("Event inserted" )
81+
82+ def fetchlogs (self , app_insights ):
83+ result = []
84+ try :
85+ client = LogsQueryClient (self .azure_credential )
86+ query = f"app('{ app_insights } ').traces | where operation_Name == '{ self .function_name } ' | project operation_Id, timestamp, message, severityLevel"
87+ response = client .query_workspace (self .get_Workspace_Id (), query , timespan = timedelta (hours = 1 ))
88+
89+ if response .status == LogsQueryStatus .FAILURE :
90+ raise Exception (f"LogsQueryError: { response .message } " )
91+ elif response .status == LogsQueryStatus .PARTIAL :
92+ data = response .partial_data
93+ error = response .partial_error
94+ print ("partial_error: " , error )
95+ elif response .status == LogsQueryStatus .SUCCESS :
96+ data = response .tables
97+
98+ for table in data :
99+ for row in table .rows :
100+ row_dict = {str (col ): str (item ) for col , item in zip (table .columns , row )}
101+ result .append (row_dict )
102+ except Exception as e :
103+ print ("An unexpected error occurred during the test:" )
104+ print ("Exception" , e )
105+
106+ return result
107+
    def get_resources(self, resource_group_name):
        """Return an iterator over all resources in *resource_group_name*.

        Thin wrapper around
        ``ResourceManagementClient.resources.list_by_resource_group``.
        """
        return self.resource_client.resources.list_by_resource_group(resource_group_name)
110+
111+ def get_Workspace_Id (self ):
112+ workspace = self .get_resource ('microsoft.operationalinsights/workspaces' )
113+ client = LogAnalyticsManagementClient (
114+ credential = self .azure_credential ,
115+ subscription_id = self .subscription_id ,
116+ )
117+
118+ response = client .workspaces .get (
119+ resource_group_name = self .RESOURCE_GROUP_NAME ,
120+ workspace_name = workspace .name ,
121+ )
122+ return response .customer_id
123+
124+ def filter_logs (self , logs , key , value ):
125+ return value in [d .get (key ) for d in logs ]
126+
127+ def check_resource_count (self ):
128+ resource_count = len (list (self .get_resources (self .RESOURCE_GROUP_NAME )))
129+ self .assertTrue (resource_count == self .expected_resource_count , f"resource count of resource group { self .RESOURCE_GROUP_NAME } differs from expected count : { resource_count } " )
130+
    def check_success_log(self, logs):
        # Passes when at least one fetched trace's message equals the
        # success message configured on the test class.
        self.assertTrue(self.filter_logs(logs, 'message', self.successful_sent_message))
133+
134+ def check_error_log (self , logs ):
135+ self .assertTrue (not self .filter_logs (logs , 'severityLevel' , '3' ))
136+
137+ def check_warning_log (self , logs ):
138+ self .assertTrue (not self .filter_logs (logs , 'severityLevel' , '2' ))
0 commit comments