@@ -61,11 +61,10 @@ def wait_actor_count(kikimr, activity, expected_count):
         count = 0
         for node_index in kikimr.compute_plane.kikimr_cluster.nodes:
             count = count + kikimr.compute_plane.get_actor_count(node_index, activity)
-        if count == expected_count:
-            break
+        if count == expected_count:
+            return node_index  # return any node
         assert time.time() < deadline, f"Waiting actor {activity} count failed, current count {count}"
         time.sleep(1)
-    pass
 
 
 def wait_row_dispatcher_sensor_value(kikimr, sensor, expected_count, exact_match=True):
@@ -617,20 +616,20 @@ def test_start_new_query(self, kikimr, client):
 
     @yq_v1
     def test_stop_start(self, kikimr, client):
-        self.init(client, "test_stop_start")
+        self.init(client, "test_stop_start", 10)
 
         sql1 = Rf'''
             INSERT INTO {YDS_CONNECTION}.`{self.output_topic}`
             SELECT Cast(time as String) FROM {YDS_CONNECTION}.`{self.input_topic}`
             WITH (format=json_each_row, SCHEMA (time Int32 NOT NULL));'''
 
         query_id = start_yds_query(kikimr, client, sql1)
-        wait_actor_count(kikimr, "FQ_ROW_DISPATCHER_SESSION", 1)
+        wait_actor_count(kikimr, "FQ_ROW_DISPATCHER_SESSION", 10)
 
         data = ['{"time": 101}', '{"time": 102}']
         self.write_stream(data)
         expected = ['101', '102']
-        assert self.read_stream(len(expected), topic_path=self.output_topic) == expected
+        assert sorted(self.read_stream(len(expected), topic_path=self.output_topic)) == sorted(expected)
 
         kikimr.compute_plane.wait_completed_checkpoints(
             query_id, kikimr.compute_plane.get_completed_checkpoints(query_id) + 2
@@ -652,7 +651,7 @@ def test_stop_start(self, kikimr, client):
 
         self.write_stream(data)
         expected = ['103', '104']
-        assert self.read_stream(len(expected), topic_path=self.output_topic) == expected
+        assert sorted(self.read_stream(len(expected), topic_path=self.output_topic)) == sorted(expected)
 
         stop_yds_query(client, query_id)
         wait_actor_count(kikimr, "FQ_ROW_DISPATCHER_SESSION", 0)
@@ -667,7 +666,7 @@ def test_stop_start2(self, kikimr, client):
         wait_actor_count(kikimr, "FQ_ROW_DISPATCHER_SESSION", 1)
         self.write_stream(['{"time": 101}', '{"time": 102}'])
         expected = ['101', '102']
-        assert self.read_stream(len(expected), topic_path=self.output_topic) == expected
+        assert sorted(self.read_stream(len(expected), topic_path=self.output_topic)) == sorted(expected)
 
         kikimr.compute_plane.wait_completed_checkpoints(query_id1, kikimr.compute_plane.get_completed_checkpoints(query_id1) + 2)
         stop_yds_query(client, query_id1)
@@ -1233,3 +1232,34 @@ def test_json_errors(self, kikimr, client, use_binding):
             assert time.time() < deadline, f"Waiting sensor ParsingErrors value failed, current count {count}"
             time.sleep(1)
         stop_yds_query(client, query_id)
+
+    @yq_v1
+    def test_redistribute_partition_after_timeout(self, kikimr, client):
+        partitions_count = 3
+        self.init(client, "redistribute", partitions=partitions_count)
+        wait_row_dispatcher_sensor_value(kikimr, "KnownRowDispatchers", 2 * COMPUTE_NODE_COUNT - 1)
+
+        sql = Rf'''
+            PRAGMA dq.Scheduler=@@{{"type": "single_node"}}@@;
+            INSERT INTO {YDS_CONNECTION}.`{self.output_topic}`
+            SELECT data FROM {YDS_CONNECTION}.`{self.input_topic}`
+            WITH (format=json_each_row, SCHEMA (time Int32 NOT NULL, data String NOT NULL));'''
+
+        query_id = start_yds_query(kikimr, client, sql)
+        session_node_index = wait_actor_count(kikimr, "FQ_ROW_DISPATCHER_SESSION", partitions_count)
+        kikimr.compute_plane.wait_completed_checkpoints(query_id, kikimr.compute_plane.get_completed_checkpoints(query_id) + 2)
+
+        message_count = 10
+        expected = "hello"
+        for i in range(message_count):
+            self.write_stream(['{"time": 100, "data": "hello"}'], topic_path=None, partition_key=str(i))
+        assert self.read_stream(message_count, topic_path=self.output_topic) == [expected] * message_count
+        kikimr.compute_plane.wait_completed_checkpoints(query_id, kikimr.compute_plane.get_completed_checkpoints(query_id) + 2)
+
+        logging.debug(f"Stopping node: {session_node_index}")
+        kikimr.compute_plane.kikimr_cluster.nodes[session_node_index].stop()
+
+        expected = "Relativitätstheorie"
+        for i in range(message_count):
+            self.write_stream(['{"time": 101, "data": "Relativitätstheorie"}'], topic_path=None, partition_key=str(i))
+        assert self.read_stream(message_count, topic_path=self.output_topic) == [expected] * message_count