@@ -2,14 +2,21 @@
 import os
 import time
 import warnings
-from typing import List
 
-from bson import encode, ObjectId
+from bson import ObjectId, encode
 from django.test import (
     TestCase,
 )
 
-from .models import SmallFlatModel, ForeignKeyModel, SmallFlatModelFk, LargeFlatModel, LargeNestedModel, StringEmbeddedModel, IntegerEmbeddedModel
+from .models import (
+    ForeignKeyModel,
+    IntegerEmbeddedModel,
+    LargeFlatModel,
+    LargeNestedModel,
+    SmallFlatModel,
+    SmallFlatModelFk,
+    StringEmbeddedModel,
+)
 
 OUTPUT_FILE = os.environ.get("OUTPUT_FILE")
 
@@ -18,7 +25,8 @@
 MAX_ITERATION_TIME = 60
 NUM_DOCS = 10000
 
-result_data: List = []
+result_data: list = []
+
 
 def tearDownModule():
     output = json.dumps(result_data, indent=4)
@@ -28,6 +36,7 @@ def tearDownModule():
     else:
         print(output)
 
+
 class Timer:
     def __enter__(self):
         self.start = time.monotonic()
@@ -37,6 +46,7 @@ def __exit__(self, *args):
         self.end = time.monotonic()
         self.interval = self.end - self.start
 
+
 # Copied from the driver benchmarking suite.
 class PerformanceTest:
     dataset: str
@@ -88,9 +98,8 @@ def percentile(self, percentile):
             sorted_results = sorted(self.results)
             percentile_index = int(len(sorted_results) * percentile / 100) - 1
             return sorted_results[percentile_index]
-        else:
-            self.fail("Test execution failed")
-            return None
+        self.fail("Test execution failed")
+        return None
 
     def runTest(self):
         results = []
@@ -123,12 +132,13 @@ class SmallFlatDocTest(PerformanceTest):
 
     def setUp(self):
         super().setUp()
-        with open(self.dataset, "r") as data:
+        with open(self.dataset) as data:
             self.document = json.load(data)
 
         self.data_size = len(encode(self.document)) * NUM_DOCS
         self.documents = [self.document.copy() for _ in range(NUM_DOCS)]
 
+
 class TestSmallFlatDocCreation(SmallFlatDocTest, TestCase):
     def do_task(self):
         for doc in self.documents:
@@ -138,6 +148,7 @@ def do_task(self):
     def after(self):
         SmallFlatModel.objects.all().delete()
 
+
 class TestSmallFlatDocUpdate(SmallFlatDocTest, TestCase):
     def setUp(self):
         super().setUp()
@@ -155,6 +166,7 @@ def do_task(self):
     def after(self):
         SmallFlatModel.objects.all().delete()
 
+
 class TestSmallFlatDocFilterById(SmallFlatDocTest, TestCase):
     def setUp(self):
         super().setUp()
@@ -172,6 +184,7 @@ def tearDown(self):
         super().tearDown()
         SmallFlatModel.objects.all().delete()
 
+
 class TestSmallFlatDocFilterByForeignKey(SmallFlatDocTest, TestCase):
     def setUp(self):
         super().setUp()
@@ -192,17 +205,19 @@ def tearDown(self):
         super().tearDown()
         SmallFlatModelFk.objects.all().delete()
 
+
 class LargeFlatDocTest(PerformanceTest):
     dataset = "large_doc.json"
 
     def setUp(self):
         super().setUp()
-        with open(self.dataset, "r") as data:
+        with open(self.dataset) as data:
             self.document = json.load(data)
 
         self.data_size = len(encode(self.document)) * NUM_DOCS
         self.documents = [self.document.copy() for _ in range(NUM_DOCS)]
 
+
 class TestLargeFlatDocCreation(LargeFlatDocTest, TestCase):
     def do_task(self):
         for doc in self.documents:
@@ -212,6 +227,7 @@ def do_task(self):
     def after(self):
         LargeFlatModel.objects.all().delete()
 
+
 class TestLargeFlatDocUpdate(LargeFlatDocTest, TestCase):
     def setUp(self):
         super().setUp()
@@ -229,12 +245,13 @@ def do_task(self):
     def after(self):
         LargeFlatModel.objects.all().delete()
 
+
 class LargeNestedDocTest(PerformanceTest):
     dataset = "large_doc_nested.json"
 
     def setUp(self):
         super().setUp()
-        with open(self.dataset, "r") as data:
+        with open(self.dataset) as data:
             self.document = json.load(data)
 
         self.data_size = len(encode(self.document)) * NUM_DOCS
@@ -261,13 +278,15 @@ def create_model(self):
             setattr(model, k, embedded_int_model)
         model.save()
 
+
 class TestLargeNestedDocCreation(LargeNestedDocTest, TestCase):
     def do_task(self):
         self.create_model()
 
     def after(self):
         LargeNestedModel.objects.all().delete()
 
+
 class TestLargeNestedDocUpdate(LargeNestedDocTest, TestCase):
     def setUp(self):
         super().setUp()
@@ -283,11 +302,14 @@ def do_task(self):
             model.embedded_str_doc_1.field1 = "updated_value"
             model.save()
 
+
 class TestLargeNestedDocFilterById(LargeNestedDocTest, TestCase):
     def setUp(self):
         super().setUp()
         self.create_model()
-        self.ids = [model.embedded_str_doc_1.unique_id for model in list(LargeNestedModel.objects.all())]
+        self.ids = [
+            model.embedded_str_doc_1.unique_id for model in list(LargeNestedModel.objects.all())
+        ]
 
     def do_task(self):
         for _id in self.ids:
@@ -297,11 +319,15 @@ def tearDown(self):
         super().tearDown()
         LargeNestedModel.objects.all().delete()
 
+
 class TestLargeNestedDocFilterArray(LargeNestedDocTest, TestCase):
     def setUp(self):
         super().setUp()
         self.create_model()
-        self.ids = [model.embedded_str_doc_array[0].unique_id for model in list(LargeNestedModel.objects.all())]
+        self.ids = [
+            model.embedded_str_doc_array[0].unique_id
+            for model in list(LargeNestedModel.objects.all())
+        ]
 
     def do_task(self):
         for _id in self.ids:
@@ -310,5 +336,3 @@ def do_task(self):
     def tearDown(self):
         super().tearDown()
         LargeNestedModel.objects.all().delete()
-
-
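
For context on how the timing pieces in this diff fit together, here is a small standalone sketch (not part of the change itself). It reuses a Timer shaped like the one above (a `return self` is added so it can be bound in a `with` statement, which the diff does not show) and the same percentile indexing as PerformanceTest.percentile(); the sleep call is only a placeholder workload standing in for do_task().

import time


class Timer:
    def __enter__(self):
        self.start = time.monotonic()
        return self  # not shown in the diff; assumed so the timer can be bound

    def __exit__(self, *args):
        self.end = time.monotonic()
        self.interval = self.end - self.start


results = []
for _ in range(5):
    with Timer() as timer:
        time.sleep(0.01)  # placeholder workload standing in for do_task()
    results.append(timer.interval)  # elapsed seconds per iteration

# Same indexing as PerformanceTest.percentile(): pick the 50th percentile of the sorted runs.
sorted_results = sorted(results)
print(sorted_results[int(len(sorted_results) * 50 / 100) - 1])

In the actual module, the collected percentiles end up in result_data and are printed (or written to the path in the OUTPUT_FILE environment variable) by tearDownModule().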