def tearDownModule():
    """Emit the collected benchmark results as JSON.

    Writes to OUTPUT_FILE when it is set; otherwise prints to stdout.
    """
    serialized = json.dumps(result_data, indent=4)
    if not OUTPUT_FILE:
        print(serialized)  # noqa: T201
        return
    with open(OUTPUT_FILE, "w") as opf:  # noqa: PTH123
        opf.write(serialized)
3838
3939
4040class Timer :
@@ -61,8 +61,9 @@ def tearDown(self):
6161 name = self .__class__ .__name__ [4 :]
6262 median = self .percentile (50 )
6363 megabytes_per_sec = self .data_size / median / 1000000
64- print (
65- f"Completed { self .__class__ .__name__ } { megabytes_per_sec :.3f} MB/s, MEDIAN={ self .percentile (50 ):.3f} s, "
64+ print ( # noqa: T201
65+ f"Completed { self .__class__ .__name__ } { megabytes_per_sec :.3f} MB/s, "
66+ f"MEDIAN={ self .percentile (50 ):.3f} s, "
6667 f"total time={ duration :.3f} s, iterations={ len (self .results )} "
6768 )
6869 result_data .append (
@@ -119,7 +120,9 @@ def runTest(self):
119120 with warnings .catch_warnings ():
120121 warnings .simplefilter ("default" )
121122 warnings .warn (
122- f"{ self .__class__ .__name__ } timed out after { MAX_ITERATION_TIME } s, completed { i } /{ NUM_ITERATIONS } iterations."
123+ f"{ self .__class__ .__name__ } timed out after { MAX_ITERATION_TIME } s, "
124+ f"completed { i } /{ NUM_ITERATIONS } iterations." ,
125+ stacklevel = 2 ,
123126 )
124127
125128 break
@@ -132,7 +135,7 @@ class SmallFlatDocTest(PerformanceTest):
132135
133136 def setUp (self ):
134137 super ().setUp ()
135- with open (self .dataset ) as data :
138+ with open (self .dataset ) as data : # noqa: PTH123
136139 self .document = json .load (data )
137140
138141 self .data_size = len (encode (self .document )) * NUM_DOCS
@@ -204,14 +207,15 @@ def do_task(self):
204207 def tearDown (self ):
205208 super ().tearDown ()
206209 SmallFlatModelFk .objects .all ().delete ()
210+ ForeignKeyModel .objects .all ().delete ()
207211
208212
209213class LargeFlatDocTest (PerformanceTest ):
210214 dataset = "large_doc.json"
211215
212216 def setUp (self ):
213217 super ().setUp ()
214- with open (self .dataset ) as data :
218+ with open (self .dataset ) as data : # noqa: PTH123
215219 self .document = json .load (data )
216220
217221 self .data_size = len (encode (self .document )) * NUM_DOCS
@@ -251,7 +255,7 @@ class LargeNestedDocTest(PerformanceTest):
251255
252256 def setUp (self ):
253257 super ().setUp ()
254- with open (self .dataset ) as data :
258+ with open (self .dataset ) as data : # noqa: PTH123
255259 self .document = json .load (data )
256260
257261 self .data_size = len (encode (self .document )) * NUM_DOCS
0 commit comments