@@ -12,22 +12,21 @@
 import pandas._testing as tm


-def test_compression_roundtrip(compression):
+def test_compression_roundtrip(compression, temp_file):
     df = pd.DataFrame(
         [[0.123456, 0.234567, 0.567567], [12.32112, 123123.2, 321321.2]],
         index=["A", "B"],
         columns=["X", "Y", "Z"],
     )

-    with tm.ensure_clean() as path:
-        df.to_json(path, compression=compression)
-        tm.assert_frame_equal(df, pd.read_json(path, compression=compression))
+    df.to_json(temp_file, compression=compression)
+    tm.assert_frame_equal(df, pd.read_json(temp_file, compression=compression))

-        # explicitly ensure file was compressed.
-        with tm.decompress_file(path, compression) as fh:
-            result = fh.read().decode("utf8")
-            data = StringIO(result)
-            tm.assert_frame_equal(df, pd.read_json(data))
+    # explicitly ensure file was compressed.
+    with tm.decompress_file(temp_file, compression) as fh:
+        result = fh.read().decode("utf8")
+        data = StringIO(result)
+        tm.assert_frame_equal(df, pd.read_json(data))


 def test_read_zipped_json(datapath):
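
The explicit decompression step in the hunk above exists because the round-trip on its own could pass even if to_json silently wrote plain text: read_json would decompress (or not) symmetrically. Below is a minimal standalone sketch of the same check, assuming gzip and using only the standard library in place of the internal tm.decompress_file helper; the file name is illustrative.

import gzip
from io import StringIO

import pandas as pd

df = pd.DataFrame({"X": [0.1, 0.2], "Y": [1.5, 2.5]}, index=["A", "B"])
df.to_json("roundtrip.json.gz", compression="gzip")

# gzip.open raises BadGzipFile on non-gzip bytes, so recovering the
# original JSON text here proves the file was genuinely compressed,
# not merely named *.gz.
with gzip.open("roundtrip.json.gz", "rt", encoding="utf8") as fh:
    data = StringIO(fh.read())
pd.testing.assert_frame_equal(df, pd.read_json(data))
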
@@ -43,15 +42,14 @@ def test_read_zipped_json(datapath):
 @td.skip_if_not_us_locale
 @pytest.mark.single_cpu
 @pytest.mark.network
-def test_with_s3_url(compression, s3_bucket_public, s3so):
+def test_with_s3_url(compression, s3_bucket_public, s3so, temp_file):
     # Bucket created in tests/io/conftest.py
     df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))

     key = f"{uuid.uuid4()}.json"
-    with tm.ensure_clean() as path:
-        df.to_json(path, compression=compression)
-        with open(path, "rb") as f:
-            s3_bucket_public.put_object(Key=key, Body=f)
+    df.to_json(temp_file, compression=compression)
+    with open(temp_file, "rb") as f:
+        s3_bucket_public.put_object(Key=key, Body=f)

     roundtripped_df = pd.read_json(
         f"s3://{s3_bucket_public.name}/{key}",
@@ -61,39 +59,35 @@ def test_with_s3_url(compression, s3_bucket_public, s3so):
     tm.assert_frame_equal(df, roundtripped_df)


-def test_lines_with_compression(compression):
-    with tm.ensure_clean() as path:
-        df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))
-        df.to_json(path, orient="records", lines=True, compression=compression)
-        roundtripped_df = pd.read_json(path, lines=True, compression=compression)
-        tm.assert_frame_equal(df, roundtripped_df)
+def test_lines_with_compression(compression, temp_file):
+    df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))
+    df.to_json(temp_file, orient="records", lines=True, compression=compression)
+    roundtripped_df = pd.read_json(temp_file, lines=True, compression=compression)
+    tm.assert_frame_equal(df, roundtripped_df)


-def test_chunksize_with_compression(compression):
-    with tm.ensure_clean() as path:
-        df = pd.read_json(StringIO('{"a": ["foo", "bar", "baz"], "b": [4, 5, 6]}'))
-        df.to_json(path, orient="records", lines=True, compression=compression)
+def test_chunksize_with_compression(compression, temp_file):
+    df = pd.read_json(StringIO('{"a": ["foo", "bar", "baz"], "b": [4, 5, 6]}'))
+    df.to_json(temp_file, orient="records", lines=True, compression=compression)

-        with pd.read_json(
-            path, lines=True, chunksize=1, compression=compression
-        ) as res:
-            roundtripped_df = pd.concat(res)
-        tm.assert_frame_equal(df, roundtripped_df)
+    with pd.read_json(
+        temp_file, lines=True, chunksize=1, compression=compression
+    ) as res:
+        roundtripped_df = pd.concat(res)
+    tm.assert_frame_equal(df, roundtripped_df)


-def test_write_unsupported_compression_type():
+def test_write_unsupported_compression_type(temp_file):
     df = pd.read_json(StringIO('{"a": [1, 2, 3], "b": [4, 5, 6]}'))
-    with tm.ensure_clean() as path:
-        msg = "Unrecognized compression type: unsupported"
-        with pytest.raises(ValueError, match=msg):
-            df.to_json(path, compression="unsupported")
+    msg = "Unrecognized compression type: unsupported"
+    with pytest.raises(ValueError, match=msg):
+        df.to_json(temp_file, compression="unsupported")


-def test_read_unsupported_compression_type():
-    with tm.ensure_clean() as path:
-        msg = "Unrecognized compression type: unsupported"
-        with pytest.raises(ValueError, match=msg):
-            pd.read_json(path, compression="unsupported")
+def test_read_unsupported_compression_type(temp_file):
+    msg = "Unrecognized compression type: unsupported"
+    with pytest.raises(ValueError, match=msg):
+        pd.read_json(temp_file, compression="unsupported")


 @pytest.mark.parametrize(
@@ -102,25 +96,28 @@ def test_read_unsupported_compression_type():
 @pytest.mark.parametrize("to_infer", [True, False])
 @pytest.mark.parametrize("read_infer", [True, False])
 def test_to_json_compression(
-    compression_only, read_infer, to_infer, compression_to_extension, infer_string
+    compression_only,
+    read_infer,
+    to_infer,
+    compression_to_extension,
+    infer_string,
+    tmp_path,
 ):
     with pd.option_context("future.infer_string", infer_string):
         # see gh-15008
         compression = compression_only

         # We'll complete file extension subsequently.
-        filename = "test."
-        filename += compression_to_extension[compression]
+        filename = tmp_path / f"test.{compression_to_extension[compression]}"

         df = pd.DataFrame({"A": [1]})

         to_compression = "infer" if to_infer else compression
         read_compression = "infer" if read_infer else compression

-        with tm.ensure_clean(filename) as path:
-            df.to_json(path, compression=to_compression)
-            result = pd.read_json(path, compression=read_compression)
-            tm.assert_frame_equal(result, df)
+        df.to_json(filename, compression=to_compression)
+        result = pd.read_json(filename, compression=read_compression)
+        tm.assert_frame_equal(result, df)


 def test_to_json_compression_mode(compression):
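
All of these tests now rely on a temp_file fixture instead of tm.ensure_clean; the fixture itself is defined outside this diff (in a conftest). Below is a minimal sketch of what such a fixture plausibly looks like, assuming all the tests need is a unique, already-created file path under pytest's auto-cleaned tmp_path. The touch() matters so that read-side tests such as test_read_unsupported_compression_type point at an existing file.

import uuid

import pytest


@pytest.fixture
def temp_file(tmp_path):
    # Hypothetical reconstruction, not the actual pandas conftest code:
    # hand each test a unique file path inside pytest's per-test tmp_path
    # directory. pytest prunes old tmp_path trees itself, which is what
    # makes tm.ensure_clean's explicit cleanup unnecessary here.
    path = tmp_path / f"{uuid.uuid4()}.tmp"
    path.touch()
    return path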