Skip to content

Commit c18cffc

Browse files
authored
Merge pull request #92 from clowder-framework/posixpath-joins
3.0.6 Use posixpath.join
2 parents 50b6b36 + 4c7724a commit c18cffc

File tree

11 files changed

+104
-108
lines changed

11 files changed

+104
-108
lines changed

CHANGELOG.md

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,14 @@ All notable changes to this project will be documented in this file.
55
The format is based on [Keep a Changelog](https://keepachangelog.com/)
66
and this project adheres to [Semantic Versioning](https://semver.org/).
77

8-
- ## 3.0.6 - 2023-10-09
8+
9+
## 3.0.7 - 2023-10-11
10+
11+
### Added
12+
13+
- Modified v1 and v2 endpoints to ignore trailing slashes on Clowder host URLs.
14+
15+
- ## 3.0.6 - 2023-10-10
916

1017
### Added
1118

pyclowder/api/v1/datasets.py

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
import logging
33
import os
44
import tempfile
5-
5+
import posixpath
66
import requests
77
from pyclowder.client import ClowderClient
88
from pyclowder.collections import get_datasets, get_child_collections, delete as delete_collection
@@ -22,7 +22,7 @@ def create_empty(connector, client, datasetname, description, parentid=None, spa
2222
"""
2323
logger = logging.getLogger(__name__)
2424

25-
url = '%s/api/datasets/createempty?key=%s' % (client.host, client.key)
25+
url = posixpath.join(client.host, 'api/datasets/createempty?key=%s' % client.key)
2626

2727
if parentid:
2828
if spaceid:
@@ -61,7 +61,7 @@ def delete(connector, client, datasetid):
6161
client -- ClowderClient containing authentication credentials
6262
datasetid -- the dataset to delete
6363
"""
64-
url = "%s/api/datasets/%s?key=%s" % (client.host, datasetid, client.key)
64+
url = posixpath.join(client.host, "api/datasets/%s?key=%s" % (datasetid, client.key))
6565

6666
result = requests.delete(url, verify=connector.ssl_verify if connector else True)
6767
result.raise_for_status()
@@ -102,7 +102,7 @@ def download(connector, client, datasetid):
102102
connector.message_process({"type": "dataset", "id": datasetid}, "Downloading dataset.")
103103

104104
# fetch dataset zipfile
105-
url = '%s/api/datasets/%s/download?key=%s' % (client.host, datasetid,client.key)
105+
url = posixpath.join(client.host, 'api/datasets/%s/download?key=%s' % (datasetid, client.key))
106106
result = requests.get(url, stream=True,
107107
verify=connector.ssl_verify if connector else True)
108108
result.raise_for_status()
@@ -124,7 +124,7 @@ def download_metadata(connector, client, datasetid, extractor=None):
124124
extractor -- extractor name to filter results (if only one extractor's metadata is desired)
125125
"""
126126
filterstring = "" if extractor is None else "&extractor=%s" % extractor
127-
url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key + filterstring)
127+
url = posixpath.join(client.host, 'api/datasets/%s/metadata?key=%s' % (datasetid, client.key + filterstring))
128128

129129
# fetch data
130130
result = requests.get(url, stream=True,
@@ -142,7 +142,7 @@ def get_info(connector, client, datasetid):
142142
datasetid -- the dataset to get info of
143143
"""
144144

145-
url = "%s/api/datasets/%s?key=%s" % (client.host, datasetid, client.key)
145+
url = posixpath.join(client.host, "api/datasets/%s?key=%s" % (datasetid, client.key))
146146

147147
result = requests.get(url, verify=connector.ssl_verify if connector else True)
148148
result.raise_for_status()
@@ -157,7 +157,7 @@ def get_file_list(connector, client, datasetid):
157157
client -- ClowderClient containing authentication credentials
158158
datasetid -- the dataset to get filelist of
159159
"""
160-
url = "%s/api/datasets/%s/files?key=%s" % (client.host, datasetid, client.key)
160+
url = posixpath.join(client.host, "api/datasets/%s/files?key=%s" % (datasetid, client.key))
161161

162162
result = requests.get(url, verify=connector.ssl_verify if connector else True)
163163
result.raise_for_status()
@@ -175,7 +175,7 @@ def remove_metadata(connector, client, datasetid, extractor=None):
175175
!!! ALL JSON-LD METADATA WILL BE REMOVED IF NO extractor PROVIDED !!!
176176
"""
177177
filterstring = "" if extractor is None else "&extractor=%s" % extractor
178-
url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)
178+
url = posixpath.join(client.host, 'api/datasets/%s/metadata?key=%s' % (datasetid, client.key))
179179

180180
# fetch data
181181
result = requests.delete(url, stream=True, verify=connector.ssl_verify if connector else True)
@@ -192,7 +192,7 @@ def submit_extraction(connector, client, datasetid, extractorname):
192192
"""
193193
headers = {'Content-Type': 'application/json'}
194194

195-
url = "%s/api/datasets/%s/extractions?key=%s" % (client.host, datasetid, client.key)
195+
url = posixpath.join(client.host, "api/datasets/%s/extractions?key=%s" % (datasetid, client.key))
196196

197197
result = requests.post(url,
198198
headers=headers,
@@ -238,7 +238,7 @@ def upload_tags(connector, client, datasetid, tags):
238238
connector.status_update(StatusMessage.processing, {"type": "dataset", "id": datasetid}, "Uploading dataset tags.")
239239

240240
headers = {'Content-Type': 'application/json'}
241-
url = '%s/api/datasets/%s/tags?key=%s' % (client.host, datasetid, client.key)
241+
url = posixpath.join(client.host, 'api/datasets/%s/tags?key=%s' % (datasetid, client.key))
242242
result = connector.post(url, headers=headers, data=json.dumps(tags),
243243
verify=connector.ssl_verify if connector else True)
244244

@@ -255,7 +255,7 @@ def upload_metadata(connector, client, datasetid, metadata):
255255
headers = {'Content-Type': 'application/json'}
256256
connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")
257257

258-
url = '%s/api/datasets/%s/metadata?key=%s' % (client.host, datasetid, client.key)
258+
url = posixpath.join(client.host, 'api/datasets/%s/metadata?key=%s' % (datasetid, client.key))
259259
result = requests.post(url, headers=headers, data=json.dumps(metadata),
260260
verify=connector.ssl_verify if connector else True)
261261
result.raise_for_status()
@@ -387,4 +387,4 @@ def add_metadata(self, dataset_id, metadata):
387387
try:
388388
return self.client.post("/datasets/%s/metadata" % dataset_id, metadata)
389389
except Exception as e:
390-
logging.error("Error upload to dataset %s: %s" % (dataset_id, str(e)))
390+
logging.error("Error upload to dataset %s: %s" % (dataset_id, str(e)))

pyclowder/api/v1/files.py

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
import logging
88
import os
99
import tempfile
10-
10+
import posixpath
1111
import requests
1212
from requests_toolbelt.multipart.encoder import MultipartEncoder
1313

@@ -43,7 +43,7 @@ def get_download_url(connector, client, fileid, intermediatefileid=None, ext="")
4343
if not intermediatefileid:
4444
intermediatefileid = fileid
4545

46-
url = '%s/api/files/%s?key=%s' % (client.host, intermediatefileid, client.key)
46+
url = posixpath.join(client.host, 'api/files/%s?key=%s' % (intermediatefileid, client.key))
4747
return url
4848

4949

@@ -65,7 +65,7 @@ def download(connector, client, fileid, intermediatefileid=None, ext=""):
6565
if not intermediatefileid:
6666
intermediatefileid = fileid
6767

68-
url = '%s/api/files/%s?key=%s' % (client.host, intermediatefileid, client.key)
68+
url = posixpath.join(client.host, 'api/files/%s?key=%s' % (intermediatefileid, client.key))
6969
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
7070

7171
(inputfile, inputfilename) = tempfile.mkstemp(suffix=ext)
@@ -89,7 +89,7 @@ def download_info(connector, client, fileid):
8989
fileid -- the file to fetch metadata of
9090
"""
9191

92-
url = '%s/api/files/%s/metadata?key=%s' % (client.host, fileid, client.key)
92+
url = posixpath.join(client.host, 'api/files/%s/metadata?key=%s' % (fileid, client.key))
9393

9494
# fetch data
9595
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
@@ -121,7 +121,7 @@ def download_metadata(connector, client, fileid, extractor=None):
121121
"""
122122

123123
filterstring = "" if extractor is None else "&extractor=%s" % extractor
124-
url = '%s/api/files/%s/metadata.jsonld?key=%s%s' % (client.host, fileid, client.key, filterstring)
124+
url = posixpath.join(client.host, 'api/files/%s/metadata.jsonld?key=%s%s' % (fileid, client.key, filterstring))
125125

126126
# fetch data
127127
result = connector.get(url, stream=True, verify=connector.ssl_verify if connector else True)
@@ -137,7 +137,7 @@ def delete(connector, client, fileid):
137137
client -- ClowderClient containing authentication credentials
138138
fileid -- the dataset to delete
139139
"""
140-
url = "%s/api/files/%s?key=%s" % (client.host, fileid, client.key)
140+
url = posixpath.join(client.host, "api/files/%s?key=%s" % (fileid, client.key))
141141

142142
result = requests.delete(url, verify=connector.ssl_verify if connector else True)
143143
result.raise_for_status()
@@ -155,7 +155,7 @@ def submit_extraction(connector, client, fileid, extractorname):
155155
extractorname -- registered name of extractor to trigger
156156
"""
157157

158-
url = "%s/api/files/%s/extractions?key=%s" % (client.host, fileid, client.key)
158+
url = posixpath.join(client.host, "api/files/%s/extractions?key=%s" % (fileid, client.key))
159159

160160
result = connector.post(url,
161161
headers={'Content-Type': 'application/json'},
@@ -229,7 +229,7 @@ def upload_metadata(connector, client, fileid, metadata):
229229
connector.message_process({"type": "file", "id": fileid}, "Uploading file metadata.")
230230

231231
headers = {'Content-Type': 'application/json'}
232-
url = '%s/api/files/%s/metadata.jsonld?key=%s' % (client.host, fileid, client.key)
232+
url = posixpath.join(client.host, 'api/files/%s/metadata.jsonld?key=%s' % (fileid, client.key))
233233
result = connector.post(url, headers=headers, data=json.dumps(metadata),
234234
verify=connector.ssl_verify if connector else True)
235235

@@ -255,7 +255,7 @@ def upload_preview(connector, client, fileid, previewfile, previewmetadata=None,
255255
headers = {'Content-Type': 'application/json'}
256256

257257
# upload preview
258-
url = '%s/api/previews?key=%s' % (client.host, client.key)
258+
url = posixpath.join(client.host, 'api/previews?key=%s' % client.key)
259259
with open(previewfile, 'rb') as filebytes:
260260
# If a custom preview file MIME type is provided, use it to generate the preview file object.
261261
if preview_mimetype is not None:
@@ -269,13 +269,13 @@ def upload_preview(connector, client, fileid, previewfile, previewmetadata=None,
269269

270270
# associate uploaded preview with orginal file
271271
if fileid and not (previewmetadata and 'section_id' in previewmetadata and previewmetadata['section_id']):
272-
url = '%s/api/files/%s/previews/%s?key=%s' % (client.host, fileid, previewid, client.key)
272+
url = posixpath.join(client.host, 'api/files/%s/previews/%s?key=%s' % (fileid, previewid, client.key))
273273
result = connector.post(url, headers=headers, data=json.dumps({}),
274274
verify=connector.ssl_verify if connector else True)
275275

276276
# associate metadata with preview
277277
if previewmetadata is not None:
278-
url = '%s/api/previews/%s/metadata?key=%s' % (client.host, previewid, client.key)
278+
url = posixpath.join(client.host, 'api/previews/%s/metadata?key=%s' % (previewid, client.key))
279279
result = connector.post(url, headers=headers, data=json.dumps(previewmetadata),
280280
verify=connector.ssl_verify if connector else True)
281281

@@ -295,7 +295,7 @@ def upload_tags(connector, client, fileid, tags):
295295
connector.message_process({"type": "file", "id": fileid}, "Uploading file tags.")
296296

297297
headers = {'Content-Type': 'application/json'}
298-
url = '%s/api/files/%s/tags?key=%s' % (client.host, fileid, client.key)
298+
url = posixpath.join(client.host, 'api/files/%s/tags?key=%s' % (fileid, client.key))
299299
result = connector.post(url, headers=headers, data=json.dumps(tags),
300300
verify=connector.ssl_verify if connector else True)
301301

@@ -311,7 +311,7 @@ def upload_thumbnail(connector, client, fileid, thumbnail):
311311
"""
312312

313313
logger = logging.getLogger(__name__)
314-
url = '%s/api/fileThumbnail?key=%s' % (client.host, client.key)
314+
url = posixpath.join(client.host, 'api/fileThumbnail?key=%s' % client.key)
315315

316316
# upload preview
317317
with open(thumbnail, 'rb') as inputfile:
@@ -322,7 +322,7 @@ def upload_thumbnail(connector, client, fileid, thumbnail):
322322
# associate uploaded preview with original file/dataset
323323
if fileid:
324324
headers = {'Content-Type': 'application/json'}
325-
url = '%s/api/files/%s/thumbnails/%s?key=%s' % (client.host, fileid, thumbnailid, client.key)
325+
url = posixpath.join(client.host, 'api/files/%s/thumbnails/%s?key=%s' % (fileid, thumbnailid, client.key))
326326
connector.post(url, headers=headers, data=json.dumps({}), verify=connector.ssl_verify if connector else True)
327327

328328
return thumbnailid
@@ -352,7 +352,7 @@ def upload_to_dataset(connector, client, datasetid, filepath, check_duplicate=Fa
352352
if filepath.startswith(connector.mounted_paths[source_path]):
353353
return _upload_to_dataset_local(connector, client, datasetid, filepath)
354354

355-
url = '%s/api/uploadToDataset/%s?key=%s' % (client.host, datasetid, client.key)
355+
url = posixpath.join(client.host, 'api/uploadToDataset/%s?key=%s' % (datasetid, client.key))
356356

357357
if os.path.exists(filepath):
358358
filename = os.path.basename(filepath)
@@ -381,7 +381,7 @@ def _upload_to_dataset_local(connector, client, datasetid, filepath):
381381
"""
382382

383383
logger = logging.getLogger(__name__)
384-
url = '%s/api/uploadToDataset/%s?key=%s' % (client.host, datasetid, client.key)
384+
url = posixpath.join(client.host, 'api/uploadToDataset/%s?key=%s' % (datasetid, client.key))
385385

386386
if os.path.exists(filepath):
387387
# Replace local path with remote path before uploading

pyclowder/api/v2/datasets.py

Lines changed: 15 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,11 @@
77
import logging
88
import os
99
import tempfile
10-
10+
import posixpath
1111
import requests
1212
from requests_toolbelt.multipart.encoder import MultipartEncoder
1313

14-
from pyclowder.client import ClowderClient
1514
from pyclowder.collections import get_datasets, get_child_collections, delete as delete_collection
16-
from pyclowder.utils import StatusMessage
1715

1816

1917
def create_empty(connector, client, datasetname, description, parentid=None, spaceid=None):
@@ -30,7 +28,7 @@ def create_empty(connector, client, datasetname, description, parentid=None, spa
3028

3129
logger = logging.getLogger(__name__)
3230

33-
url = '%s/api/v2/datasets' % client.host
31+
url = posixpath.join(client.host, 'api/v2/datasets')
3432
headers = {"Content-Type": "application/json",
3533
"X-API-KEY": client.key}
3634
result = requests.post(url, headers=headers,
@@ -54,7 +52,7 @@ def delete(connector, client , datasetid):
5452
datasetid -- the dataset to delete
5553
"""
5654
headers = {"X-API-KEY": client.key}
57-
url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
55+
url = posixpath.join(client.host, "api/v2/datasets/%s" % datasetid)
5856

5957
result = requests.delete(url, headers=headers, verify=connector.ssl_verify if connector else True)
6058
result.raise_for_status()
@@ -99,7 +97,7 @@ def download(connector, client, datasetid):
9997

10098
headers = {"X-API-KEY": client.key}
10199
# fetch dataset zipfile
102-
url = '%s/api/v2/datasets/%s/download' % (client.host, datasetid)
100+
url = posixpath.join(client.host, 'api/v2/datasets/%s/download' % datasetid)
103101
result = requests.get(url, stream=True, headers=headers,
104102
verify=connector.ssl_verify if connector else True)
105103
result.raise_for_status()
@@ -124,7 +122,7 @@ def download_metadata(connector, client, datasetid, extractor=None):
124122
headers = {"X-API-KEY": client.key}
125123

126124
filterstring = "" if extractor is None else "&extractor=%s" % extractor
127-
url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
125+
url = posixpath.join(client.host, 'api/v2/datasets/%s/metadata' % datasetid)
128126

129127
# fetch data
130128
result = requests.get(url, stream=True, headers=headers,
@@ -144,7 +142,7 @@ def get_info(connector, client, datasetid):
144142
"""
145143
headers = {"X-API-KEY": client.key}
146144

147-
url = "%s/api/v2/datasets/%s" % (client.host, datasetid)
145+
url = posixpath.join(client.host, "api/v2/datasets/%s" % datasetid)
148146

149147
result = requests.get(url, headers=headers,
150148
verify=connector.ssl_verify if connector else True)
@@ -163,7 +161,7 @@ def get_file_list(connector, client, datasetid):
163161
"""
164162
headers = {"X-API-KEY": client.key}
165163

166-
url = "%s/api/v2/datasets/%s/files" % (client.host, datasetid)
164+
url = posixpath.join(client.host, "api/v2/datasets/%s/files" % datasetid)
167165

168166
result = requests.get(url, headers=headers, verify=connector.ssl_verify if connector else True)
169167
result.raise_for_status()
@@ -184,7 +182,7 @@ def remove_metadata(connector, client, datasetid, extractor=None):
184182
headers = {"X-API-KEY": client.key}
185183

186184
filterstring = "" if extractor is None else "&extractor=%s" % extractor
187-
url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
185+
url = posixpath.join(client.host, 'api/v2/datasets/%s/metadata' % datasetid)
188186

189187
# fetch data
190188
result = requests.delete(url, stream=True, headers=headers,
@@ -204,7 +202,7 @@ def submit_extraction(connector, client, datasetid, extractorname):
204202
headers = {'Content-Type': 'application/json',
205203
"X-API-KEY": client.key}
206204

207-
url = "%s/api/v2/datasets/%s/extractions?key=%s" % (client.host, datasetid)
205+
url = posixpath.join(client.host, "api/v2/datasets/%s/extractions" % datasetid)
208206

209207
result = requests.post(url,
210208
headers=headers,
@@ -229,7 +227,7 @@ def upload_metadata(connector, client, datasetid, metadata):
229227
connector.message_process({"type": "dataset", "id": datasetid}, "Uploading dataset metadata.")
230228

231229

232-
url = '%s/api/v2/datasets/%s/metadata' % (client.host, datasetid)
230+
url = posixpath.join(client.host, 'api/v2/datasets/%s/metadata' % datasetid)
233231
result = requests.post(url, headers=headers, data=json.dumps(metadata),
234232
verify=connector.ssl_verify if connector else True)
235233
result.raise_for_status()
@@ -259,7 +257,7 @@ def upload_preview(connector, client, datasetid, previewfile, previewmetadata=No
259257
if os.path.exists(previewfile):
260258

261259
# upload visualization URL
262-
visualization_config_url = '%s/api/v2/visualizations/config' % client.host
260+
visualization_config_url = posixpath.join(client.host, 'api/v2/visualizations/config')
263261

264262
if visualization_config_data is None:
265263
visualization_config_data = dict()
@@ -292,8 +290,8 @@ def upload_preview(connector, client, datasetid, previewfile, previewmetadata=No
292290
if visualization_config_id is not None:
293291

294292
# upload visualization URL
295-
visualization_url = '%s/api/v2/visualizations?name=%s&description=%s&config=%s' % (
296-
client.host, visualization_name, visualization_description, visualization_config_id)
293+
visualization_url = posixpath.join(client.host, 'api/v2/visualizations?name=%s&description=%s&config=%s' % (
294+
visualization_name, visualization_description, visualization_config_id))
297295

298296
filename = os.path.basename(previewfile)
299297
if preview_mimetype is not None:
@@ -331,7 +329,7 @@ def upload_thumbnail(connector, client, datasetid, thumbnail):
331329

332330
connector.message_process({"type": "dataset", "id": datasetid}, "Uploading thumbnail to dataset.")
333331

334-
url = '%s/api/v2/thumbnails' % (client.host)
332+
url = posixpath.join(client.host, 'api/v2/thumbnails')
335333

336334
if os.path.exists(thumbnail):
337335
file_data = {"file": open(thumbnail, 'rb')}
@@ -345,7 +343,7 @@ def upload_thumbnail(connector, client, datasetid, thumbnail):
345343
connector.message_process({"type": "dataset", "id": datasetid}, "Uploading thumbnail to dataset.")
346344
headers = {'Content-Type': 'application/json',
347345
'X-API-KEY': client.key}
348-
url = '%s/api/v2/datasets/%s/thumbnail/%s' % (client.host, datasetid, thumbnailid)
346+
url = posixpath.join(client.host, 'api/v2/datasets/%s/thumbnail/%s' % (datasetid, thumbnailid))
349347
result = connector.patch(url, headers=headers,
350348
verify=connector.ssl_verify if connector else True)
351349
return result.json()["thumbnail_id"]

0 commit comments

Comments
 (0)