@@ -202,23 +202,30 @@ def test_tree(zarr_version):
     assert str(zarr.tree(g1)) == str(g1.tree())
 
 
-# TODO: consolidated metadata currently only supported for v2
-
 @pytest.mark.parametrize('zarr_version', [2, 3])
 @pytest.mark.parametrize('with_chunk_store', [False, True], ids=['default', 'with_chunk_store'])
-def test_consolidate_metadata(with_chunk_store, zarr_version):
-
-    if zarr_version == 2:
-        MemoryStoreClass = MemoryStore
-        path = ''
-    else:
-        MemoryStoreClass = MemoryStoreV3
-        path = 'dataset'
-
+@pytest.mark.parametrize('stores_from_path', [False, True])
+def test_consolidate_metadata(with_chunk_store, zarr_version, stores_from_path):
     # setup initial data
-    store = MemoryStoreClass()
-    chunk_store = MemoryStoreClass() if with_chunk_store else None
-    z = group(store, chunk_store=chunk_store, path=path)
+    if stores_from_path:
+        store = tempfile.mkdtemp()
+        atexit.register(atexit_rmtree, store)
+        if with_chunk_store:
+            chunk_store = tempfile.mkdtemp()
+            atexit.register(atexit_rmtree, chunk_store)
+        else:
+            chunk_store = None
+        version_kwarg = {'zarr_version': zarr_version}
+    else:
+        if zarr_version == 2:
+            store = MemoryStore()
+            chunk_store = MemoryStore() if with_chunk_store else None
+        elif zarr_version == 3:
+            store = MemoryStoreV3()
+            chunk_store = MemoryStoreV3() if with_chunk_store else None
+        version_kwarg = {}
+    path = 'dataset' if zarr_version == 3 else None
+    z = group(store, chunk_store=chunk_store, path=path, **version_kwarg)
     z.create_group('g1')
     g2 = z.create_group('g2')
     g2.attrs['hello'] = 'world'
@@ -229,64 +236,81 @@ def test_consolidate_metadata(with_chunk_store, zarr_version):
     arr[:] = 1.0
     assert 16 == arr.nchunks_initialized
 
+    if stores_from_path:
+        # get the actual store class for use with consolidate_metadata
+        store_class = z._store
+    else:
+        store_class = store
+
     if zarr_version == 3:
         # error on v3 if path not provided
         with pytest.raises(ValueError):
-            consolidate_metadata(store, path=None)
+            consolidate_metadata(store_class, path=None)
 
         with pytest.raises(ValueError):
-            consolidate_metadata(store, path='')
+            consolidate_metadata(store_class, path='')
 
     # perform consolidation
-    out = consolidate_metadata(store, path=path)
+    out = consolidate_metadata(store_class, path=path)
     assert isinstance(out, Group)
     assert ['g1', 'g2'] == list(out)
-    if zarr_version == 2:
-        assert isinstance(out._store, ConsolidatedMetadataStore)
-        assert '.zmetadata' in store
-        meta_keys = ['.zgroup',
-                     'g1/.zgroup',
-                     'g2/.zgroup',
-                     'g2/.zattrs',
-                     'g2/arr/.zarray',
-                     'g2/arr/.zattrs']
-    else:
-        assert isinstance(out._store, ConsolidatedMetadataStoreV3)
-        assert 'meta/root/consolidated/.zmetadata' in store
-        meta_keys = ['zarr.json',
-                     meta_root + 'dataset.group.json',
-                     meta_root + 'dataset/g1.group.json',
-                     meta_root + 'dataset/g2.group.json',
-                     meta_root + 'dataset/g2/arr.array.json',
-                     'meta/root/consolidated.group.json']
-    for key in meta_keys:
-        del store[key]
+    if not stores_from_path:
+        if zarr_version == 2:
+            assert isinstance(out._store, ConsolidatedMetadataStore)
+            assert '.zmetadata' in store
+            meta_keys = ['.zgroup',
+                         'g1/.zgroup',
+                         'g2/.zgroup',
+                         'g2/.zattrs',
+                         'g2/arr/.zarray',
+                         'g2/arr/.zattrs']
+        else:
+            assert isinstance(out._store, ConsolidatedMetadataStoreV3)
+            assert 'meta/root/consolidated/.zmetadata' in store
+            meta_keys = ['zarr.json',
+                         meta_root + 'dataset.group.json',
+                         meta_root + 'dataset/g1.group.json',
+                         meta_root + 'dataset/g2.group.json',
+                         meta_root + 'dataset/g2/arr.array.json',
+                         'meta/root/consolidated.group.json']
+        for key in meta_keys:
+            del store[key]
 
     # open consolidated
-    z2 = open_consolidated(store, chunk_store=chunk_store, path=path)
+    z2 = open_consolidated(store, chunk_store=chunk_store, path=path, **version_kwarg)
     assert ['g1', 'g2'] == list(z2)
     assert 'world' == z2.g2.attrs['hello']
     assert 1 == z2.g2.arr.attrs['data']
     assert (z2.g2.arr[:] == 1.0).all()
     assert 16 == z2.g2.arr.nchunks
     assert 16 == z2.g2.arr.nchunks_initialized
 
-    # tests del/write on the store
-    if zarr_version == 2:
-        cmd = ConsolidatedMetadataStore(store)
-        with pytest.raises(PermissionError):
-            del cmd['.zgroup']
-        with pytest.raises(PermissionError):
-            cmd['.zgroup'] = None
+    if stores_from_path:
+        # path string is not a BaseStore subclass so cannot be used to
+        # initialize a ConsolidatedMetadataStore.
+        if zarr_version == 2:
+            with pytest.raises(ValueError):
+                cmd = ConsolidatedMetadataStore(store)
+        elif zarr_version == 3:
+            with pytest.raises(ValueError):
+                cmd = ConsolidatedMetadataStoreV3(store)
     else:
-        cmd = ConsolidatedMetadataStoreV3(store)
-        with pytest.raises(PermissionError):
-            del cmd[meta_root + 'dataset.group.json']
-        with pytest.raises(PermissionError):
-            cmd[meta_root + 'dataset.group.json'] = None
+        # tests del/write on the store
+        if zarr_version == 2:
+            cmd = ConsolidatedMetadataStore(store)
+            with pytest.raises(PermissionError):
+                del cmd['.zgroup']
+            with pytest.raises(PermissionError):
+                cmd['.zgroup'] = None
+        else:
+            cmd = ConsolidatedMetadataStoreV3(store)
+            with pytest.raises(PermissionError):
+                del cmd[meta_root + 'dataset.group.json']
+            with pytest.raises(PermissionError):
+                cmd[meta_root + 'dataset.group.json'] = None
 
-    # test getsize on the store
-    assert isinstance(getsize(cmd), Integral)
+        # test getsize on the store
+        assert isinstance(getsize(cmd), Integral)
 
     # test new metadata are not writeable
     with pytest.raises(PermissionError):
@@ -316,62 +340,11 @@ def test_consolidate_metadata(with_chunk_store, zarr_version):
 
     # make sure keyword arguments are passed through without error
     open_consolidated(
-        store, chunk_store=chunk_store, path=path, cache_attrs=True, synchronizer=None
+        store, chunk_store=chunk_store, path=path, cache_attrs=True, synchronizer=None,
+        **version_kwarg,
     )
 
 
-def test_consolidated_with_chunk_store():
-    # setup initial data
-    store = MemoryStore()
-    chunk_store = MemoryStore()
-    z = group(store, chunk_store=chunk_store)
-    z.create_group('g1')
-    g2 = z.create_group('g2')
-    g2.attrs['hello'] = 'world'
-    arr = g2.create_dataset('arr', shape=(20, 20), chunks=(5, 5), dtype='f8')
-    assert 16 == arr.nchunks
-    assert 0 == arr.nchunks_initialized
-    arr.attrs['data'] = 1
-    arr[:] = 1.0
-    assert 16 == arr.nchunks_initialized
-
-    # perform consolidation
-    out = consolidate_metadata(store)
-    assert isinstance(out, Group)
-    assert '.zmetadata' in store
-    for key in ['.zgroup',
-                'g1/.zgroup',
-                'g2/.zgroup',
-                'g2/.zattrs',
-                'g2/arr/.zarray',
-                'g2/arr/.zattrs']:
-        del store[key]
-    # open consolidated
-    z2 = open_consolidated(store, chunk_store=chunk_store)
-    assert ['g1', 'g2'] == list(z2)
-    assert 'world' == z2.g2.attrs['hello']
-    assert 1 == z2.g2.arr.attrs['data']
-    assert (z2.g2.arr[:] == 1.0).all()
-    assert 16 == z2.g2.arr.nchunks
-    assert 16 == z2.g2.arr.nchunks_initialized
-
-    # test the data are writeable
-    z2.g2.arr[:] = 2
-    assert (z2.g2.arr[:] == 2).all()
-
-    # test invalid modes
-    with pytest.raises(ValueError):
-        open_consolidated(store, mode='a', chunk_store=chunk_store)
-    with pytest.raises(ValueError):
-        open_consolidated(store, mode='w', chunk_store=chunk_store)
-    with pytest.raises(ValueError):
-        open_consolidated(store, mode='w-', chunk_store=chunk_store)
-
-    # make sure keyword arguments are passed through without error
-    open_consolidated(store, cache_attrs=True, synchronizer=None,
-                      chunk_store=chunk_store)
-
-
 @pytest.mark.parametrize("options", (
     {"dimension_separator": "/"},
     {"dimension_separator": "."},