Commit 870f4de

Merge pull request #688 from guzman-raphael/str-adapted-types
Correct Attribute Adapter from casting to string + minor updates
2 parents 6f367d1 + 9907d64 commit 870f4de
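
For context, DataJoint's AttributeAdapter lets a table column store an arbitrary object through a conversion pair: put() turns the object into the declared storage type on insert, and get() rebuilds it on fetch. The bug fixed here (#684) was that the value returned by get() was coerced into a string. A minimal sketch of such an adapter, modeled on the graph adapter in the test schema (class and variable names are illustrative):

    import datajoint as dj
    import networkx as nx

    class GraphAdapter(dj.AttributeAdapter):
        attribute_type = 'longblob'   # the underlying DataJoint type

        def put(self, obj):
            # convert the object into the storable value on insert
            return list(obj.edges)

        def get(self, value):
            # rebuild the object on fetch; after this fix the returned graph
            # is passed through as-is instead of being cast to str
            return nx.Graph(value)

    graph = GraphAdapter()   # referenced in table definitions as <graph>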

File tree: 11 files changed (+103, -30 lines)

.gitignore

Lines changed: 2 additions & 1 deletion
@@ -21,4 +21,5 @@ build/
 *.env
 local-docker-compose.yml
 notebooks/*
-__main__.py
+__main__.py
+jupyter_custom.js

.travis.yml

Lines changed: 4 additions & 3 deletions
@@ -13,14 +13,15 @@ services:
 main: &main
   stage: Alpine
   os: linux
+  dist: xenial # precise, trusty, xenial, bionic
   language: shell
   script:
     - docker-compose -f LNX-docker-compose.yml up --build --exit-code-from dj
 jobs:
   include:
     - <<: *main
       env:
-      - PY_VER: "3.8-rc"
+      - PY_VER: "3.8"
       - MYSQL_VER: "5.7"
     - <<: *main
       env:
@@ -36,7 +37,7 @@ jobs:
       - MYSQL_VER: "5.7"
     - <<: *main
       env:
-      - PY_VER: "3.8-rc"
+      - PY_VER: "3.8"
       - MYSQL_VER: "8.0"
     - <<: *main
       env:
@@ -52,7 +53,7 @@ jobs:
       - MYSQL_VER: "8.0"
     - <<: *main
       env:
-      - PY_VER: "3.8-rc"
+      - PY_VER: "3.8"
       - MYSQL_VER: "5.6"
     - <<: *main
       env:

CHANGELOG.md

Lines changed: 5 additions & 2 deletions
@@ -1,6 +1,9 @@
 ## Release notes
 
-### 0.12.0 -- October 31, 2019
+### 0.12.1 -- Nov 2, 2019
+* Bugfix - AttributeAdapter converts into a string (#684)
+
+### 0.12.0 -- Oct 31, 2019
 * Dropped support for Python 3.4
 * Support secure connections with TLS (aka SSL) PR #620
 * Convert numpy array from python object to appropriate data type if all elements are of the same type (#587) PR #608
@@ -31,7 +34,7 @@
 ### 0.11.3 -- Jul 26, 2019
 * Fix incompatibility with pyparsing 2.4.1 (#629) PR #631
 
-### 0.11.2 -- July 25, 2019
+### 0.11.2 -- Jul 25, 2019
 * Fix #628 - incompatibility with pyparsing 2.4.1
 
 ### 0.11.1 -- Nov 15, 2018

datajoint/external.py

Lines changed: 1 addition & 1 deletion
@@ -251,7 +251,7 @@ def download_filepath(self, filepath_hash):
         checksum = uuid_from_file(local_filepath)
         if checksum != contents_hash:  # this should never happen without outside interference
             raise DataJointError("'{file}' downloaded but did not pass checksum'".format(file=local_filepath))
-        return local_filepath, contents_hash
+        return str(local_filepath), contents_hash
 
     # --- UTILITIES ---
 
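The practical effect is that download_filepath now hands back a plain str rather than a pathlib.Path. A hedged sketch of the new contract, using the 'repo' store name from the tests below:

    # `ext` is an external table, e.g. schema.external['repo']
    local_path, checksum = ext.download_filepath(filepath_hash)
    assert isinstance(local_path, str)   # was a pathlib.Path before 0.12.1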
datajoint/fetch.py

Lines changed: 4 additions & 4 deletions
@@ -50,7 +50,7 @@ def _get(connection, attr, data, squeeze, download_path):
     adapt = attr.adapter.get if attr.adapter else lambda x: x
 
     if attr.is_filepath:
-        return str(adapt(extern.download_filepath(uuid.UUID(bytes=data))[0]))
+        return adapt(extern.download_filepath(uuid.UUID(bytes=data))[0])
 
     if attr.is_attachment:
         # Steps:
@@ -65,22 +65,22 @@ def _get(connection, attr, data, squeeze, download_path):
         if local_filepath.is_file():
             attachment_checksum = _uuid if attr.is_external else hash.uuid_from_buffer(data)
             if attachment_checksum == hash.uuid_from_file(local_filepath, init_string=attachment_name + '\0'):
-                return str(adapt(local_filepath))  # checksum passed, no need to download again
+                return adapt(str(local_filepath))  # checksum passed, no need to download again
             # generate the next available alias filename
             for n in itertools.count():
                 f = local_filepath.parent / (local_filepath.stem + '_%04x' % n + local_filepath.suffix)
                 if not f.is_file():
                     local_filepath = f
                     break
                 if attachment_checksum == hash.uuid_from_file(f, init_string=attachment_name + '\0'):
-                    return str(adapt(f))  # checksum passed, no need to download again
+                    return adapt(str(f))  # checksum passed, no need to download again
         # Save attachment
         if attr.is_external:
             extern.download_attachment(_uuid, attachment_name, local_filepath)
         else:
             # write from buffer
             safe_write(local_filepath, data.split(b"\0", 1)[1])
-        return str(adapt(local_filepath))  # download file from remote store
+        return adapt(str(local_filepath))  # download file from remote store
 
     return adapt(uuid.UUID(bytes=data) if attr.uuid else (
         blob.unpack(extern.get(uuid.UUID(bytes=data)) if attr.is_external else data, squeeze=squeeze)
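The core of the fix is the order of operations around the adapter: previously the adapted value was wrapped in str(), which flattened any non-string object an adapter returned; now the local path is converted to str before being handed to the adapter, and whatever the adapter returns is passed through untouched. As a before/after sketch (adapt stands for attr.adapter.get, as in the code above):

    # before (0.12.0): the adapter's result was coerced to a string
    return str(adapt(local_filepath))

    # after (0.12.1): the adapter receives a str path and its result is preserved
    return adapt(str(local_filepath))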

datajoint/version.py

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
-__version__ = "0.12.0"
+__version__ = "0.12.1"
 
 assert len(__version__) <= 10  # The log table limits version to the 10 characters

docs-parts/admin/5-blob-config_lang1.rst

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@
         location = 'datajoint-projects/lab1',
         access_key='1234567',
         secret_key='foaf1234'),
-    'external-raw'] = dict(  # 'raw' storage for this pipeline
+    'external-raw': dict(  # 'raw' storage for this pipeline
         protocol='file',
         location='/net/djblobs/myschema')
     }
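The removed line was a syntax error in the documented example: inside a dict literal it closed a bracket and used assignment instead of declaring a key. The corrected snippet nests both stores as keys of one dict; a minimal sketch under the assumption that the first store is named 'external' and uses the s3 protocol (only the lines shown in the hunk come from the docs):

    dj.config['stores'] = {
        'external': dict(                 # assumed name of the first store
            protocol='s3',
            location='datajoint-projects/lab1',
            access_key='1234567',
            secret_key='foaf1234'),
        'external-raw': dict(             # 'raw' storage for this pipeline
            protocol='file',
            location='/net/djblobs/myschema')
    }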

docs-parts/intro/Releases_lang1.rst

Lines changed: 5 additions & 1 deletion
@@ -1,4 +1,8 @@
-0.12.0 -- October 31, 2019
+0.12.1 -- Nov 2, 2019
+-------------------------
+* Bugfix - AttributeAdapter converts into a string (#684)
+
+0.12.0 -- Oct 31, 2019
 -------------------------
 * Dropped support for Python 3.4
 * Support secure connections with TLS (aka SSL) PR #620

tests/schema_adapted.py

Lines changed: 47 additions & 1 deletion
@@ -1,9 +1,19 @@
 import datajoint as dj
 import networkx as nx
+from pathlib import Path
+import tempfile
 from datajoint import errors
 
-from . import PREFIX, CONN_INFO
+from . import PREFIX, CONN_INFO, S3_CONN_INFO
 
+stores_config = {
+    'repo_s3': dict(
+        S3_CONN_INFO,
+        protocol='s3',
+        location='adapted/repo',
+        stage=tempfile.mkdtemp())
+}
+dj.config['stores'] = stores_config
 
 schema_name = PREFIX + '_test_custom_datatype'
 schema = dj.schema(schema_name, connection=dj.conn(**CONN_INFO))
@@ -40,5 +50,41 @@ class Connectivity(dj.Manual):
     conn_graph = null : <graph>
     """
 
+errors._switch_filepath_types(True)
 
+
+class Filepath2GraphAdapter(dj.AttributeAdapter):
+
+    attribute_type = 'filepath@repo_s3'
+
+    @staticmethod
+    def get(obj):
+        s = open(obj, "r").read()
+        return nx.spring_layout(
+            nx.lollipop_graph(4, 2), seed=int(s))
+
+    @staticmethod
+    def put(obj):
+        path = Path(
+            dj.config['stores']['repo_s3']['stage'], 'sample.txt')
+
+        f = open(path, "w")
+        f.write(str(obj*obj))
+        f.close()
+
+        return path
+
+
+file2graph = Filepath2GraphAdapter()
+
+
+@schema
+class Position(dj.Manual):
+    definition = """
+    pos_id : int
+    ---
+    seed_root: <file2graph>
+    """
+
+errors._switch_filepath_types(False)
 errors._switch_adapted_types(False)  # disable again
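Filepath2GraphAdapter exercises an adapter layered on a filepath@ store: put() writes the squared seed value to a staged text file and returns the path, which DataJoint stores through the repo_s3 filepath type; get() receives that path back as a string, reads the seed, and regenerates a spring layout of a lollipop graph. A hedged sketch of the round trip, mirroring the assertions in test_adapted_attributes.py below:

    Position.insert1({'pos_id': 0, 'seed_root': 3})        # put(): 3*3 -> 'sample.txt'
    layout = (Position & 'pos_id=0').fetch1('seed_root')   # get(): stored path -> layout dict
    assert isinstance(layout, dict) and len(layout) == 6   # 4+2 lollipop-graph nodes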

tests/test_adapted_attributes.py

Lines changed: 17 additions & 1 deletion
@@ -3,7 +3,7 @@
 from itertools import zip_longest
 from nose.tools import assert_true, assert_equal
 from . import schema_adapted as adapted
-from .schema_adapted import graph
+from .schema_adapted import graph, file2graph
 
 
 def test_adapted_type():
@@ -20,6 +20,22 @@ def test_adapted_type():
     dj.errors._switch_adapted_types(False)
 
 
+def test_adapted_filepath_type():
+    # https://github.com/datajoint/datajoint-python/issues/684
+    dj.errors._switch_adapted_types(True)
+    dj.errors._switch_filepath_types(True)
+    c = adapted.Position()
+    Position.insert([{'pos_id': 0, 'seed_root': 3}])
+    result = (Position & 'pos_id=0').fetch1('seed_root')
+
+    assert_true(isinstance(result, dict))
+    assert_equal(0.3761992090175474, result[1][0])
+    assert_true(6 == len(result))
+
+    c.delete()
+    dj.errors._switch_filepath_types(False)
+    dj.errors._switch_adapted_types(False)
+
 # test spawned classes
 local_schema = dj.schema(adapted.schema_name)
 local_schema.spawn_missing_classes()

tests/test_filepath.py

Lines changed: 16 additions & 14 deletions
@@ -10,6 +10,7 @@
 def setUp(self):
     dj.config['stores'] = stores_config
 
+
 def test_path_match(store="repo"):
     """ test file path matches and empty file"""
     ext = schema.external[store]
@@ -22,7 +23,7 @@ def test_path_match(store="repo"):
     open(str(managed_file), 'a').close()
 
     # put the file
-    uuid = ext.upload_filepath(managed_file)
+    uuid = ext.upload_filepath(str(managed_file))
 
     #remove
     managed_file.unlink()
@@ -35,12 +36,13 @@ def test_path_match(store="repo"):
 
     # # Download the file and check its contents.
     restored_path, checksum = ext.download_filepath(uuid)
-    assert_equal(restored_path, managed_file)
-    assert_equal(checksum, dj.hash.uuid_from_file(managed_file))
+    assert_equal(restored_path, str(managed_file))
+    assert_equal(checksum, dj.hash.uuid_from_file(str(managed_file)))
 
     # cleanup
     ext.delete(delete_external_files=True)
 
+
 def test_filepath(store="repo"):
     """ test file management """
     ext = schema.external[store]
@@ -56,8 +58,8 @@ def test_filepath(store="repo"):
         f.write(data)
 
     # put the same file twice to ensure storing once
-    uuid1 = ext.upload_filepath(managed_file)
-    uuid2 = ext.upload_filepath(managed_file)  # no duplication should arise if file is the same
+    uuid1 = ext.upload_filepath(str(managed_file))
+    uuid2 = ext.upload_filepath(str(managed_file))  # no duplication should arise if file is the same
     assert_equal(uuid1, uuid2)
 
     # remove to ensure downloading
@@ -67,8 +69,8 @@ def test_filepath(store="repo"):
     # Download the file and check its contents. Repeat causes no download from remote
     for _ in 1, 2:
         restored_path, checksum = ext.download_filepath(uuid1)
-        assert_equal(restored_path, managed_file)
-        assert_equal(checksum, dj.hash.uuid_from_file(managed_file))
+        assert_equal(restored_path, str(managed_file))
+        assert_equal(checksum, dj.hash.uuid_from_file(str(managed_file)))
 
     # verify same data
     with managed_file.open('rb') as f:
@@ -92,8 +94,8 @@ def test_duplicate_upload(store="repo"):
     managed_file.parent.mkdir(parents=True, exist_ok=True)
     with managed_file.open('wb') as f:
         f.write(os.urandom(300))
-    ext.upload_filepath(managed_file)
-    ext.upload_filepath(managed_file)  # this is fine because the file is the same
+    ext.upload_filepath(str(managed_file))
+    ext.upload_filepath(str(managed_file))  # this is fine because the file is the same
 
 
 def test_duplicate_upload_s3():
@@ -110,10 +112,10 @@ def test_duplicate_error(store="repo"):
     managed_file.parent.mkdir(parents=True, exist_ok=True)
     with managed_file.open('wb') as f:
         f.write(os.urandom(300))
-    ext.upload_filepath(managed_file)
+    ext.upload_filepath(str(managed_file))
     with managed_file.open('wb') as f:
         f.write(os.urandom(300))
-    ext.upload_filepath(managed_file)  # this should raise exception because the file has changed
+    ext.upload_filepath(str(managed_file))  # this should raise exception because the file has changed
 
 
 def test_duplicate_error_s3():
@@ -135,7 +137,7 @@ def test_filepath_class(table=Filepath(), store="repo"):
     assert_equal(data, contents)
 
     # upload file into shared repo
-    table.insert1((1, managed_file))
+    table.insert1((1, str(managed_file)))
 
     # remove file locally
     managed_file.unlink()
@@ -187,7 +189,7 @@ def test_filepath_cleanup(table=Filepath(), store="repo"):
         managed_file.parent.mkdir(parents=True, exist_ok=True)
         with managed_file.open('wb') as f:
             f.write(contents)  # same in all files
-        table.insert1((i, managed_file))
+        table.insert1((i, str(managed_file)))
     assert_equal(len(table), n)
 
     ext = schema.external[store]
@@ -235,7 +237,7 @@ def test_return_string(table=Filepath(), store="repo"):
     assert_equal(data, contents)
 
     # upload file into shared repo
-    table.insert1((138, managed_file))
+    table.insert1((138, str(managed_file)))
 
     # remove file locally
     managed_file.unlink()
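Taken together, these test updates pin down the string-based filepath contract in 0.12.1: upload_filepath is given a str, and download_filepath returns a str. A minimal sketch of the round trip under that assumption (the store name 'repo' and the file location are illustrative, and `schema` is assumed to be an activated schema with that store configured):

    from pathlib import Path

    managed_file = Path(dj.config['stores']['repo']['stage'], 'myschema', 'data.bin')
    managed_file.parent.mkdir(parents=True, exist_ok=True)
    managed_file.write_bytes(b'hello')

    ext = schema.external['repo']                      # external table for the store
    uuid = ext.upload_filepath(str(managed_file))      # pass a str, not a Path
    restored_path, checksum = ext.download_filepath(uuid)
    assert restored_path == str(managed_file)          # a str comes back as well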
