
Commit 4c36c21

Move hash method to metadata package and update requirements.
1 parent f3af705 commit 4c36c21

6 files changed: +58 -66 lines

datajoint.pub (+6)

@@ -0,0 +1,6 @@
+-----BEGIN PUBLIC KEY-----
+MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDUMOo2U7YQ1uOrKU/IreM3AQP2
+AXJC3au+S9W+dilxHcJ3e98bRVqrFeOofcGeRPoNc38fiLmLDUiBskJeVrpm29Wo
+AkH6yhZWk1o8NvGMhK4DLsJYlsH6tZuOx9NITKzJuOOH6X1I5Ucs7NOSKnmu7g5g
+WTT5kCgF5QAe5JN8WQIDAQAB
+-----END PUBLIC KEY-----

datajoint/plugin.py (+14 -63)

@@ -1,22 +1,7 @@
-import os
 import pkg_resources
-import hashlib
-import base64
 from pathlib import Path
 from cryptography.exceptions import InvalidSignature
-from cryptography.hazmat.backends import default_backend
-from cryptography.hazmat.primitives.serialization import load_pem_public_key
-from cryptography.hazmat.primitives.asymmetric import padding
-from cryptography.hazmat.primitives import hashes
-
-DJ_PUB_KEY = '''
------BEGIN PUBLIC KEY-----
-MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDUMOo2U7YQ1uOrKU/IreM3AQP2
-AXJC3au+S9W+dilxHcJ3e98bRVqrFeOofcGeRPoNc38fiLmLDUiBskJeVrpm29Wo
-AkH6yhZWk1o8NvGMhK4DLsJYlsH6tZuOx9NITKzJuOOH6X1I5Ucs7NOSKnmu7g5g
-WTT5kCgF5QAe5JN8WQIDAQAB
------END PUBLIC KEY-----
-'''
+from raphael_python_metadata import hash_pkg, verify

 discovered_plugins = {
     entry_point.module_name: dict(plugon=entry_point.name, verified=False)
@@ -25,54 +10,20 @@
 }


-def hash_pkg(pkgpath):
-    refpath = Path(pkgpath).absolute().parents[0]
-    details = ''
-    details = _update_details_dir(pkgpath, refpath, details)
-    # hash output to prepare for signing
-    return hashlib.sha1('blob {}\0{}'.format(len(details), details).encode()).hexdigest()
-
-
-def _update_details_dir(dirpath, refpath, details):
-    paths = sorted(Path(dirpath).absolute().glob('*'))
-    # walk a directory to collect info
-    for path in paths:
-        if 'pycache' not in str(path):
-            if os.path.isdir(str(path)):
-                details = _update_details_dir(path, refpath, details)
-            else:
-                details = _update_details_file(path, refpath, details)
-    return details
-
-
-def _update_details_file(filepath, refpath, details):
-    if '.sig' not in str(filepath):
-        with open(str(filepath), 'r') as f:
-            data = f.read()
-        # perform a SHA1 hash (same as git) that closely matches: git ls-files -s <dirname>
-        mode = 100644
-        hash = hashlib.sha1('blob {}\0{}'.format(len(data), data).encode()).hexdigest()
-        stage_no = 0
-        relative_path = str(filepath.relative_to(refpath))
-        details = '{}{} {} {}\t{}\n'.format(details, mode, hash, stage_no, relative_path)
-    return details
-
-
-def _update_error_stack(module):
+def _update_error_stack(plugin_name):
     try:
-        pkg = pkg_resources.get_distribution(module.__name__)
-        signature = pkg.get_metadata('datajoint.sig')
-        pub_key = load_pem_public_key(bytes(DJ_PUB_KEY, 'UTF-8'), backend=default_backend())
-        data = hash_pkg(module.__path__[0])
-        pub_key.verify(
-            base64.b64decode(signature.encode()),
-            data.encode(),
-            padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH),
-            hashes.SHA256())
-        discovered_plugins[module.__name__]['verified'] = True
-        print('DataJoint verified plugin `{}` introduced.'.format(module.__name__))
+        base_name = 'datajoint'
+        base_meta = pkg_resources.get_distribution(base_name)
+        plugin_meta = pkg_resources.get_distribution(plugin_name)
+
+        data = hash_pkg(str(Path(plugin_meta.module_path, plugin_name)))
+        signature = plugin_meta.get_metadata('{}.sig'.format(plugin_name))
+        pubkey_path = str(Path(base_meta.egg_info, '{}.pub'.format(base_name)))
+
+        verify(pubkey_path, data, signature)
+        discovered_plugins[plugin_name]['verified'] = True
+        print('DataJoint verified plugin `{}` introduced.'.format(plugin_name))
     except (FileNotFoundError, InvalidSignature):
-        print('Unverified plugin `{}` introduced.'.format(module.__name__))
+        print('Unverified plugin `{}` introduced.'.format(plugin_name))


 def override(plugin_type, context, method_list=None):
@@ -84,7 +35,7 @@ def override(plugin_type, context, method_list=None):
         module = __import__(module_name)
         module_dict = module.__dict__
         # update error stack (if applicable)
-        _update_error_stack(module)
+        _update_error_stack(module.__name__)
         # override based on plugon preference
         if method_list is not None:
             new_methods = []
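
With this change, plugin verification no longer hashes files and checks RSA signatures inline; it delegates to hash_pkg and verify from raphael_python_metadata and reads the public key from datajoint's own package metadata. Below is a minimal sketch of the new flow as a standalone snippet; 'my_plugin' is a hypothetical installed plugin distribution, and the attribute access simply mirrors the committed code.

import pkg_resources
from pathlib import Path
from raphael_python_metadata import hash_pkg, verify

base_meta = pkg_resources.get_distribution('datajoint')
plugin_meta = pkg_resources.get_distribution('my_plugin')  # hypothetical plugin distribution

# Hash the installed plugin package tree, read the signature shipped in the
# plugin's metadata, and locate the public key installed alongside datajoint.
data = hash_pkg(str(Path(plugin_meta.module_path, 'my_plugin')))
signature = plugin_meta.get_metadata('my_plugin.sig')
pubkey_path = str(Path(base_meta.egg_info, 'datajoint.pub'))

verify(pubkey_path, data, signature)  # raises on mismatch; plugin.py catches InvalidSignature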

local-docker-compose.yml (+2 -1)

@@ -32,7 +32,8 @@ services:
     command: >
       /bin/sh -c
       "
-      pip install --user nose nose-cov coveralls ptvsd .;
+      # pip install --user nose nose-cov coveralls ptvsd .;
+      pip install --user nose nose-cov coveralls ptvsd;
       pip freeze | grep datajoint;
       ## You may run the below tests once sh'ed into container i.e. docker exec -it datajoint-python_app_1 sh
       # nosetests -vsw tests --with-coverage --cover-package=datajoint; #run all tests

requirements.txt (+2 -1)

@@ -8,4 +8,5 @@ networkx
 pydot
 minio
 matplotlib
-cryptography
+cryptography
+raphael_python_metadata

setup.py (+3 -1)

@@ -31,5 +31,7 @@
     keywords='database organization',
     packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
     install_requires=requirements,
-    python_requires='~={}.{}'.format(*min_py_version)
+    python_requires='~={}.{}'.format(*min_py_version),
+    setup_requires=['raphael_python_metadata'],
+    pubkey_path='./datajoint.pub'
 )
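
Note that pubkey_path is not a standard setuptools keyword; presumably it is registered by raphael_python_metadata (pulled in via setup_requires) so that datajoint.pub gets installed with the package metadata, which is exactly where the new plugin.py looks for it. A quick check that the key landed where _update_error_stack expects it, under that assumption:

import pkg_resources
from pathlib import Path

dist = pkg_resources.get_distribution('datajoint')
# plugin.py resolves the key as Path(dist.egg_info, 'datajoint.pub')
print(Path(dist.egg_info, 'datajoint.pub').exists())  # expected True after install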

tests/test_plugin.py (+31)

@@ -0,0 +1,31 @@
+# import datajoint.errors as djerr
+# import datajoint.plugin as p
+# import importlib
+
+
+# def test_normal_djerror():
+#     try:
+#         raise djerr.DataJointError
+#     except djerr.DataJointError as e:
+#         assert(e.__cause__ is None)
+
+
+# def test_unverified_djerror():
+#     try:
+#         curr_plugins = p.discovered_plugins
+#         p.discovered_plugins = dict(test_plugin_module=dict(verified=False, plugon='example'))
+#         importlib.reload(djerr)
+#         raise djerr.DataJointError
+#     except djerr.DataJointError as e:
+#         p.discovered_plugins = curr_plugins
+#         importlib.reload(djerr)
+#         assert(e.__cause__ is None)
+#         # p.discovered_plugins = curr_plugins
+#         # importlib.reload(djerr)
+#         # print(isinstance(e.__cause__, djerr.PluginWarning))
+#         # assert(isinstance(e.__cause__, djerr.PluginWarning))
+
+
+# # def test_verified_djerror():
+# #     assert_equal(get_host('hub://fakeservices.datajoint.io/datajoint/travis'),
+# #                  'fakeservices.datajoint.io:3306')
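
The new test module is committed entirely commented out. For reference, the first case would presumably run as-is once uncommented (a sketch assuming the nose/pytest layout used by the rest of this repo):

import datajoint.errors as djerr


def test_normal_djerror():
    try:
        raise djerr.DataJointError
    except djerr.DataJointError as e:
        assert e.__cause__ is None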
