Skip to content

Flake8 in travis #402

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 11 commits into from
Feb 7, 2016
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 27 additions & 15 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,13 +57,19 @@ matrix:
- python: 2.7
env:
- INSTALL_TYPE=requirements
- python: 2.7
env:
- STYLE=1
- python: 3.5
env:
- STYLE=1
before_install:
- source tools/travis_tools.sh
- virtualenv --python=python venv
- source venv/bin/activate
- python --version # just to check
- pip install -U pip # upgrade to latest pip to find 3.5 wheels
- retry pip install nose # always
- pip install -U pip wheel # upgrade to latest pip to find 3.5 wheels; wheel to avoid errors
- retry pip install nose flake8 # always
- wheelhouse_pip_install $DEPENDS
# pydicom <= 0.9.8 doesn't install on python 3
- if [ "${TRAVIS_PYTHON_VERSION:0:1}" == "2" ]; then
Expand Down Expand Up @@ -98,20 +104,26 @@ install:
- export NIBABEL_DATA_DIR="$PWD/nibabel-data"
# command to run tests, e.g. python setup.py test
script:
# Change into an innocuous directory and find tests from installation
- mkdir for_testing
- cd for_testing
- if [ "${COVERAGE}" == "1" ]; then
cp ../.coveragerc .;
COVER_ARGS="--with-coverage --cover-package nibabel";
fi
- if [ "$DOC_DOC_TEST" == "1" ]; then
pip install sphinx numpydoc texext;
cd ../doc;
make html;
make doctest;
- |
if [ "${STYLE}" == "1" ]; then
# Run styles only on core nibabel code.
flake8 nibabel
else
nosetests --with-doctest $COVER_ARGS nibabel;
# Change into an innocuous directory and find tests from installation
mkdir for_testing
cd for_testing
if [ "${COVERAGE}" == "1" ]; then
cp ../.coveragerc .;
COVER_ARGS="--with-coverage --cover-package nibabel";
fi
if [ "$DOC_DOC_TEST" == "1" ]; then
pip install sphinx numpydoc texext;
cd ../doc;
make html;
make doctest;
else
nosetests --with-doctest $COVER_ARGS nibabel;
fi
fi
after_success:
- if [ "${COVERAGE}" == "1" ]; then coveralls; fi
Expand Down
6 changes: 3 additions & 3 deletions nibabel/affines.py
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ def from_matvec(matrix, vector=None):
t = np.zeros((nin + 1, nout + 1), matrix.dtype)
t[0:nin, 0:nout] = matrix
t[nin, nout] = 1.
if not vector is None:
if vector is not None:
t[0:nin, nout] = vector
return t

Expand Down Expand Up @@ -214,7 +214,7 @@ def append_diag(aff, steps, starts=()):
starts = np.zeros(n_steps, dtype=steps.dtype)
elif len(starts) != n_steps:
raise ValueError('Steps should have same length as starts')
old_n_out, old_n_in = aff.shape[0]-1, aff.shape[1]-1
old_n_out, old_n_in = aff.shape[0] - 1, aff.shape[1] - 1
# make new affine
aff_plus = np.zeros((old_n_out + n_steps + 1,
old_n_in + n_steps + 1), dtype=aff.dtype)
Expand All @@ -223,7 +223,7 @@ def append_diag(aff, steps, starts=()):
aff_plus[:old_n_out, -1] = aff[:old_n_out, -1]
# Add new diagonal elements
for i, el in enumerate(steps):
aff_plus[old_n_out+i, old_n_in+i] = el
aff_plus[old_n_out + i, old_n_in + i] = el
# Add translations for new affine, plus last 1
aff_plus[old_n_out:, -1] = list(starts) + [1]
return aff_plus
Expand Down
24 changes: 12 additions & 12 deletions nibabel/analyze.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@
('session_error', 'i2'),
('regular', 'S1'),
('hkey_un0', 'S1')
]
]
image_dimension_dtd = [
('dim', 'i2', (8,)),
('vox_units', 'S4'),
Expand All @@ -127,7 +127,7 @@
('verified', 'i4'),
('glmax', 'i4'),
('glmin', 'i4')
]
]
data_history_dtd = [
('descrip', 'S80'),
('aux_file', 'S24'),
Expand All @@ -147,7 +147,7 @@
('omin', 'i4'),
('smax', 'i4'),
('smin', 'i4')
]
]

# Full header numpy dtype combined across sub-fields
header_dtype = np.dtype(header_key_dtd + image_dimension_dtd +
Expand Down Expand Up @@ -606,7 +606,7 @@ def get_data_shape(self):
ndims = dims[0]
if ndims == 0:
return 0,
return tuple(int(d) for d in dims[1:ndims+1])
return tuple(int(d) for d in dims[1:ndims + 1])

def set_data_shape(self, shape):
''' Set shape of data
Expand All @@ -624,18 +624,18 @@ def set_data_shape(self, shape):
dims[:] = 1
dims[0] = ndims
try:
dims[1:ndims+1] = shape
dims[1:ndims + 1] = shape
except (ValueError, OverflowError):
# numpy 1.4.1 at least generates a ValueError from trying to set a
# python long into an int64 array (dims are int64 for nifti2)
values_fit = False
else:
values_fit = np.all(dims[1:ndims+1] == shape)
values_fit = np.all(dims[1:ndims + 1] == shape)
# Error if we did not succeed setting dimensions
if not values_fit:
raise HeaderDataError('shape %s does not fit in dim datatype' %
(shape,))
self._structarr['pixdim'][ndims+1:] = 1.0
self._structarr['pixdim'][ndims + 1:] = 1.0

def get_base_affine(self):
''' Get affine from basic (shared) header fields
Expand All @@ -659,8 +659,8 @@ def get_base_affine(self):
hdr = self._structarr
dims = hdr['dim']
ndim = dims[0]
return shape_zoom_affine(hdr['dim'][1:ndim+1],
hdr['pixdim'][1:ndim+1],
return shape_zoom_affine(hdr['dim'][1:ndim + 1],
hdr['pixdim'][1:ndim + 1],
self.default_x_flip)

get_best_affine = get_base_affine
Expand Down Expand Up @@ -691,7 +691,7 @@ def get_zooms(self):
if ndim == 0:
return (1.0,)
pixdims = hdr['pixdim']
return tuple(pixdims[1:ndim+1])
return tuple(pixdims[1:ndim + 1])

def set_zooms(self, zooms):
''' Set zooms into header fields
Expand All @@ -708,7 +708,7 @@ def set_zooms(self, zooms):
if np.any(zooms < 0):
raise HeaderDataError('zooms must be positive')
pixdims = hdr['pixdim']
pixdims[1:ndim+1] = zooms[:]
pixdims[1:ndim + 1] = zooms[:]

def as_analyze_map(self):
""" Return header as mapping for conversion to Analyze types
Expand Down Expand Up @@ -794,7 +794,7 @@ def set_slope_inter(self, slope, inter=None):
If float, value must be 0.0 or we raise a ``HeaderTypeError``
'''
if ((slope in (None, 1) or np.isnan(slope)) and
(inter in (None, 0) or np.isnan(inter))):
(inter in (None, 0) or np.isnan(inter))):
return
raise HeaderTypeError('Cannot set slope != 1 or intercept != 0 '
'for Analyze headers')
Expand Down
1 change: 1 addition & 0 deletions nibabel/batteryrunners.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,7 @@ def __len__(self):


class Report(object):

def __init__(self,
error=Exception,
problem_level=0,
Expand Down
2 changes: 0 additions & 2 deletions nibabel/benchmarks/bench_array_to_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@

import numpy as np

from ..externals.six import BytesIO
from ..volumeutils import array_to_file

from .butils import print_git_title

Expand Down
2 changes: 0 additions & 2 deletions nibabel/benchmarks/bench_finite_range.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@

import numpy as np

from ..externals.six import BytesIO
from ..volumeutils import finite_range

from .butils import print_git_title

Expand Down
8 changes: 4 additions & 4 deletions nibabel/casting.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,10 +269,10 @@ def type_info(np_type):
# Oh dear, we don't recognize the type information. Try some known types
# and then give up. At this stage we're expecting exotic longdouble or
# their complex equivalent.
if not np_type in (np.longdouble, np.longcomplex) or width not in (16, 32):
if np_type not in (np.longdouble, np.longcomplex) or width not in (16, 32):
raise FloatingError('We had not expected type %s' % np_type)
if (vals == (1, 1, 16) and on_powerpc() and
_check_maxexp(np.longdouble, 1024)):
_check_maxexp(np.longdouble, 1024)):
# double pair on PPC. The _check_nmant routine does not work for this
# type, hence the powerpc platform check instead
ret.update(dict(nmant=106, width=width))
Expand Down Expand Up @@ -439,7 +439,7 @@ def int_to_float(val, flt_type):
f : numpy scalar
of type `flt_type`
"""
if not flt_type is np.longdouble:
if flt_type is not np.longdouble:
return flt_type(val)
# The following works around a nasty numpy 1.4.1 bug such that:
# >>> int(np.uint32(2**32-1)
Expand Down Expand Up @@ -664,7 +664,7 @@ def best_float():
except FloatingError:
return np.float64
if (long_info['nmant'] > type_info(np.float64)['nmant'] and
machine() != 'sparc64'): # sparc has crazy-slow float128
machine() != 'sparc64'): # sparc has crazy-slow float128
return np.longdouble
return np.float64

Expand Down
5 changes: 4 additions & 1 deletion nibabel/checkwarns.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,12 @@
from .testing import (error_warnings, suppress_warnings)


warnings.warn('The checkwarns module is deprecated and will be removed in nibabel v3.0', FutureWarning)
warnings.warn('The checkwarns module is deprecated and will be removed '
'in nibabel v3.0', FutureWarning)


class ErrorWarnings(error_warnings):

def __init__(self, *args, **kwargs):
warnings.warn('ErrorWarnings is deprecated and will be removed in '
'nibabel v3.0; use nibabel.testing.error_warnings.',
Expand All @@ -27,6 +29,7 @@ def __init__(self, *args, **kwargs):


class IgnoreWarnings(suppress_warnings):

def __init__(self, *args, **kwargs):
warnings.warn('IgnoreWarnings is deprecated and will be removed in '
'nibabel v3.0; use nibabel.testing.suppress_warnings.',
Expand Down
13 changes: 8 additions & 5 deletions nibabel/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ class BomberError(DataError, AttributeError):

class Datasource(object):
''' Simple class to add base path to relative path '''

def __init__(self, base_path):
''' Initialize datasource

Expand Down Expand Up @@ -87,7 +88,7 @@ def list_files(self, relative=True):
out_list = list()
for base, dirs, files in os.walk(self.base_path):
if relative:
base = base[len(self.base_path)+1:]
base = base[len(self.base_path) + 1:]
for filename in files:
out_list.append(pjoin(base, filename))
return out_list
Expand All @@ -97,6 +98,7 @@ class VersionedDatasource(Datasource):
''' Datasource with version information in config file

'''

def __init__(self, base_path, config_filename=None):
''' Initialize versioned datasource

Expand Down Expand Up @@ -239,8 +241,8 @@ def find_data_dir(root_dirs, *names):
if os.path.isdir(pth):
return pth
raise DataError('Could not find datasource "%s" in data path "%s"' %
(ds_relative,
os.path.pathsep.join(root_dirs)))
(ds_relative,
os.path.pathsep.join(root_dirs)))


def make_datasource(pkg_def, **kwargs):
Expand Down Expand Up @@ -296,14 +298,15 @@ def make_datasource(pkg_def, **kwargs):
e)
if 'name' in pkg_def:
msg += '\n\nYou may need the package "%s"' % pkg_def['name']
if not pkg_hint is None:
if pkg_hint is not None:
msg += '\n\n%s' % pkg_hint
raise DataError(msg)
return VersionedDatasource(pth)


class Bomber(object):
''' Class to raise an informative error when used '''

def __init__(self, name, msg):
self.name = name
self.msg = msg
Expand Down Expand Up @@ -350,7 +353,7 @@ def datasource_or_bomber(pkg_def, **options):
return Bomber(sys_relpath, str(e))
# check version
if (version is None or
LooseVersion(ds.version) >= LooseVersion(version)):
LooseVersion(ds.version) >= LooseVersion(version)):
return ds
if 'name' in pkg_def:
pkg_name = pkg_def['name']
Expand Down
1 change: 1 addition & 0 deletions nibabel/deprecated.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ class ModuleProxy(object):
when you do attribute access and return the attributes of the imported
module.
"""

def __init__(self, module_name):
self._module_name = module_name

Expand Down
11 changes: 8 additions & 3 deletions nibabel/dft.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,10 +57,11 @@ def __init__(self, series, i, si):

def __str__(self):
fmt = 'expecting instance number %d, got %d'
return fmt % (self.i+1, self.si.instance_number)
return fmt % (self.i + 1, self.si.instance_number)


class _Study(object):

def __init__(self, d):
self.uid = d['uid']
self.date = d['date']
Expand Down Expand Up @@ -93,6 +94,7 @@ def patient_name_or_uid(self):


class _Series(object):

def __init__(self, d):
self.uid = d['uid']
self.study = d['study']
Expand Down Expand Up @@ -160,7 +162,7 @@ def as_nifti(self):
for (i, si) in enumerate(self.storage_instances):
if i + 1 != si.instance_number:
raise InstanceStackError(self, i, si)
logger.info('reading %d/%d' % (i+1, len(self.storage_instances)))
logger.info('reading %d/%d' % (i + 1, len(self.storage_instances)))
d = self.storage_instances[i].dicom()
data[i, :, :] = d.pixel_array

Expand Down Expand Up @@ -190,7 +192,7 @@ def as_nifti(self):
m = ((pdi * cosi[0], pdj * cosj[0], pdk * cosk[0], pos_1[0]),
(pdi * cosi[1], pdj * cosj[1], pdk * cosk[1], pos_1[1]),
(pdi * cosi[2], pdj * cosj[2], pdk * cosk[2], pos_1[2]),
( 0, 0, 0, 1))
(0, 0, 0, 1))

m = numpy.array(m)

Expand All @@ -212,6 +214,7 @@ def nifti_size(self):


class _StorageInstance(object):

def __init__(self, d):
self.uid = d['uid']
self.instance_number = d['instance_number']
Expand All @@ -238,6 +241,7 @@ def dicom(self):

class _db_nochange:
"""context guard for read-only database access"""

def __enter__(self):
self.c = DB.cursor()
return self.c
Expand All @@ -251,6 +255,7 @@ def __exit__(self, type, value, traceback):

class _db_change:
"""context guard for database access requiring a commit"""

def __enter__(self):
self.c = DB.cursor()
return self.c
Expand Down
Loading