Commit
Several small code readability cleanups
manthey committed Jan 6, 2025
1 parent 58d1230 commit 2198996
Showing 64 changed files with 207 additions and 247 deletions.
5 changes: 2 additions & 3 deletions docs/generate_format_table.py
@@ -89,9 +89,8 @@ def combine_rows(results):
result['tilesource'],
]
break
else:
row_key_index += 1
row_key = f'{row_base_key}_{row_key_index}'
row_key_index += 1
row_key = f'{row_base_key}_{row_key_index}'
if row_key not in table_rows:
table_rows[row_key] = result
return table_rows
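This hunk, and the matching `setup.py` and `rediscache.py` hunks further down, apply the same readability rule: when an `if` branch ends by leaving the block (`break` or `return`), the `else:` that follows only adds a level of indentation and can be dropped without changing behavior. A minimal, self-contained sketch of the pattern (the function and names below are illustrative, not code from this repository):

```python
def first_free_slot(used):
    """Return the lowest non-negative integer not in `used`."""
    slot = 0
    while True:
        # Before the cleanup this body would read:
        #     if slot not in used:
        #         break
        #     else:
        #         slot += 1
        # The `if` branch exits the loop, so the `else:` is redundant;
        # the flattened form below behaves identically.
        if slot not in used:
            break
        slot += 1
    return slot


print(first_free_slot({0, 1, 3}))  # prints 2
```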
12 changes: 6 additions & 6 deletions girder/girder_large_image/__init__.py
@@ -240,7 +240,7 @@ def checkForLargeImageFiles(event): # noqa
pass
# We couldn't automatically set this as a large image
girder.logger.info(
'Saved file %s cannot be automatically used as a largeImage' % str(file['_id']))
'Saved file %s cannot be automatically used as a largeImage', str(file['_id']))


def removeThumbnails(event):
@@ -411,7 +411,7 @@ def metadataSearchHandler( # noqa
return []
filter = {'$and': filter} if len(filter) > 1 else filter[0]
result = {}
logger.debug('Metadata search uses filter: %r' % filter)
logger.debug('Metadata search uses filter: %r', filter)
for model in searchModels or types:
modelInst = ModelImporter.model(*model if isinstance(model, tuple) else [model])
if searchModels is None:
@@ -437,7 +437,7 @@ def metadataSearchHandler( # noqa
if entry is not None and offset:
offset -= 1
continue
elif entry is not None:
if entry is not None:
result[searchModels[model]['model']].append(resultModelInst.filter(entry, user))
if limit and len(result[searchModels[model]['model']]) == limit:
break
@@ -566,7 +566,7 @@ def yamlConfigFile(folder, name, user):
if item:
for file in Item().childFiles(item):
if file['size'] > 10 * 1024 ** 2:
logger.info('Not loading %s -- too large' % file['name'])
logger.info('Not loading %s -- too large', file['name'])
continue
with File().open(file) as fptr:
config = yaml.safe_load(fptr)
@@ -658,7 +658,7 @@ def validateBoolean(doc):
val = doc['value']
if str(val).lower() not in ('false', 'true', ''):
raise ValidationException('%s must be a boolean.' % doc['key'], 'value')
doc['value'] = (str(val).lower() != 'false')
doc['value'] = str(val).lower() != 'false'


@setting_utilities.validator({
@@ -675,7 +675,7 @@ def validateBooleanOrICCIntent(doc):
if str(val).lower() not in ('false', 'true', ''):
raise ValidationException(
'%s must be a boolean or a named intent.' % doc['key'], 'value')
doc['value'] = (str(val).lower() != 'false')
doc['value'] = str(val).lower() != 'false'


@setting_utilities.validator({
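The logging changes in this file (and in several files below) swap eager `%` interpolation for logging's deferred formatting: the format string and its arguments are passed separately, and interpolation only happens if a handler actually emits the record. A minimal sketch of the difference, assuming a logger configured at WARNING level (the `Expensive` class is illustrative):

```python
import logging

logging.basicConfig(level=logging.WARNING)  # DEBUG records are filtered out
logger = logging.getLogger('example')


class Expensive:
    """Stand-in for an object whose string form is costly to build."""

    def __str__(self):
        print('  __str__ was called')
        return 'expensive-value'


obj = Expensive()

# Eager: the '%' interpolation runs before logger.debug is even called, so
# __str__ executes although the record is then discarded.
logger.debug('value is %s' % obj)

# Deferred: the argument is stored on the LogRecord and formatted only if a
# handler emits it; at WARNING level __str__ never runs here.
logger.debug('value is %s', obj)
```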
4 changes: 2 additions & 2 deletions girder/girder_large_image/girder_tilesource.py
@@ -47,7 +47,7 @@ def getLRUHash(*args, **kwargs):
kwargs.get('jpegSubsampling', 0),
kwargs.get('tiffCompression', 'raw'),
kwargs.get('edge', False),
kwargs.get('style', None))
kwargs.get('style'))

def getState(self):
if hasattr(self, '_classkey'):
@@ -211,7 +211,7 @@ def getGirderTileSource(item, file=None, *args, **kwargs):
:returns: A girder tilesource for the item.
"""
if not isinstance(item, dict):
item = Item().load(item, user=kwargs.get('user', None), level=AccessType.READ)
item = Item().load(item, user=kwargs.get('user'), level=AccessType.READ)
sourceName = getGirderTileSourceName(item, file, *args, **kwargs)
if sourceName:
return AvailableGirderTileSources[sourceName](item, *args, **kwargs)
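`kwargs.get('style', None)` and `kwargs.get('user', None)` lose their explicit defaults because `dict.get` already returns `None` for a missing key. A tiny illustration (the dictionary below is made up):

```python
kwargs = {'encoding': 'JPEG'}

# Equivalent: dict.get defaults to None when the key is absent.
assert kwargs.get('style', None) is None
assert kwargs.get('style') is None

# An explicit default is only worth writing when it is not None.
assert kwargs.get('edge', False) is False
```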
2 changes: 1 addition & 1 deletion girder/girder_large_image/models/image_item.py
@@ -237,7 +237,7 @@ def _loadTileSource(cls, item, **kwargs):
# tileSource = girder_tilesource.getGirderTileSource(item, **kwargs)
# but, instead, log that the original source no longer works and
# reraise the exception
logger.warning('The original tile source for item %s is not working' % item['_id'])
logger.warning('The original tile source for item %s is not working', item['_id'])
try:
file = File().load(item['largeImage']['fileId'], force=True)
localPath = File().getLocalFilePath(file)
6 changes: 3 additions & 3 deletions girder/girder_large_image/rest/large_image_resource.py
@@ -632,7 +632,7 @@ def _configValidateException(self, exc, lineno=None):
matches = re.search(r'\[line[ ]*(\d+)\]', msg)
if matches:
line = int(matches.groups()[0])
msg = msg.split('\n')[0].strip() or 'General error'
msg = msg.split('\n', 1)[0].strip() or 'General error'
msg = msg.rsplit(": '<string>'", 1)[0].rsplit("'<string>'", 1)[-1].strip()
return [{'line': line - 1, 'message': msg}]
except Exception:
@@ -759,9 +759,9 @@ def configReplace(self, config, restart):
return {'status': 'no change'}
newpath = path + '.' + time.strftime(
'%Y%m%d-%H%M%S', time.localtime(os.stat(path).st_mtime))
logger.info('Copying existing config file from %s to %s' % (path, newpath))
logger.info('Copying existing config file from %s to %s', path, newpath)
shutil.copy2(path, newpath)
logger.warning('Replacing config file %s' % (path))
logger.warning('Replacing config file %s', path)
open(path, 'w').write(config)

class Restart(cherrypy.process.plugins.Monitor):
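Besides the logging cleanup, `msg.split('\n')[0]` becomes `msg.split('\n', 1)[0]`: when only the first line is needed, the `maxsplit` argument stops splitting after the first separator instead of building a list of every line. For example (the message string is made up):

```python
msg = 'General error: something failed\ndetail line one\ndetail line two'

# Splits on every newline and builds a three-element list,
# even though only the first element is used.
first = msg.split('\n')[0]

# Stops after the first newline; the list never has more than two elements.
first_bounded = msg.split('\n', 1)[0]

assert first == first_bounded == 'General error: something failed'
```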
22 changes: 11 additions & 11 deletions girder/girder_large_image/rest/tiles.py
@@ -118,9 +118,9 @@ def _pickleParams(params):
if not str(params.get('encoding')).startswith('pickle'):
return None
params['format'] = large_image.constants.TILE_FORMAT_NUMPY
pickle = params['encoding'].split(':')[-1]
pickl = params['encoding'].split(':')[-1]
del params['encoding']
return int(pickle) or 4 if pickle.isdigit() else 4
return int(pickl) or 4 if pickl.isdigit() else 4


def _pickleOutput(data, protocol):
@@ -764,7 +764,7 @@ def getTilesThumbnail(self, item, params):
('contentDispositionFileName', str),
])
_handleETag('getTilesThumbnail', item, params)
pickle = _pickleParams(params)
pickl = _pickleParams(params)
try:
result = self.imageItemModel.getThumbnail(item, **params)
except TileGeneralError as e:
@@ -774,8 +774,8 @@ def getTilesThumbnail(self, item, params):
if not isinstance(result, tuple):
return result
thumbData, thumbMime = result
if pickle:
thumbData, thumbMime = _pickleOutput(thumbData, pickle)
if pickl:
thumbData, thumbMime = _pickleOutput(thumbData, pickl)
self._setContentDisposition(
item, params.get('contentDisposition'), thumbMime, 'thumbnail',
params.get('contentDispositionFilename'))
@@ -904,13 +904,13 @@ def getTilesRegion(self, item, params):
('contentDispositionFileName', str),
])
_handleETag('getTilesRegion', item, params)
pickle = _pickleParams(params)
pickl = _pickleParams(params)
setResponseTimeLimit(86400)
try:
regionData, regionMime = self.imageItemModel.getRegion(
item, **params)
if pickle:
regionData, regionMime = _pickleOutput(regionData, pickle)
if pickl:
regionData, regionMime = _pickleOutput(regionData, pickl)
except TileGeneralError as e:
raise RestException(e.args[0])
except ValueError as e:
@@ -1332,7 +1332,7 @@ def tileFrames(self, item, params):

params = self._parseParams(params, True, self._tileFramesParams)
_handleETag('tileFrames', item, params)
pickle = _pickleParams(params)
pickl = _pickleParams(params)
if 'frameList' in params:
params['frameList'] = [
int(f.strip()) for f in str(params['frameList']).lstrip(
@@ -1348,8 +1348,8 @@ def tileFrames(self, item, params):
if not isinstance(result, tuple):
return result
regionData, regionMime = result
if pickle:
regionData, regionMime = _pickleOutput(regionData, pickle)
if pickl:
regionData, regionMime = _pickleOutput(regionData, pickl)
self._setContentDisposition(
item, params.get('contentDisposition'), regionMime, 'tileframes',
params.get('contentDispositionFilename'))
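Renaming the local variable `pickle` to `pickl` keeps it from shadowing the standard-library module of the same name; in any scope that also needs `pickle.dumps`, the shadowed name would fail at the point of use, and even where it is harmless it misleads readers. A sketch of the hazard (the functions below are illustrative, not the code in `tiles.py`):

```python
import pickle


def encode_shadowed(params):
    # The assignment rebinds `pickle` to a string for this whole scope, so
    # the dumps call below would raise AttributeError if it were executed.
    pickle = params['encoding'].split(':')[-1]
    return pickle.dumps(params)


def encode(params):
    # A distinct local name keeps the module reachable.
    pickl = params['encoding'].split(':')[-1]
    protocol = int(pickl) if pickl.isdigit() else pickle.HIGHEST_PROTOCOL
    return pickle.dumps(params, protocol=protocol)


data = encode({'encoding': 'pickle:4'})
print(pickle.loads(data))  # {'encoding': 'pickle:4'}
```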
3 changes: 1 addition & 2 deletions girder/setup.py
@@ -19,8 +19,7 @@ def prerelease_local_scheme(version):

if os.getenv('CIRCLE_BRANCH') in ('master', ):
return ''
else:
return get_local_node_and_date(version)
return get_local_node_and_date(version)


try:
5 changes: 2 additions & 3 deletions girder/test_girder/test_tiles_rest.py
@@ -129,9 +129,8 @@ def _createTestTiles(server, admin, params=None, info=None, error=None):
except AssertionError as exc:
if error:
assert error in exc.args[0]
return
else:
raise
return None
raise
assert utilities.respStatus(resp) == 200
infoDict = resp.json
if info:
(file name not shown)
@@ -55,7 +55,7 @@ def validateBoolean(doc):
val = doc['value']
if str(val).lower() not in ('false', 'true', ''):
raise ValidationException('%s must be a boolean.' % doc['key'], 'value')
doc['value'] = (str(val).lower() != 'false')
doc['value'] = str(val).lower() != 'false'


# Defaults
(file name not shown)
@@ -50,7 +50,7 @@ def _itemFromEvent(event, identifierEnding, itemAccessLevel=AccessType.READ):
reference['uuid'] = str(uuid.uuid4())
if 'userId' not in reference or 'itemId' not in reference or 'fileId' not in reference:
logger.error('Reference does not contain required information.')
return
return None

userId = reference['userId']
imageId = reference['fileId']
(file name not shown)
@@ -842,7 +842,7 @@ def load(self, id, region=None, getElements=True, *args, **kwargs):
"""
annotation = super().load(id, *args, **kwargs)
if annotation is None:
return
return None

if getElements:
# It is possible that we are trying to read the elements of an
@@ -1108,7 +1108,7 @@ def validate(self, doc): # noqa
element['id'] = str(element['id'])
# Handle elements with large arrays by checking that a
# conversion to a numpy array works
keys = None
keys = {}
if len(element.get('points', element.get('values', []))) > VALIDATE_ARRAY_LENGTH:
key = 'points' if 'points' in element else 'values'
try:
@@ -1230,7 +1230,7 @@ def revertVersion(self, id, version=None, user=None, force=False):
version = oldVersions[1]['_version']
annotation = Annotation().getVersion(id, version, user, force=force)
if annotation is None:
return
return None
# If this is the most recent (active) annotation, don't do anything.
# Otherwise, revert it.
if not annotation.get('_active', True):
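The bare `return` → `return None` edits here (and the opposite change in `config.py` below) follow a common consistency rule, also encoded by linters such as flake8-return: if any path in a function returns a value, spell out `return None` on the other paths; if no path returns a value, keep the bare `return`. A small sketch with illustrative functions:

```python
def find_annotation(annotations, wanted_id):
    # Other paths return a value, so the "not found" path spells out
    # `return None` rather than a bare `return`.
    for annotation in annotations:
        if annotation.get('_id') == wanted_id:
            return annotation
    return None


def warn_missing(logger, wanted_id):
    # No path produces a value, so a bare `return` used as an early
    # exit is the clearer choice.
    if wanted_id is None:
        return
    logger.warning('Annotation %s was not found', wanted_id)


print(find_annotation([{'_id': 'a1'}], 'a2'))  # None
```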
(file name not shown)
@@ -249,13 +249,13 @@ def _parseFeature(self, geoelem): # noqa
for entry in geoelem:
self._parseFeature(entry)
if not isinstance(geoelem, dict) or 'type' not in geoelem:
return
return None
if geoelem['type'] == 'FeatureCollection':
return self._parseFeature(geoelem.get('features', []))
if geoelem['type'] == 'GeometryCollection' and isinstance(geoelem.get('geometries'), list):
for entry in geoelem['geometry']:
self._parseFeature({'type': 'Feature', 'geometry': entry})
return
return None
if geoelem['type'] in {'Point', 'LineString', 'Polygon', 'MultiPoint',
'MultiLineString', 'MultiPolygon'}:
geoelem = {'type': 'Feature', 'geometry': geoelem}
@@ -1422,7 +1422,7 @@ def data(self, columns, requiredColumns=None): # noqa
# collects data as a side effect
collist = self._getColumns()
if self.cancel:
return
return None
for coldata in self._datacolumns.values():
rows |= set(coldata.keys())
rows = sorted(rows)
@@ -1454,7 +1454,7 @@ def data(self, columns, requiredColumns=None): # noqa
if len(subdata) and len(subdata) < len(data):
data = subdata
if self.cancel:
return
return None
# Refresh our count, distinct, distinctcount, min, max for each column
for cidx, col in enumerate(colsout):
col['count'] = len([row[cidx] for row in data if row[cidx] is not None])
3 changes: 1 addition & 2 deletions girder_annotation/setup.py
@@ -19,8 +19,7 @@ def prerelease_local_scheme(version):

if os.getenv('CIRCLE_BRANCH') in ('master', ):
return ''
else:
return get_local_node_and_date(version)
return get_local_node_and_date(version)


try:
2 changes: 1 addition & 1 deletion girder_annotation/test_annotation/test_annotations.py
@@ -197,7 +197,7 @@ def testSave(self, admin):
versions = list(Annotation().versionList(saved2['_id']))
assert len(versions) == 2
# If we save with an old version, we should get the original id back
assert not versions[1]['_id'] == loaded['_id']
assert versions[1]['_id'] != loaded['_id']
saved3 = Annotation().save(versions[1])
assert saved3['_id'] == loaded['_id']

2 changes: 1 addition & 1 deletion large_image/cache_util/base.py
@@ -30,7 +30,7 @@ def logError(self, err: Any, func: Callable, msg: str) -> None:
"""
curtime = time.time()
key = (err, func)
if (curtime - self.lastError.get(key, {}).get('time', 0) > self.throttleErrors):
if curtime - self.lastError.get(key, {}).get('time', 0) > self.throttleErrors:
skipped = self.lastError.get(key, {}).get('skipped', 0)
if skipped:
msg += ' (%d similar messages)' % skipped
4 changes: 2 additions & 2 deletions large_image/cache_util/cache.py
@@ -125,7 +125,7 @@ class LruCacheMetaclass(type):
namedCaches: Dict[str, Any] = {}
classCaches: Dict[type, Any] = {}

def __new__(metacls, name, bases, namespace, **kwargs):
def __new__(mcs, name, bases, namespace, **kwargs):
# Get metaclass parameters by finding and removing them from the class
# namespace (necessary for Python 2), or preferentially as metaclass
# arguments (only in Python 3).
@@ -151,7 +151,7 @@ def __new__(metacls, name, bases, namespace, **kwargs):
timeout = kwargs.get('cacheTimeout', timeout)

cls = super().__new__(
metacls, name, bases, namespace)
mcs, name, bases, namespace)
if not cacheName:
cacheName = cls

3 changes: 1 addition & 2 deletions large_image/cache_util/rediscache.py
@@ -125,8 +125,7 @@ def maxsize(self) -> int:
maxmemory = self._getStat('maxmemory')
if maxmemory:
return maxmemory
else:
return self._getStat('total_system_memory')
return self._getStat('total_system_memory')

def _reconnect(self) -> None:
try:
8 changes: 2 additions & 6 deletions large_image/config.py
@@ -135,7 +135,7 @@ def _ignoreSourceNames(
"""
ignored_names = getConfig('source_%s_ignored_names' % configKey) or default
if not ignored_names or not os.path.isfile(path):
return None
return
if re.search(ignored_names, os.path.basename(path), flags=re.IGNORECASE):
raise exceptions.TileSourceError('File will not be opened by %s reader' % configKey)

@@ -157,12 +157,8 @@ def cpu_count(logical: bool = True) -> int:
count = min(count, len(os.sched_getaffinity(0)))
except AttributeError:
pass
try:
import psutil

if HAS_PSUTIL:
count = min(count, psutil.cpu_count(logical) or count)
except ImportError:
pass
return max(1, count)


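In `config.py`, the per-call `try: import psutil / except ImportError` block is replaced by a check of a `HAS_PSUTIL` flag. The flag's definition is outside this hunk, so the exact form below is an assumption; the usual shape of the pattern is to probe the optional dependency once at import time:

```python
import os

try:
    import psutil
    HAS_PSUTIL = True
except ImportError:
    HAS_PSUTIL = False


def cpu_count(logical: bool = True) -> int:
    """Best-effort CPU count, refined by psutil when it is installed."""
    count = os.cpu_count() or 1
    try:
        # Respect CPU affinity where the platform supports it (Linux).
        count = min(count, len(os.sched_getaffinity(0)))
    except AttributeError:
        pass
    if HAS_PSUTIL:
        count = min(count, psutil.cpu_count(logical) or count)
    return max(1, count)


print(cpu_count())
```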
2 changes: 1 addition & 1 deletion large_image/tilesource/__init__.py
@@ -321,7 +321,7 @@ def listMimeTypes(

__all__ = [
'TileSource', 'FileTileSource',
'exceptions', 'TileGeneralError', 'TileSourceError',
'TileGeneralError', 'TileSourceError',
'TileSourceAssetstoreError', 'TileSourceFileNotFoundError',
'TileGeneralException', 'TileSourceException', 'TileSourceAssetstoreException',
'TileOutputMimeTypes', 'TILE_FORMAT_IMAGE', 'TILE_FORMAT_PIL', 'TILE_FORMAT_NUMPY',
(Diffs for the remaining changed files are not shown.)
