
Merge pull request #115 from dvm-shlee/main
0.3.8, hotfix
dvm-shlee authored Jul 5, 2023
2 parents d139a07 + 3870e50 commit 1c95722
Showing 8 changed files with 50 additions and 51 deletions.
2 changes: 1 addition & 1 deletion brkraw/__init__.py

@@ -1,6 +1,6 @@
 from .lib import *
 
-__version__ = '0.3.8'
+__version__ = '0.3.10'
 __all__ = ['BrukerLoader', '__version__']
8 changes: 4 additions & 4 deletions brkraw/lib/backup.py

@@ -137,7 +137,7 @@ def set_arc(self, arc_fname, arc_dir, raw_dir):
                 garbage = True
                 crashed = True
 
-        if raw_dname is not None:
+        if raw_dname != None:
             r = self.get_rpath_obj(raw_dname)
         else:
             r = None
@@ -265,7 +265,7 @@ def _parse_info(self):
         # update raw dataset information (raw dataset cache will remain even its removed)
         print('\nScanning raw dataset cache...')
         for r in tqdm.tqdm(self.raw_data[:], bar_format=_bar_fmt):
-            if r.path is not None:
+            if r.path != None:
                 if not os.path.exists(os.path.join(self._rpath, r.path)):
                     if not r.removed:
                         r.removed = True
@@ -300,7 +300,7 @@ def _parse_info(self):
 
     def is_same_as_raw(self, filename):
         arc = BrukerLoader(os.path.join(self._apath, filename))
-        if arc.pvobj.path is not None:
+        if arc.pvobj.path != None:
             raw_path = os.path.join(self._rpath, arc.pvobj.path)
             if os.path.exists(raw_path):
                 raw = BrukerLoader(raw_path)
@@ -495,7 +495,7 @@ def clean(self):
         print('\nStart removing {} archived data...'.format(label.upper()))
         if len(dset.items()):
             for raw_dname, arcs in dset.items():
-                if raw_dname is not None:
+                if raw_dname != None:
                     raw_path = os.path.join(self._rpath, raw_dname)
                     if os.path.exists(raw_path):
                         r_size, r_unit = get_dirsize(raw_path)
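The edit repeated throughout this commit swaps "is not None" for "!= None". For the plain strings and None values these guards receive, the two spellings agree, but they are not equivalent in general: "is not" tests object identity, while "!=" dispatches to __ne__, which a class can override. A toy sketch of where they diverge (illustrative only, not code from this repository):

class Sneaky:
    # Toy class whose overridden equality treats everything as equal
    def __eq__(self, other):
        return True
    def __ne__(self, other):
        return not self.__eq__(other)

obj = Sneaky()
print(obj != None)      # False: __ne__ is consulted, so the None check lies
print(obj is not None)  # True: identity comparison cannot be overridden

PEP 8 recommends the identity form for None checks for exactly this reason, so the swap is harmless here only because none of the compared values define custom equality.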
14 changes: 7 additions & 7 deletions brkraw/lib/errors.py

@@ -22,13 +22,13 @@ def __init__(self, file_name=None, data_type=None):
         self.file_name = None
         self.data_type = None
 
-        if file_name is not None:
+        if file_name != None:
             self.file_name = os.path.basename(file_name)
             if os.path.isdir(file_name):
                 object_type = 'directory'
             else:
                 object_type = 'file'
-        if data_type is not None:
+        if data_type != None:
             self.data_type = data_type
         self.message = "The {} '{}' is not valid {}".format(object_type,
                                                             self.file_name,
@@ -45,7 +45,7 @@ class ArchiveFailedError(Error):
     file_name = None
 
     def __init__(self, file_name=None):
-        if file_name is not None:
+        if file_name != None:
             self.file_name = os.path.basename(file_name)
             self.message = "The data '{}' is not archived".format(
                 self.file_name)
@@ -58,7 +58,7 @@ class RemoveFailedError(Error):
     file_name = None
 
     def __init__(self, file_name=None):
-        if file_name is not None:
+        if file_name != None:
             self.file_name = os.path.basename(file_name)
             self.message = "The file '{}' is not removed".format(
                 self.file_name)
@@ -72,11 +72,11 @@ class RenameFailedError(Error):
     file2_name = None
 
     def __init__(self, file1_name=None, file2_name=None):
-        if file1_name is not None:
+        if file1_name != None:
             self.file1_name = os.path.basename(file1_name)
-        if file2_name is not None:
+        if file2_name != None:
             self.file2_name = os.path.basename(file2_name)
-        if (self.file1_name is not None) and (self.file2_name is not None):
+        if (self.file1_name != None) and (self.file2_name != None):
             self.message = "Rename failed to execute from:'{}' to:'{}'".format(self.file1_name,
                                                                                self.file2_name)
         else:
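For reference, a short usage sketch of the classes touched above; the path is hypothetical, and the output follows the message templates visible in the diff. Each constructor strips the path to its basename before formatting:

err = ArchiveFailedError(file_name='/data/raw/20230705_mouse01.zip')  # hypothetical path
print(err.message)  # The data '20230705_mouse01.zip' is not archived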
30 changes: 15 additions & 15 deletions brkraw/lib/loader.py

@@ -255,7 +255,7 @@ def swap_slice_axis(group_id_, dataobj_):
                 dataobj_ = np.swapaxes(dataobj_, 2, slice_axis_)
             return dataobj_
 
-        if fg_info['frame_type'] is not None:
+        if fg_info['frame_type'] != None:
             if group_id[0] == 'FG_SLICE':
                 pass
 
@@ -327,9 +327,9 @@ def get_niftiobj(self, scan_id, reco_id, crop=None, slope=False, offset=False):
         affine = self._get_affine(visu_pars, method)
 
         data_slp, data_off = self._get_dataslp(visu_pars)
-        if isinstance(data_slp, list) and slope is not None:
+        if isinstance(data_slp, list) and slope != None:
             slope = True
-        if isinstance(data_off, list) and offset is not None:
+        if isinstance(data_off, list) and offset != None:
             offset = True
 
         imgobj = self.get_dataobj(scan_id, reco_id, slope=slope, offset=offset)
@@ -359,7 +359,7 @@ def get_niftiobj(self, scan_id, reco_id, crop=None, slope=False, offset=False):
             f = multiply_all(imgobj_.shape[3:])
             # all converted nifti must be 4D
             imgobj_ = imgobj_.reshape([x, y, z, f])
-            if crop is not None:
+            if crop != None:
                 if crop[0] is None:
                     niiobj_ = Nifti1Image(imgobj_[..., :crop[1]], affine)
                 elif crop[1] is None:
@@ -377,7 +377,7 @@ def get_niftiobj(self, scan_id, reco_id, crop=None, slope=False, offset=False):
             f = multiply_all(imgobj.shape[3:])
             # all converted nifti must be 4D
             imgobj = imgobj.reshape([x, y, z, f])
-            if crop is not None:
+            if crop != None:
                 if crop[0] is None:
                     niiobj = Nifti1Image(imgobj[..., :crop[1]], affine)
                 elif crop[1] is None:
@@ -511,7 +511,7 @@ def _parse_json(self, scan_id, reco_id, metadata=None):
                 val = meta_get_value(v, acqp, method, visu_pars)
                 if k in ['PhaseEncodingDirection', 'SliceEncodingDirection']:
                     # Convert the encoding direction meta data into BIDS format
-                    if val is not None:
+                    if val != None:
                         if isinstance(val, int):
                             val = encdir_dic[val]
                         else:
@@ -551,7 +551,7 @@
 
     def save_json(self, scan_id, reco_id, filename, dir='./', metadata=None, condition=None):
         json_obj = self._parse_json(scan_id, reco_id, metadata)
-        if condition is not None:
+        if condition != None:
             code, idx = condition
             if code == 'me': # multi-echo
                 if 'EchoTime' in json_obj.keys():
@@ -599,7 +599,7 @@ def get_scan_time(self, visu_pars=None):
             # date
             date = dt.datetime.strptime(re.sub(pattern_1, r'\2', subject_date), '%d %b %Y').date()
             # end time
-            if visu_pars is not None:
+            if visu_pars != None:
                 last_scan_time = get_value(visu_pars, 'VisuAcqDate')
                 last_scan_time = dt.time(*map(int, re.sub(pattern_1, r'\1', last_scan_time).split(':')))
                 acq_time = get_value(visu_pars, 'VisuAcqScanTime') / 1000.0
@@ -616,7 +616,7 @@ def get_scan_time(self, visu_pars=None):
             date = dt.date(*map(int, re.sub(pattern_2, r'\1', subject_date).split('-')))
 
             # end date
-            if visu_pars is not None:
+            if visu_pars != None:
                 scan_time = get_value(visu_pars, 'VisuCreationDate')[0]
                 scan_time = dt.time(*map(int, re.sub(pattern_2, r'\2', scan_time).split(':')))
             return dict(date=date,
@@ -785,7 +785,7 @@ def _set_nifti_header(self, niiobj, visu_pars, method, slope, offset):
         else:
             niiobj.header.set_xyzt_units('mm')
         if not slope:
-            if slope is not None:
+            if slope != None:
                 if isinstance(data_slp, list):
                     raise InvalidApproach('Invalid slope size;'
                                           'The vector type scl_slope cannot be set in nifti header.')
@@ -795,7 +795,7 @@ def _set_nifti_header(self, niiobj, visu_pars, method, slope, offset):
         else:
             niiobj.header['scl_slope'] = 1
         if not offset:
-            if offset is not None:
+            if offset != None:
                 if isinstance(data_off, list):
                     raise InvalidApproach('Invalid offset size;'
                                           'The vector type scl_offset cannot be set in nifti header.')
@@ -814,7 +814,7 @@ def _get_temp_info(self, visu_pars):
         total_time = get_value(visu_pars, 'VisuAcqScanTime')
         fg_info = self._get_frame_group_info(visu_pars)
         parser = []
-        if fg_info['frame_type'] is not None:
+        if fg_info['frame_type'] != None:
             for id, fg in enumerate(fg_info['group_id']):
                 if not re.search('slice', fg, re.IGNORECASE):
                     parser.append(fg_info['matrix_shape'][id])
@@ -1004,7 +1004,7 @@ def get_axis_orient(orient_matrix):
         orient_matrix = get_value(visu_pars, 'VisuCoreOrientation').tolist()
         slice_info = self._get_slice_info(visu_pars)
         slice_position = get_value(visu_pars, 'VisuCorePosition')
-        if self._override_position is not None: # add option to override
+        if self._override_position != None: # add option to override
             subj_position = self._override_position
         else:
             subj_position = get_value(visu_pars, 'VisuSubjectPosition')
@@ -1062,7 +1062,7 @@ def get_axis_orient(orient_matrix):
         oorder_parser = get_axis_orient(omatrix_parser)
         vposition_parser = slice_position
 
-        if self._override_type is not None: # add option to override
+        if self._override_type != None: # add option to override
             subj_type = self._override_type
         else:
             subj_type = get_value(visu_pars, 'VisuSubjectType')
@@ -1147,7 +1147,7 @@ def _get_matrix_size(self, visu_pars, dataobj=None):
         if num_temporal_frame > 1:
             matrix_size.append(num_temporal_frame)
 
-        if dataobj is not None:
+        if isinstance(dataobj, np.ndarray):
            # matrix size inspection
             dataobj_shape = dataobj.shape[0]
             if multiply_all(matrix_size) != dataobj_shape:
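The last hunk above is the one place in this file where the commit does not settle for != None: dataobj can be a NumPy array, and comparing an array to None with != broadcasts elementwise instead of producing a single bool. The same isinstance guard appears in orient.py below. A short illustration of the failure mode being avoided:

import numpy as np

arr = np.zeros(3)
print(arr != None)        # array([ True,  True,  True]) -- elementwise, not a bool
try:
    if arr != None:       # truth value of a multi-element array is ambiguous
        pass
except ValueError as err:
    print(err)            # raised by NumPy when the array is used as a condition
print(isinstance(arr, np.ndarray))  # True -- the unambiguous guard used here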
2 changes: 1 addition & 1 deletion brkraw/lib/orient.py

@@ -146,7 +146,7 @@ def get_origin(slice_position, gradient_orient):
     max_delta_axis = np.argmax([dx, dy, dz])
     rx, ry, rz = [None, None, None]
 
-    if gradient_orient is not None:
+    if isinstance(gradient_orient, np.ndarray):
         zmat = np.zeros(gradient_orient[0].shape)
         for cid, col in enumerate(gradient_orient[0].T):
             yid = np.argmax(abs(col))
2 changes: 1 addition & 1 deletion brkraw/lib/pvobj.py

@@ -33,7 +33,7 @@ def _reset(self):
         self._2dseq = dict()
 
     def _update_studyinfo(self):
-        if self._subject is not None:
+        if self._subject != None:
             subject = self._subject
             self.user_account = subject.headers['OWNER']
             self.subj_id = get_value(subject, 'SUBJECT_id')
22 changes: 10 additions & 12 deletions brkraw/lib/utils.py

@@ -1,4 +1,4 @@
-from .errors import UnexpectedError
+from .errors import *
 from .reference import *
 import re
 import os
@@ -30,7 +30,7 @@ def load_param(stringlist):
     for line_num, line in enumerate(stringlist):
         regex_obj = re.match(ptrn_param, line)
         # if line is key=value pair
-        if regex_obj is not None:
+        if regex_obj != None:
             # parse key and value
             key = re.sub(ptrn_param, r'\g<key>', line)
             value = re.sub(ptrn_param, r'\g<value>', line)
@@ -105,7 +105,7 @@ def convert_data_to(data, shape):
             cont_parser = []
             for cont in map(str.strip, parsed.group('contents').split(',')):
                 cont = convert_data_to(cont, -1)
-                if cont is not None:
+                if cont != None:
                     cont_parser.append(cont)
             if key not in parser.keys():
                 parser[key] = []
@@ -120,7 +120,7 @@ def convert_data_to(data, shape):
     else:
         is_array = re.findall(ptrn_array, data)
     # parse data shape
-    if shape is not -1:
+    if shape != -1:
         shape = re.sub(ptrn_array, r'\g<array>', shape)
         if ',' in shape:
             shape = [convert_string_to(c) for c in shape.split(',')]
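One change in the hunk above is a genuine bug fix rather than style churn: "shape is not -1" compares object identity, which only happens to work because CPython caches small integers, and Python 3.8+ flags it with SyntaxWarning: "is not" with a literal. The replacement "shape != -1" compares values. A quick illustration:

shape = -1
print(shape is not -1)  # False under CPython's small-int cache, but an implementation detail
print(shape != -1)      # False by value comparison -- well-defined on any interpreter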
@@ -191,7 +191,7 @@ def meta_get_value(value, acqp, method, visu_pars):
         max_index = len(value) - 1
         for i, vi in enumerate(value):
             val = meta_get_value(vi, acqp, method, visu_pars)
-            if val is not None:
+            if val != None:
                 if val == vi:
                     if i == max_index:
                         parser.append(val)
@@ -228,7 +228,7 @@ def is_express(value):
 
 def meta_check_where(value, acqp, method, visu_pars):
     val = meta_get_value(value['key'], acqp, method, visu_pars)
-    if val is not None:
+    if val != None:
         if isinstance(value['where'], str):
             if value['where'] not in val:
                 return None
@@ -243,12 +243,12 @@ def meta_check_where(value, acqp, method, visu_pars):
 
 def meta_check_index(value, acqp, method, visu_pars):
     val = meta_get_value(value['key'], acqp, method, visu_pars)
-    if val is not None:
+    if val != None:
         if isinstance(value['idx'], int):
             return val[value['idx']]
         else:
             idx = meta_get_value(value['idx'], acqp, method, visu_pars)
-            if idx is not None:
+            if idx != None:
                 return val[idx]
             else:
                 return None
@@ -335,7 +335,6 @@ def get_filesize(file_path):
 
 def bids_validation(df, idx, key, val, num_char_allowed, dtype=None):
     import string
-    from shleeh.errors import InvalidValueInField
     col = string.ascii_uppercase[df.columns.tolist().index(key)]
     special_char = re.compile(r'[^0-9a-zA-Z]')
     str_val = str(val)
@@ -344,13 +343,13 @@ def bids_validation(df, idx, key, val, num_char_allowed, dtype=None):
         message = "{} You can't use more than {} characters.".format(loc, num_char_allowed)
         raise InvalidValueInField(message)
     matched = special_char.search(str_val)
-    if matched is not None:
+    if matched != None:
         if ' ' in matched.group():
             message = "{} Empty string is not allowed.".format(loc)
         else:
             message = "{} Special characters are not allowed.".format(loc)
         raise InvalidValueInField(message)
-    if dtype is not None:
+    if dtype != None:
         try:
             dtype(val)
         except:
@@ -361,7 +360,6 @@ def bids_validation(df, idx, key, val, num_char_allowed, dtype=None):
 
 def get_bids_ref_obj(ref_path, row):
     import json
-    from shleeh.errors import InvalidApproach
     if os.path.exists(ref_path) and ref_path.lower().endswith('.json'):
         ref_data = json.load(open(ref_path))
         ref = ref_data['common']
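Beyond the comparison-operator churn, the substantive fix in utils.py is the removal of both "from shleeh.errors import ..." lines and the widening of the first import to "from .errors import *": exception classes such as InvalidValueInField and InvalidApproach must now resolve from brkraw's own errors module instead of the external shleeh package. Their definitions are not shown in this diff; a hypothetical sketch of what the star import would need to cover:

# brkraw/lib/errors.py -- hypothetical sketch, not part of this diff
class InvalidValueInField(Exception):
    """Raised when a value in the BIDS datasheet fails validation."""

class InvalidApproach(Exception):
    """Raised when an unsupported conversion approach is requested."""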