Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Commit 42959a3

Browse files
Merge pull request #478 from ilogue/qform-provided-then-qform-code-2
MRG: parrec2nii sets qform code to 2. Set qform code to 2 instead of 0 when writing out parrec2nii images. Closes #463
2 parents 20cc0fc + aa184ed commit 42959a3

File tree

8 files changed

+445
-334
lines changed

8 files changed

+445
-334
lines changed

‎.travis.yml‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ before_install:
8888
-source venv/bin/activate
8989
-python --version# just to check
9090
-pip install -U pip wheel# upgrade to latest pip find 3.5 wheels; wheel to avoid errors
91-
-retry pip install nose flake8# always
91+
-retry pip install nose flake8 mock# always
9292
-wheelhouse_pip_install $EXTRA_PIP_FLAGS $DEPENDS
9393
# pydicom <= 0.9.8 doesn't install on python 3
9494
-if [ "${TRAVIS_PYTHON_VERSION:0:1}" == "2" ]; then

‎appveyor.yml‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ install:
4646
-"python -c\"import struct; print(struct.calcsize('P') * 8)\""
4747

4848
# Install the dependencies of the project.
49-
-"conda install --yes --quiet numpy scipy matplotlib nose h5py"
49+
-"conda install --yes --quiet numpy scipy matplotlib nose h5py mock"
5050
-"pip install pydicom"
5151
-"python setup.py install"
5252
-"SET NIBABEL_DATA_DIR=%CD%\\nibabel-data"

‎bin/parrec2nii‎

Lines changed: 1 addition & 326 deletions
Original file line numberDiff line numberDiff line change
@@ -1,333 +1,8 @@
11
#!python
22
"""PAR/REC to NIfTI converter
33
"""
4-
from __future__importdivision,print_function,absolute_import
54

6-
fromoptparseimportOptionParser,Option
7-
importnumpyasnp
8-
importnumpy.linalgasnpl
9-
importsys
10-
importos
11-
importcsv
12-
importnibabel
13-
importnibabel.parrecaspr
14-
fromnibabel.parrecimportone_line
15-
fromnibabel.mriutilsimportcalculate_dwell_time,MRIError
16-
importnibabel.nifti1asnifti1
17-
fromnibabel.filename_parserimportsplitext_addext
18-
fromnibabel.volumeutilsimportfname_ext_ul_case
19-
fromnibabel.orientationsimport (io_orientation,inv_ornt_aff,
20-
apply_orientation)
21-
fromnibabel.affinesimportapply_affine,from_matvec,to_matvec
22-
23-
24-
defget_opt_parser():
25-
# use module docstring for help output
26-
p=OptionParser(
27-
usage="%s [OPTIONS] <PAR files>\n\n"%sys.argv[0]+__doc__,
28-
version="%prog "+nibabel.__version__)
29-
p.add_option(
30-
Option("-v","--verbose",action="store_true",dest="verbose",
31-
default=False,
32-
help="""Make some noise."""))
33-
p.add_option(
34-
Option("-o","--output-dir",action="store",type="string",
35-
dest="outdir",default=None,
36-
help=one_line("""Destination directory for NIfTI files.
37-
Default: current directory.""")))
38-
p.add_option(
39-
Option("-c","--compressed",action="store_true",
40-
dest="compressed",default=False,
41-
help="Whether to write compressed NIfTI files or not."))
42-
p.add_option(
43-
Option("-p","--permit-truncated",action="store_true",
44-
dest="permit_truncated",default=False,
45-
help=one_line(
46-
"""Permit conversion of truncated recordings. Support for
47-
this is experimental, and results *must* be checked
48-
afterward for validity.""")))
49-
p.add_option(
50-
Option("-b","--bvs",action="store_true",dest="bvs",default=False,
51-
help=one_line(
52-
"""Output bvals/bvecs files in addition to NIFTI
53-
image.""")))
54-
p.add_option(
55-
Option("-d","--dwell-time",action="store_true",default=False,
56-
dest="dwell_time",
57-
help=one_line(
58-
"""Calculate the scan dwell time. If supplied, the magnetic
59-
field strength should also be supplied using
60-
--field-strength (default 3). The field strength must be
61-
supplied because it is not encoded in the PAR/REC
62-
format.""")))
63-
p.add_option(
64-
Option("--field-strength",action="store",type="float",
65-
dest="field_strength",
66-
help=one_line(
67-
"""The magnetic field strength of the recording, only needed
68-
for --dwell-time. The field strength must be supplied
69-
because it is not encoded in the PAR/REC format.""")))
70-
p.add_option(
71-
Option("-i","--volume-info",action="store_true",dest="vol_info",
72-
default=False,
73-
help=one_line(
74-
"""Export .PAR volume labels corresponding to the fourth
75-
dimension of the data. The dimension info will be stored in
76-
CSV format with the first row containing dimension labels
77-
and the subsequent rows (one per volume), the corresponding
78-
indices. Only labels that vary along the 4th dimension are
79-
exported (e.g. for a single volume structural scan there
80-
are no dynamic labels and no output file will be created).
81-
""")))
82-
p.add_option(
83-
Option("--origin",action="store",dest="origin",default="scanner",
84-
help=one_line(
85-
"""Reference point of the q-form transformation of the NIfTI
86-
image. If 'scanner' the (0,0,0) coordinates will refer to
87-
the scanner's iso center. If 'fov', this coordinate will be
88-
the center of the recorded volume (field of view). Default:
89-
'scanner'.""")))
90-
p.add_option(
91-
Option("--minmax",action="store",nargs=2,dest="minmax",
92-
help=one_line(
93-
"""Mininum and maximum settings to be stored in the NIfTI
94-
header. If any of them is set to 'parse', the scaled data is
95-
scanned for the actual minimum and maximum. To bypass this
96-
potentially slow and memory intensive step (the data has to
97-
be scaled and fully loaded into memory), fixed values can be
98-
provided as space-separated pair, e.g. '5.4 120.4'. It is
99-
possible to set a fixed minimum as scan for the actual
100-
maximum (and vice versa). Default: 'parse parse'.""")))
101-
p.set_defaults(minmax=('parse','parse'))
102-
p.add_option(
103-
Option("--store-header",action="store_true",dest="store_header",
104-
default=False,
105-
help=one_line(
106-
"""If set, all information from the PAR header is stored in
107-
an extension ofthe NIfTI file header. Default: off""")))
108-
p.add_option(
109-
Option("--scaling",action="store",dest="scaling",default='dv',
110-
help=one_line(
111-
"""Choose data scaling setting. The PAR header defines two
112-
different data scaling settings: 'dv' (values displayed on
113-
console) and 'fp' (floating point values). Either one can be
114-
chosen, or scaling can be disabled completely ('off'). Note
115-
that neither method will actually scale the data, but just
116-
store the corresponding settings in the NIfTI header, unless
117-
non-uniform scaling is used, in which case the data is
118-
stored in the file in scaled form. Default: 'dv'""")))
119-
p.add_option(
120-
Option('--keep-trace',action="store_true",dest='keep_trace',
121-
default=False,
122-
help=one_line("""Do not discard the diagnostic Philips DTI
123-
trace volume, if it exists in the data.""")))
124-
p.add_option(
125-
Option("--overwrite",action="store_true",dest="overwrite",
126-
default=False,
127-
help=one_line("""Overwrite file if it exists. Default:
128-
False""")))
129-
p.add_option(
130-
Option("--strict-sort",action="store_true",dest="strict_sort",
131-
default=False,
132-
help=one_line("""Use additional keys in determining the order
133-
to sort the slices within the .REC file. This may be necessary
134-
for more complicated scans with multiple echos,
135-
cardiac phases, ASL label states, etc.""")))
136-
returnp
137-
138-
139-
defverbose(msg,indent=0):
140-
ifverbose.switch:
141-
print("%s%s"% (' '*indent,msg))
142-
143-
144-
deferror(msg,exit_code):
145-
sys.stderr.write(msg+'\n')
146-
sys.exit(exit_code)
147-
148-
149-
defproc_file(infile,opts):
150-
# figure out the output filename, and see if it exists
151-
basefilename=splitext_addext(os.path.basename(infile))[0]
152-
ifopts.outdirisnotNone:
153-
# set output path
154-
basefilename=os.path.join(opts.outdir,basefilename)
155-
156-
# prep a file
157-
ifopts.compressed:
158-
verbose('Using gzip compression')
159-
outfilename=basefilename+'.nii.gz'
160-
else:
161-
outfilename=basefilename+'.nii'
162-
ifos.path.isfile(outfilename)andnotopts.overwrite:
163-
raiseIOError('Output file "%s" exists, use --overwrite to '
164-
'overwrite it'%outfilename)
165-
166-
# load the PAR header and data
167-
scaling='dv'ifopts.scaling=='off'elseopts.scaling
168-
infile=fname_ext_ul_case(infile)
169-
pr_img=pr.load(infile,
170-
permit_truncated=opts.permit_truncated,
171-
scaling=scaling,
172-
strict_sort=opts.strict_sort)
173-
pr_hdr=pr_img.header
174-
affine=pr_hdr.get_affine(origin=opts.origin)
175-
slope,intercept=pr_hdr.get_data_scaling(scaling)
176-
ifopts.scaling!='off':
177-
verbose('Using data scaling "%s"'%opts.scaling)
178-
# get original scaling, and decide if we scale in-place or not
179-
ifopts.scaling=='off':
180-
slope=np.array([1.])
181-
intercept=np.array([0.])
182-
in_data=pr_img.dataobj.get_unscaled()
183-
out_dtype=pr_hdr.get_data_dtype()
184-
elifnotnp.any(np.diff(slope))andnotnp.any(np.diff(intercept)):
185-
# Single scalefactor case
186-
slope=slope.ravel()[0]
187-
intercept=intercept.ravel()[0]
188-
in_data=pr_img.dataobj.get_unscaled()
189-
out_dtype=pr_hdr.get_data_dtype()
190-
else:
191-
# Multi scalefactor case
192-
slope=np.array([1.])
193-
intercept=np.array([0.])
194-
in_data=np.array(pr_img.dataobj)
195-
out_dtype=np.float64
196-
# Reorient data block to LAS+ if necessary
197-
ornt=io_orientation(np.diag([-1,1,1,1]).dot(affine))
198-
ifnp.all(ornt== [[0,1],
199-
[1,1],
200-
[2,1]]):# already in LAS+
201-
t_aff=np.eye(4)
202-
else:# Not in LAS+
203-
t_aff=inv_ornt_aff(ornt,pr_img.shape)
204-
affine=np.dot(affine,t_aff)
205-
in_data=apply_orientation(in_data,ornt)
206-
207-
bvals,bvecs=pr_hdr.get_bvals_bvecs()
208-
ifnotopts.keep_trace:# discard Philips DTI trace if present
209-
ifbvecsisnotNone:
210-
bad_mask=np.logical_and(bvals!=0, (bvecs==0).all(axis=1))
211-
ifbad_mask.sum()>0:
212-
pl='s'ifbad_mask.sum()!=1else''
213-
verbose('Removing %s DTI trace volume%s'
214-
% (bad_mask.sum(),pl))
215-
good_mask=~bad_mask
216-
in_data=in_data[...,good_mask]
217-
bvals=bvals[good_mask]
218-
bvecs=bvecs[good_mask]
219-
220-
# Make corresponding NIfTI image
221-
nimg=nifti1.Nifti1Image(in_data,affine,pr_hdr)
222-
nhdr=nimg.header
223-
nhdr.set_data_dtype(out_dtype)
224-
nhdr.set_slope_inter(slope,intercept)
225-
226-
if'parse'inopts.minmax:
227-
# need to get the scaled data
228-
verbose('Loading (and scaling) the data to determine value range')
229-
ifopts.minmax[0]=='parse':
230-
nhdr['cal_min']=in_data.min()*slope+intercept
231-
else:
232-
nhdr['cal_min']=float(opts.minmax[0])
233-
ifopts.minmax[1]=='parse':
234-
nhdr['cal_max']=in_data.max()*slope+intercept
235-
else:
236-
nhdr['cal_max']=float(opts.minmax[1])
237-
238-
# container for potential NIfTI1 header extensions
239-
ifopts.store_header:
240-
# dump the full PAR header content into an extension
241-
withopen(infile,'rb')asfobj:# contents must be bytes
242-
hdr_dump=fobj.read()
243-
dump_ext=nifti1.Nifti1Extension('comment',hdr_dump)
244-
nhdr.extensions.append(dump_ext)
245-
246-
verbose('Writing %s'%outfilename)
247-
nibabel.save(nimg,outfilename)
248-
249-
# write out bvals/bvecs if requested
250-
ifopts.bvs:
251-
ifbvalsisNoneandbvecsisNone:
252-
verbose('No DTI volumes detected, bvals and bvecs not written')
253-
elifbvecsisNone:
254-
verbose('DTI volumes detected, but no diffusion direction info was'
255-
'found. Writing .bvals file only.')
256-
withopen(basefilename+'.bvals','w')asfid:
257-
# np.savetxt could do this, but it's just a loop anyway
258-
forvalinbvals:
259-
fid.write('%s '%val)
260-
fid.write('\n')
261-
else:
262-
verbose('Writing .bvals and .bvecs files')
263-
# Transform bvecs with reorientation affine
264-
orig2new=npl.inv(t_aff)
265-
bv_reorient=from_matvec(to_matvec(orig2new)[0], [0,0,0])
266-
bvecs=apply_affine(bv_reorient,bvecs)
267-
withopen(basefilename+'.bvals','w')asfid:
268-
# np.savetxt could do this, but it's just a loop anyway
269-
forvalinbvals:
270-
fid.write('%s '%val)
271-
fid.write('\n')
272-
withopen(basefilename+'.bvecs','w')asfid:
273-
forrowinbvecs.T:
274-
forvalinrow:
275-
fid.write('%s '%val)
276-
fid.write('\n')
277-
278-
# export data labels varying along the 4th dimensions if requested
279-
ifopts.vol_info:
280-
labels=pr_img.header.get_volume_labels()
281-
iflen(labels)>0:
282-
vol_keys=list(labels.keys())
283-
withopen(basefilename+'.ordering.csv','w')ascsvfile:
284-
csvwriter=csv.writer(csvfile,delimiter=',')
285-
csvwriter.writerow(vol_keys)
286-
forvalsinzip(*[labels[k]forkinvol_keys]):
287-
csvwriter.writerow(vals)
288-
289-
# write out dwell time if requested
290-
ifopts.dwell_time:
291-
try:
292-
dwell_time=calculate_dwell_time(
293-
pr_hdr.get_water_fat_shift(),
294-
pr_hdr.get_echo_train_length(),
295-
opts.field_strength)
296-
exceptMRIError:
297-
verbose('No EPI factors, dwell time not written')
298-
else:
299-
verbose('Writing dwell time (%r sec) calculated assuming %sT '
300-
'magnet'% (dwell_time,opts.field_strength))
301-
withopen(basefilename+'.dwell_time','w')asfid:
302-
fid.write('%r\n'%dwell_time)
303-
# done
304-
305-
306-
defmain():
307-
parser=get_opt_parser()
308-
(opts,infiles)=parser.parse_args()
309-
310-
verbose.switch=opts.verbose
311-
312-
ifopts.originnotin ['scanner','fov']:
313-
error("Unrecognized value for --origin: '%s'."%opts.origin,1)
314-
ifopts.dwell_timeandopts.field_strengthisNone:
315-
error('Need --field-strength for dwell time calculation',1)
316-
317-
# store any exceptions
318-
errs= []
319-
forinfileininfiles:
320-
verbose('Processing %s'%infile)
321-
try:
322-
proc_file(infile,opts)
323-
exceptExceptionase:
324-
errs.append('%s: %s'% (infile,e))
325-
326-
iflen(errs):
327-
error('Caught %i exceptions. Dump follows:\n\n %s'
328-
% (len(errs),'\n'.join(errs)),1)
329-
else:
330-
verbose('Done')
5+
fromnibabel.parrec2nii_cmdimportmain
3316

3327

3338
if__name__=='__main__':

‎dev-requirements.txt‎

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
# Requirements for running tests
22
-r requirements.txt
33
nose
4+
mock

‎doc/source/installation.rst‎

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -92,6 +92,7 @@ Requirements
9292
*PyDICOM_ 0.9.7 or greater (optional, for DICOM support)
9393
* `Python Imaging Library`_ (optional, for PNG conversion in DICOMFS)
9494
*nose_ 0.11 or greater (optional, to run the tests)
95+
*mock_ (optional, to run the tests)
9596
*sphinx_ (optional, to build the documentation)
9697

9798
Get the development sources

‎nibabel/__init__.py‎

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -67,16 +67,17 @@
6767
from .importstreamlines
6868
from .importviewers
6969

70-
# be friendly on systems with ancient numpy -- no tests, but at least
71-
# importable
70+
# Note test requirement for "mock". Requirement for "nose" tested by numpy.
7271
try:
72+
importmock
73+
exceptImportError:
74+
deftest(*args,**kwargs):
75+
raiseRuntimeError('Need "mock" package for tests')
76+
else:
7377
fromnumpy.testingimportTester
7478
test=Tester().test
7579
bench=Tester().bench
76-
delTester
77-
exceptImportError:
78-
deftest(*args,**kwargs):
79-
raiseRuntimeError('Need numpy >= 1.2 for tests')
80+
delmock,Tester
8081

8182
from .pkg_infoimportget_pkg_infoas_get_pkg_info
8283

0 commit comments

Comments
 (0)

[8]ページ先頭

©2009-2025 Movatter.jp