function motion_compensate(fnInput1,fnInput2,fnMatch,fnDeepMatching,fnOut1,fnOut2,fnColor,fnVector,N,param,tmpdir,outdir)
%% Description
% Motion is estimated with a brightness constancy data term defined on fnInput1
% and a feature matching similarity term defined on fnMatch. The sequences fnInput1 and
% fnInput2 are warped according to the estimated motion field.
% For more details see the paper: "Imaging neural activity in the ventral
% nerve cord of behaving adult Drosophila", bioRxiv
%
%% Input
% fnInput1: filename of the sequence used for the brightness constancy term, in TIF format
% fnInput2: filename of the sequence warped with the motion field estimated from fnInput1 and fnMatch, in TIF format
% fnMatch: filename of the sequence used for feature matching, in TIF format
% fnDeepMatching: path of the directory containing the deepmatching executable
% fnOut1: filename used to save the warped version of fnInput1, in TIF format
% fnOut2: filename used to save the warped version of fnInput2, in TIF format
% fnColor: filename used to save the color visualization of the estimated motion, in TIF format
% fnVector: filename used to save the vector field visualization of the estimated motion, in TIF format
% N: number of frames to process (-1 to process the whole sequence)
% param: parameters of the algorithm (see 'default_parameters.m')
% tmpdir: directory used to store temporary files
% outdir: directory where the estimated motion fields are saved
%
% An example call is given at the end of this file.
%
% Copyright (C) 2017 D. Fortun, denis.fortun@epfl.ch
%
% This program is free software: you can redistribute it and/or modify
% it under the terms of the GNU General Public License as published by
% the Free Software Foundation, either version 3 of the License, or
% (at your option) any later version.
%
% This program is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
% GNU General Public License for more details.

%% Input data
seq=mijread_stack(fnInput1);
seqW=mijread_stack(fnInput2);
seqMatch=mijread_stack(fnMatch);

if N==-1
    N=size(seq,3);
end
% Normalize the sequences used for motion estimation to [0,255]
seq=double(seq(:,:,1:N));
seqRescale=(seq-min(seq(:)))/(max(seq(:))-min(seq(:)))*255;
seqW=double(seqW(:,:,1:N));
seqMatch=double(seqMatch(:,:,1:N));
seqMatch=(seqMatch-min(seqMatch(:)))/(max(seqMatch(:))-min(seqMatch(:)))*255;

%% Motion estimation
w=zeros(size(seqRescale,1),size(seqRescale,2),2,size(seqRescale,3)-1);
colorFlow=zeros(size(seqRescale,1),size(seqRescale,2),3,size(seqRescale,3)-1);
vectorFlow=zeros(512,512,size(seqRescale,3)-1);
i1=seqRescale(:,:,1);      % reference frame (first frame of the rescaled fnInput1 sequence)
i1Match=seqMatch(:,:,1);   % reference frame (first frame of the rescaled fnMatch sequence)

parfor t=1:N-1 % Replace parfor by for if you don't want to parallelize
    presteps_to_compute_motion(i1Match,seqMatch(:,:,t+1),t,tmpdir);
end
fprintf('Presteps_to_compute_motion done!\n')

% Launch deepmatching on each frame pair as a background process, waiting
% whenever more than 28 processes are already in the running state
return_number_of_running_processes = 'ps r | wc -l';
for t=1:N-1
    [status, n_processes] = system(return_number_of_running_processes);
    while str2num(n_processes)-1 > 28
        pause(1);
        [status, n_processes] = system(return_number_of_running_processes);
    end
    fprintf(['Starting deepmatching of frame ', num2str(t),'\n']);
    fnI1=[tmpdir,'/I1_',num2str(t),'.png'];
    fnI2=[tmpdir,'/I2_',num2str(t),'.png'];
    fnMatch=[tmpdir,'/match_',num2str(t),'.txt'];
    command = [fnDeepMatching,'/deepmatching ',fnI1,' ',fnI2,' -out ',fnMatch,' &'];
    s = system(command);
end

parfor t = 1:N-1
    i2=seqRescale(:,:,t+1);
    [i10,i2]=midway(i1,i2);
    w(:,:,:,t) = poststeps_to_compute_motion(i10,i2,i1Match,param,t,tmpdir);
    fprintf(['poststeps_to_compute_motion done for frame ',num2str(t),'\n']);
    colorFlow(:,:,:,t) = flowToColor(w(:,:,:,t));
    vectorFlow(:,:,t) = flowToVectorImage(w(:,:,:,t),[18,18],[512,512],t,tmpdir);
end

% Save the estimated motion fields, one file per frame and per component
parfor t = 1:N-1
    dlmwrite([outdir,'/wx_frame',num2str(t),'.dat'],w(:,:,1,t));
    dlmwrite([outdir,'/wy_frame',num2str(t),'.dat'],w(:,:,2,t));
end

%% Registration
seqWarped=seq;
seqwWarped=seqW;
parfor t=1:N-1
    seqWarped(:,:,t+1)=warpImg(seq(:,:,t+1),w(:,:,1,t),w(:,:,2,t));
    seqwWarped(:,:,t+1)=warpImg(seqW(:,:,t+1),w(:,:,1,t),w(:,:,2,t));
end

%% Save
mijwrite_stack(single(seqWarped),fnOut1);
mijwrite_stack(single(seqwWarped),fnOut2);
mijwrite_stack(single(colorFlow),fnColor,1);
mijwrite_stack(single(vectorFlow),fnVector);
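
%% Example
% A minimal usage sketch. The file names, directories and deepmatching path
% below are hypothetical placeholders; only 'default_parameters.m' is
% referenced by the code above.
%
%   param = default_parameters();
%   motion_compensate('red.tif','green.tif','red.tif','/path/to/deepmatching', ...
%                     'red_warped.tif','green_warped.tif','flow_color.tif', ...
%                     'flow_vectors.tif',-1,param,'tmp','results');
%
% The motion fields saved in outdir can be reloaded per frame with dlmread,
% e.g. wx1 = dlmread([outdir,'/wx_frame1.dat']);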