% siganlandsystemusingMatlab/SSUM/xsynthesis/xsynthexpofn.m
% (sound processing: LPC, FFT, formants, spectrogram)

    function xsynthexpofn(action)
    % XSYNTHEXPOFN Callback dispatcher for the SSUM cross-synthesis exploration GUI.

	if nargin < 1
		action='init';
	end

	% Fix LPC
	% Linked zoom?

    name = mfilename;
    figname = [name(1:end-2) '_fig'];
    f=findobj('Tag',figname);
    handles = get(f,'UserData');
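    % All shared state lives in the GUI figure's UserData: each callback
    % fetches the handles struct here and writes it back before returning.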

	switch action
		case 'help'
			display_help(figname);
		case 'init'
			setdefaults;
			reset(handles.sig_1_spec);
            reset(handles.sig_2_spec);
            reset(handles.sig_3_spec);
            reset(handles.sig_1_time);
            reset(handles.sig_2_time);
            reset(handles.sig_3_time);
			handles.fileopen = {false, false, false};
			set(handles.freqzoom,'Value',1);
		case 'loadsound'
			signum = handles.signum;
			handles.audio{signum} = load_audiodata;
			if ~isfield(handles.audio{signum}, 'filenamepath')
                return;
            end
			if (size(handles.audio{signum}.data,2) > 1)
                handles.audio{signum}.data = to_mono(handles.audio{signum}.data);
            end
			handles.fileopen{signum} = true;
			contents = get(handles.fftsize,'String');
			fftsize = str2double(contents{get(handles.fftsize,'Value')});
			contents = get(handles.window,'String');
			shape = contents{get(handles.window,'Value')};
			[handles.audio{signum}.spectrum,...
			 handles.audio{signum}.bin,...
			 handles.audio{signum}.st] = ...
				spectrogram(handles.audio{signum}, fftsize, shape);
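			% Note: spectrogram here is SSUM's wrapper, which takes the whole
			% audio struct rather than a raw vector. Judging by how the plots
			% use the outputs, .spectrum is the complex STFT matrix, .bin the
			% frequency axis in Hz, and .st the frame times in seconds.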

			set(f,'UserData',handles);
			loadSpecPlot(handles, signum);
			loadTimePlot(handles, signum);
			linkedzoom([handles.sig_1_time, handles.sig_1_spec,...
                handles.sig_3_time, handles.sig_3_spec],'onx');
			xsynthexpofn 'freqzoom';
			if signum == 1
	            handles.xlim_orig = get(handles.sig_1_time,'XLim');
			end
		case 'loadworkspace'
			handles = loadworkspace(handles);
			set(f,'UserData',handles);
			for signum = 1:3
				if (handles.fileopen{signum})
					loadSpecPlot(handles, signum);
					loadTimePlot(handles, signum);
				end
			end
		case 'saveworkspace'
			saveworkspace(handles);
		case 'write_soundfile'
			if (handles.fileopen{3})
				save_audiodata(handles.audio{3});
			end
		case 'play'
			signum = handles.signum;
			if (handles.fileopen{signum})
				timeplot = handles.(['sig_' num2str(signum) '_time']);
                samples = round(get(timeplot,'XLim').*handles.audio{signum}.Fs+1);
                % Clamp both endpoints of the selection to the valid sample range.
                if samples(1) <= 0
                    samples(1) = 1;
                end
                if samples(2) > length(handles.audio{signum}.data)
                    samples(2) = length(handles.audio{signum}.data);
                end
                audiodata.data = handles.audio{signum}.data(samples(1):samples(2));
                audiodata.Fs = handles.audio{signum}.Fs;
                playbutton = handles.(['s' num2str(signum) '_play']);
                play_audiodata(audiodata, playbutton);
			end
		case {'fftsize','window'}
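			% A new FFT size or window shape invalidates every cached
			% spectrogram, so recompute and redraw all open signals.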
			for signum = 1:3
				if (handles.fileopen{signum})
					contents = get(handles.fftsize,'String');
					fftsize = str2double(contents{get(handles.fftsize,'Value')});
					contents = get(handles.window,'String');
					shape = contents{get(handles.window,'Value')};
					[handles.audio{signum}.spectrum,...
					 handles.audio{signum}.bin,...
					 handles.audio{signum}.st] = ...
						spectrogram(handles.audio{signum}, fftsize, shape);
					loadSpecPlot(handles, signum);
				end
			end
			xsynthexpofn 'freqzoom';
		case {'db','colormap','inverse','interpolate'}
			for signum = 1:3
				if (handles.fileopen{signum})
					timeplot = handles.(['sig_' num2str(signum) '_time']);
                    xlim = get(timeplot,'XLim');
                    spectrumplot = handles.(['sig_' num2str(signum) '_spec']);
                    loadSpecPlot(handles, signum);
                    set(spectrumplot,'XLim',xlim);
				end
			end
			xsynthexpofn 'freqzoom';
		case 'convolution'
			handles = convolution(handles);
			set(f,'UserData',handles);
			signum = 3;
			loadTimePlot(handles, signum);
			loadSpecPlot(handles, signum);
			xsynthexpofn 'freqzoom';
		case 'ampenv'
			handles = ampenv(handles);
			set(f,'UserData',handles);
			signum = 3;
			loadTimePlot(handles, signum);
			loadSpecPlot(handles, signum);
			xsynthexpofn 'freqzoom';
		case 'lpc'
			handles = lpcresynth(handles);
			set(f,'UserData',handles);
			signum = 3;
			loadTimePlot(handles, signum);
			loadSpecPlot(handles, signum);
			xsynthexpofn 'freqzoom';
		case 'freqzoom'
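			% The slider value (0..1) selects the visible fraction of the
			% Nyquist range: each spectrogram's YLim becomes [0 val*Fs/2].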
			val = get(handles.freqzoom,'Value');
			if val == 0
				val = 0.001;
			end
			for signum = 1:3
				specplot = handles.(['sig_' num2str(signum) '_spec']);
				if (handles.fileopen{signum})
					Fs = handles.audio{signum}.Fs;
					set(specplot,'YLim',[0 val*Fs/2]);
				end
            end
		case 'zoomreset'
            if isfield(handles,'xlim_orig')
                for signum=[1,3]
                    timeplot = handles.(['sig_' num2str(signum) '_time']);
                    specplot = handles.(['sig_' num2str(signum) '_spec']);
                    set(timeplot,'XLim',handles.xlim_orig);
                    set(specplot,'XLim',handles.xlim_orig);
                end
            end
		case 'print'
			print_figure(f);
		case 'close'
			close_figure(f,figname(1:end-4));
            return;
	end
	set(f,'UserData',handles);


% --------------------------------------------------------------------
function loadTimePlot(handles, signum)
	Fs = handles.audio{signum}.Fs;
	t = 0:1/Fs:(length(handles.audio{signum}.data)-1)/Fs;
	timeplot = handles.(['sig_' num2str(signum) '_time']);
	axes(timeplot);
    plot(t,handles.audio{signum}.data)
    maxtime = length(t)/Fs;
    set(timeplot,'XLim',[0 maxtime]);
    set(timeplot,'YLim',[-1.0 1.0]);
	if (signum ~= 1)
		set(timeplot,'YTickLabel',['']);
	end
    grid;
    xlabel('time (s)');
    
% --------------------------------------------------------------------
function loadSpecPlot(handles, signum)
	Fs = handles.audio{signum}.Fs;
	spectrumplot = handles.(['sig_' num2str(signum) '_spec']);
    axes(spectrumplot);
    handles.pos = get(gca,'Position'); % Save axes position
    if (get(handles.interpolate,'Value'))
        if (get(handles.dB,'Value'))
            pcolor(handles.audio{signum}.st,handles.audio{signum}.bin,...
				20*log10(abs(handles.audio{signum}.spectrum)));
        else
            pcolor(handles.audio{signum}.st,handles.audio{signum}.bin,...
				abs(handles.audio{signum}.spectrum));
        end
        shading interp;
    else
        if (get(handles.dB,'Value'))
            imagesc(handles.audio{signum}.st,handles.audio{signum}.bin,...
				20*log10(abs(handles.audio{signum}.spectrum)));
        else
            imagesc(handles.audio{signum}.st,handles.audio{signum}.bin,...
				abs(handles.audio{signum}.spectrum));
        end
    end

    axis xy;
	ylabel('Frequency (Hz)');
	if (signum ~= 1)
		ylabel('');
		set(spectrumplot,'YTickLabel',['']);
	end
	contents = get(handles.colormap,'String');
    cmap = colormap(lower(contents{get(handles.colormap,'Value')}));
	if (get(handles.inverse,'Value'))
		cmap = flipud(cmap);
	end
	colormap(cmap);
    set(spectrumplot,'XTickLabel',['']);

	info = sprintf('Filename: %s\n Duration: %s seconds\n Fs: %s Hz', ...
		handles.audio{signum}.filenamepath, ...
		num2str(length(handles.audio{signum}.data)/Fs), ...
		num2str(Fs));

	set(handles.(['text' num2str(signum)]),'String', info);


% --------------------------------------------------------------------
function handles = convolution(handles)
	if (handles.fileopen{1} && handles.fileopen{2})
		signal_1 = handles.audio{1}.data;
		signal_2 = handles.audio{2}.data;
		% Slow brute-force convolution:
		% signal_3 = conv(signal_1,signal_2);
		% Fast overlap-add convolution via fftfilt:
        % Find the longer signal and zero-pad it by the length of the shorter one.
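		% fftfilt(b,x) only returns length(x) samples, so the padding keeps the
		% full convolution tail. A minimal sanity check (illustrative only, not
		% executed here; the variable names are arbitrary):
		%   x = randn(1000,1); h = randn(64,1);
		%   y1 = conv(x,h);
		%   y2 = fftfilt(h,[x; zeros(length(h),1)]);
		%   max(abs(y1 - y2(1:length(y1))))   % differs only at round-off level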
		if (length(signal_1) > length(signal_2))
			signal_1 = [signal_1; zeros(length(signal_2),1)];
		    audiodata = fftfilt(signal_2,signal_1);
		else
			signal_2 = [signal_2; zeros(length(signal_1),1)];
		    audiodata = fftfilt(signal_1,signal_2);
		end
		filename = 'Convolution';
		Fs = max(handles.audio{1}.Fs,handles.audio{2}.Fs);
		handles.audio{3}.filenamepath = filename;
		handles.audio{3}.Fs = Fs;
        if (max(abs(audiodata)) > 1.0)
            audiodata = normalize(audiodata);
        end
		handles.audio{3}.data = audiodata;
		signum = 3;
		contents = get(handles.fftsize,'String');
		fftsize = str2double(contents{get(handles.fftsize,'Value')});
		contents = get(handles.window,'String');
		shape = contents{get(handles.window,'Value')};
		[handles.audio{signum}.spectrum,...
		 handles.audio{signum}.bin,...
		 handles.audio{signum}.st] = ...
			spectrogram(handles.audio{signum}, fftsize, shape);
		handles.fileopen{3} = true;
	else
		warndlg('You must open two files to perform cross-synthesis!',...
			'Error');
	end


% --------------------------------------------------------------------
function handles = ampenv(handles)
	if (handles.fileopen{1} && handles.fileopen{2})

		% Make amplitude envelope of signal_1
		window_size = 512;
		window_skip = window_size;
		amp_env = makeampenv(handles.audio{1}.data, length(handles.audio{2}.data), ...
			window_size, window_skip);
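		% makeampenv is SSUM's envelope follower; it appears to measure the
		% amplitude of signal 1 in 512-sample windows (hop = window size) and
		% to return an envelope resampled to the length of signal 2, so it can
		% scale signal 2 sample-by-sample below.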

		% Now multiply
		audiodata = amp_env'.*handles.audio{2}.data;
		filename = 'amplitude';
		Fs = max(handles.audio{1}.Fs,handles.audio{2}.Fs);
		handles.audio{3}.filenamepath = filename;
		handles.audio{3}.Fs = Fs;
		% Normalize
        if (max(abs(audiodata)) > 1.0)
            audiodata = normalize(audiodata);
        end
		handles.audio{3}.data = audiodata;
		signum = 3;
		contents = get(handles.fftsize,'String');
		fftsize = str2double(contents{get(handles.fftsize,'Value')});
		contents = get(handles.window,'String');
		shape = contents{get(handles.window,'Value')};
		[handles.audio{signum}.spectrum,...
		 handles.audio{signum}.bin,...
		 handles.audio{signum}.st] = ...
			spectrogram(handles.audio{signum}, fftsize, shape);
		handles.fileopen{3} = true;
	else
		warndlg('You must open two files to perform cross-synthesis!',...
			'Error');
	end


% --------------------------------------------------------------------
function handles = lpcresynth(handles)
	if (handles.fileopen{1} && handles.fileopen{2})
		signal_1 = handles.audio{1}.data;
		signal_2 = handles.audio{2}.data;
		% LPC analysis of signal_1.
		[a,g,e] = lpcfit(signal_1,12);
		% Model excited by signal_2.
		audiodata = lpcsynth(a,g,signal_2,128);
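		% lpcfit models signal_1 frame by frame with order-12 all-pole filters
		% (a = coefficients, g = gains, e = residual); lpcsynth then drives
		% those time-varying filters with signal_2 as the excitation, using a
		% 128-sample hop. The exact frame handling is up to the bundled
		% lpcfit/lpcsynth helpers.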
		if (length(audiodata) > length(signal_1))
			audiodata = audiodata(1:length(signal_1));
		end
		filename = 'LPC';

		Fs = max(handles.audio{1}.Fs,handles.audio{2}.Fs);
		handles.audio{3}.t = [0:1/Fs:(length(audiodata)-1)/Fs];
		handles.audio{3}.filenamepath = filename;
		handles.audio{3}.Fs = Fs;
		% Normalize
        if (max(abs(audiodata)) > 1.0)
            audiodata = audiodata./(max(abs(audiodata)) + 0.1);
        end
		handles.audio{3}.data = audiodata;
		signum = 3;
		contents = get(handles.fftsize,'String');
		fftsize = str2double(contents{get(handles.fftsize,'Value')});
		contents = get(handles.window,'String');
		shape = contents{get(handles.window,'Value')};
		[handles.audio{signum}.spectrum,...
		 handles.audio{signum}.bin,...
		 handles.audio{signum}.st] = ...
			spectrogram(handles.audio{signum}, fftsize, shape);
		handles.fileopen{3} = true;
	else
		warndlg('You must open two files to perform cross-synthesis!',...
			'Error');
	end


% --------------------------------------------------------------------
function handles = loadworkspace(handles)
	[file, path] = uigetfile({'*.mat','MATLAB datafile'}, 'Open');
	if isequal(file,0)   % user cancelled
		return;
	end
	load(fullfile(path,file));
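	% The MAT-file is expected to define 'audio', 'openfiles', and
	% 'graphprops' -- the variables written by saveworkspace below.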
	set(handles.colormap,'Value',graphprops{1});
	set(handles.window,'Value',graphprops{2});
	set(handles.fftsize,'Value',graphprops{3});
	set(handles.inverse,'Value',graphprops{4});
	set(handles.dB,'Value',graphprops{5});
	set(handles.interpolate,'Value',graphprops{6});
	for signum = 1:3
		if (openfiles{signum})
			handles.fileopen{signum} = true;
			handles.audio{signum} = audio{signum};
		end
	end


% --------------------------------------------------------------------
function saveworkspace(handles)
	[file, path] = uiputfile({'*.mat','MATLAB datafile'}, 'Save as');
	if isequal(file,0)   % user cancelled
		return;
	end
    % Save these variables
	audio = handles.audio;
	openfiles = handles.fileopen;
	graphprops = {get(handles.colormap,'Value'), ...
				get(handles.window,'Value'),...
				get(handles.fftsize,'Value'),...
				get(handles.inverse,'Value'),...
				get(handles.dB,'Value'),...
				get(handles.interpolate,'Value')};
    save(fullfile(path,file), 'audio','openfiles','graphprops');