In [1]:
# 导入写好的似然函数 import sys sys.path.append('../likelihood_path/') from likelihood import LCDM #导入多线程 import os os.environ["OMP_NUM_THREADS"] = "1" from multiprocessing import Pool #导入numpy和emcee import numpy as np import emcee #设置输出清晰图像 %config InlineBackend.figure_format = 'retina' %matplotlib inline
In [2]:
# Full log-probability: top-hat priors combined with the LCDM log-likelihood.
def LCDM_loglike(theta, prioru, priord):
    """Log-probability of a flat-LCDM parameter point under top-hat priors.

    Parameters
    ----------
    theta : array_like
        Parameter vector (H0, M, Omega_m), in the order of params_list.
    prioru, priord : sequence of float
        Upper and lower prior bounds, one entry per parameter.

    Returns
    -------
    float
        -inf for any point outside the prior box, otherwise the LCDM
        log-likelihood value.
    """
    upper = np.asarray(prioru)
    lower = np.asarray(priord)
    # Reject points outside the prior box before paying for the likelihood.
    if np.any(theta > upper) or np.any(theta < lower):
        return -np.inf
    H0, Magnitude, Omegam = theta
    # .xx appears to hold the log-likelihood computed by the imported LCDM
    # class (assumption from usage here — confirm against likelihood.py).
    return LCDM(H0, Magnitude, Omegam).xx
In [3]:
def run_mcmc(params_list, nsteps=1000):
    """Run an ensemble MCMC with emcee and return the flattened samples.

    Arguments:
        params_list -- list of [name, initial value, (lower, upper) prior,
                        LaTeX label] entries, one per parameter
        nsteps      -- number of MCMC steps (default 1000)

    Returns:
        samples_dict -- dictionary with keys:
            'names'   -- tuple of parameter names
            'labels'  -- tuple of parameter LaTeX labels
            'samples' -- (n_samples, ndim) array of posterior samples
            'chi2'    -- minimum chi2 found in the chain (-2 * max log-like)
    """
    ndim = len(params_list)
    priord = [p[2][0] for p in params_list]
    prioru = [p[2][1] for p in params_list]
    inits = [p[1] for p in params_list]
    nwalkers = ndim * 3  # emcee requires nwalkers >= 2 * ndim

    # Initialise walkers in a small Gaussian ball around the initial values.
    pos = inits + 0.01 * np.random.randn(nwalkers, ndim)

    # Use a context manager so the worker pool is always closed and joined;
    # the original created Pool() inline and leaked the worker processes.
    with Pool() as pool:
        sampler = emcee.EnsembleSampler(nwalkers, ndim, LCDM_loglike,
                                        args=(prioru, priord), pool=pool)
        sampler.run_mcmc(pos, nsteps, progress=True)

    # Discard burn-in and thin the chain based on the autocorrelation time
    # (raises if the chain is too short for a reliable estimate).
    tau = sampler.get_autocorr_time()
    burnin = int(2 * np.max(tau))
    thin = int(0.5 * np.min(tau))
    samples = sampler.get_chain(discard=burnin, thin=thin, flat=True)
    loglikevalues = sampler.get_log_prob(discard=burnin, thin=thin, flat=True)

    names, _, _, labels = zip(*params_list)
    samples_dict = {
        'names': names,
        'labels': labels,
        'samples': samples,
        'chi2': -2 * np.max(loglikevalues),
    }
    return samples_dict
In [4]:
# Parameter list: [name, initial value, (lower, upper) prior bounds, LaTeX label].
# The Omega_m label uses a raw string: '\O' is an invalid escape sequence
# (SyntaxWarning since Python 3.12, scheduled to become an error).
params_list = [
    ['H0', 70, (60, 80), 'H_0'],
    ['Mag', -19, (-20, -18), 'M'],
    ['omegam', 0.3, (0, 1), r'\Omega_m'],
]
In [5]:
#运行MCMC并返回参数样本字典 samples_dict = run_mcmc(params_list, nsteps=5000)
100%|███████████████████████| 5000/5000 [01:03<00:00, 78.49it/s]
In [6]:
#保存 np.save('../Chains/LCDM.npy', samples_dict)
In [7]:
# Reload the saved dictionary; allow_pickle=True is required for object
# arrays, and .item() unwraps the 0-d array back into a plain dict.
load_dict = np.load('../Chains/LCDM.npy',allow_pickle=True).item()
In [8]:
# matplotlib.pyplot was never imported anywhere in this notebook, so the
# original plt.savefig call raised NameError on a fresh kernel.
import matplotlib.pyplot as plt
from getdist import plots, MCSamples

# Build a getdist sample set from the stored chain.
samples = MCSamples(samples=load_dict['samples'],
                    names=load_dict['names'],
                    labels=load_dict['labels'])

# Derived deceleration parameter for flat LCDM: q0 = (3/2) * Omega_m - 1.
p = samples.getParams()
q0 = 3 / 2 * p.omegam - 1
samples.addDerived(q0, name="q0", label="q_0")

# Triangle (corner) plot of the posterior; pass title_limit=1 to show
# 1-sigma constraints in the panel titles.
g = plots.get_subplot_plotter()
g.triangle_plot(samples, filled=True)
plt.savefig('contour.pdf', dpi=1200, bbox_inches='tight')
Removed 0.3 as burn in
In [9]:
# Posterior means, standard deviations (including the derived q0), and the
# minimum chi^2 stored in the chain dictionary.
print(samples.getMeans(),samples.sddev,load_dict['chi2'])
[ 73.43026861 -19.2482022 0.33267774 -0.50098339] [1.01714236 0.02965232 0.01813599 0.02720399] 1522.9870255193334
Smilie Vote is loading.
更精确地求最小 $\chi^2$（用 scipy.optimize 对 MCMC 最优点做局部优化）:
import scipy.optimize as op
def chi2(theta):
…
def optimization(init, method):
    """Minimize the chi2 function starting from `init`.

    Parameters
    ----------
    init : sequence of float
        Initial guess for (H0, M, Omega_m).
    method : str
        Any scipy.optimize.minimize method name, e.g. 'SLSQP'.

    Returns
    -------
    scipy.optimize.OptimizeResult
        The optimizer result; res.x is the best-fit point, res.fun the
        minimum chi2.
    """
    # The original line `fun = lambda theta: chi2` built a lambda that
    # returned the chi2 function object itself and was never used — removed
    # as dead code.
    res = op.minimize(chi2, init, method=method)
    return res
# The original used typographic quotes (’SLSQP’), which is a SyntaxError in
# Python — replaced with ASCII quotes.
result = optimization([70, -19, .3], 'SLSQP')