In [2]:
from scipy.optimize import curve_fit
import numpy as np

from matplotlib import pyplot as plt
%matplotlib inline
In [3]:
def linearFunc(x, intercept, slope):
    """Linear model: y = intercept + slope * x."""
    y = intercept + slope * x
    return y
In [4]:
xdata = np.array([579, 577, 546.1, 435.8, 404.7])      # known wavelengths (nm)
ydata = np.array([578.7, 576.3, 545.5, 435.5, 404.7])  # dial readings, set 1 (nm)
ydata2 = np.array([578, 576, 545, 435.4, 404.5])       # dial readings, set 2 (nm)
sigma_y = np.array([1, 1, 1, 1, 1])                     # 1 nm uncertainty on each dial reading
In [5]:
a_fit, cov = curve_fit(linearFunc, xdata, ydata, sigma=sigma_y)
In [6]:
a_fit2, cov2 = curve_fit(linearFunc, xdata, ydata2, sigma=sigma_y)
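A note on these calls: with the default absolute_sigma=False, curve_fit rescales the returned covariance by the reduced chi-square, so the all-ones sigma_y acts only as a set of relative weights. If the 1 nm values are meant as absolute measurement uncertainties, the call below keeps them fixed. This is a sketch only (the variable names a_fit_abs, cov_abs are illustrative and not used in the results that follow).
In [ ]:
# Sketch: treat sigma_y as absolute uncertainties instead of relative weights.
a_fit_abs, cov_abs = curve_fit(linearFunc, xdata, ydata,
                               sigma=sigma_y, absolute_sigma=True)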
In [8]:
# Fit1 parameters and their uncertainties (square roots of the diagonal
# elements of the covariance matrix).
inter = a_fit[0]
slope = a_fit[1]
d_inter = np.sqrt(cov[0][0])
d_slope = np.sqrt(cov[1][1])
In [9]:
# Same for Fit2.
inter2 = a_fit2[0]
slope2 = a_fit2[1]
d_inter2 = np.sqrt(cov2[0][0])
d_slope2 = np.sqrt(cov2[1][1])
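The two uncertainties per fit can also be read off in one step from the diagonal of the covariance matrix; the sketch below is equivalent to the element-by-element indexing above (the _alt names are just for illustration).
In [ ]:
# Equivalent: parameter uncertainties from the covariance diagonal.
d_inter_alt, d_slope_alt = np.sqrt(np.diag(cov))
d_inter2_alt, d_slope2_alt = np.sqrt(np.diag(cov2))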
In [12]:
plt.figure(figsize=(12, 8))
plt.errorbar(xdata, ydata, yerr=sigma_y, fmt='r.', label='Data1')
plt.errorbar(xdata, ydata2, yerr=sigma_y, fmt='b.', label='Data2')

# Compute the best-fit lines from the fitted intercepts and slopes.
yfit = inter + slope*xdata
yfit2 = inter2 + slope2*xdata

# Plot the fits over the data with the ordinary plot command.
plt.plot(xdata, yfit, label='Fit1, y = %.3f x + %.3f' % (slope, inter))
plt.plot(xdata, yfit2, label='Fit2, y = %.3f x + %.3f' % (slope2, inter2))

# Display a legend and label the x and y axes.
plt.legend()
plt.xlabel('Known Wavelength (nm)', fontsize=16)
plt.ylabel('Dial measurement (nm)', fontsize=16)
Out[12]:
Text(0, 0.5, 'Dial measurement (nm)')
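A residual plot (dial reading minus fitted value) is a quick visual check that the scatter is consistent with the 1 nm error bars; a sketch using the fit lines computed above:
In [ ]:
# Sketch: residuals of both fits against the known wavelengths.
plt.figure(figsize=(12, 4))
plt.errorbar(xdata, ydata - yfit, yerr=sigma_y, fmt='r.', label='Data1 residuals')
plt.errorbar(xdata, ydata2 - yfit2, yerr=sigma_y, fmt='b.', label='Data2 residuals')
plt.axhline(0, color='gray', linestyle='--')
plt.legend()
plt.xlabel('Known Wavelength (nm)', fontsize=16)
plt.ylabel('Residual (nm)', fontsize=16)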
In [21]:
print('Fit1')
print(f'The slope = {slope}, with uncertainty {d_slope}')
print(f'The intercept = {inter}, with uncertainty {d_inter}')
Fit1
The slope = 0.9974273274123038, with uncertainty 0.0012558490265354887
The intercept = 0.9282554690792497, with uncertainty 0.6452914621011211
In [22]:
print('Fit2')
print(f'The slope = {slope2}, with uncertainty {d_slope2}')
print(f'The intercept = {inter2}, with uncertainty {d_inter2}')
Fit2
The slope = 0.9951928781779457, with uncertainty 0.0007429145984210686
The intercept = 1.7045175874802738, with uncertainty 0.38173027453951736
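Whether the two calibrations agree can be judged by comparing the slope difference to its combined uncertainty, added in quadrature since the two fits are independent; a sketch (the slope_diff names are illustrative):
In [ ]:
# Sketch: difference between the two fitted slopes in units of combined uncertainty.
slope_diff = slope - slope2
slope_diff_unc = np.sqrt(d_slope**2 + d_slope2**2)
print(f'Slope difference = {slope_diff:.5f} +/- {slope_diff_unc:.5f}')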
In [24]:
print('Fit1')
# Coefficient of determination, R^2 = 1 - SS_res/SS_tot, for Fit1.
residuals = ydata - linearFunc(xdata, *a_fit)
ss_res = np.sum(residuals**2)
ss_tot = np.sum((ydata - np.mean(ydata))**2)
r_squared = 1 - (ss_res / ss_tot)
print('R^2 is: ', r_squared)
Fit1
R^2 is:  0.9999952439836517
In [23]:
print('Fit2')
# Coefficient of determination for Fit2.
residuals2 = ydata2 - linearFunc(xdata, *a_fit2)
ss_res2 = np.sum(residuals2**2)
ss_tot2 = np.sum((ydata2 - np.mean(ydata2))**2)
r_squared2 = 1 - (ss_res2 / ss_tot2)
print('R^2 is: ', r_squared2)
Fit2
R^2 is:  0.9999983282268557
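As a cross-check on the R^2 values, scipy.stats.linregress returns the correlation coefficient directly; because the weights here are all equal, its rvalue squared should reproduce the numbers above. A sketch:
In [ ]:
# Sketch: cross-check R^2 with an unweighted least-squares fit.
from scipy.stats import linregress
fit1 = linregress(xdata, ydata)
fit2 = linregress(xdata, ydata2)
print('Fit1 R^2 =', fit1.rvalue**2)
print('Fit2 R^2 =', fit2.rvalue**2)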
In [20]:
# Chi^2 of Fit1; degrees of freedom = (number of points) - (2 fitted parameters).
chisqr = np.sum((ydata - linearFunc(xdata, inter, slope))**2 / sigma_y**2)
dof = len(ydata) - 2
chisqr_red = chisqr / dof
print(f'Reduced chi^2 = {chisqr_red}')
Reduced chi^2 = 0.04279887112765884
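The reduced chi-square above is well below 1, which may indicate that the 1 nm uncertainties are conservative. The same statistic can be computed for Fit2, and scipy.stats.chi2 gives the corresponding survival probabilities; a sketch (chisqr2 and chisqr_red2 are illustrative names):
In [ ]:
# Sketch: reduced chi^2 for Fit2 and chi^2 p-values for both fits.
from scipy.stats import chi2

chisqr2 = np.sum((ydata2 - linearFunc(xdata, inter2, slope2))**2 / sigma_y**2)
chisqr_red2 = chisqr2 / dof
print(f'Fit2 reduced chi^2 = {chisqr_red2}')
print(f'Fit1 p-value = {chi2.sf(chisqr, dof)}')
print(f'Fit2 p-value = {chi2.sf(chisqr2, dof)}')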
In [ ]: