A Quick Demo

The demo below was “borrowed” from https://www.rdocumentation.org/packages/MethComp/versions/1.30.0/topics/Deming.

  ### Install or load the MethComp package (p_load() is pacman's install-or-load helper)
  library(pacman)
  p_load(MethComp)

  ### Create the 'True' values 
  M <- runif(100,0,5)
  ### Add error to get the observed Measurements
  x <- M + rnorm(100, sd=1)
  y <- 2 + 3 * M + rnorm(100,sd=2)
  
  ### Deming regression with equal variances (lambda=1) and variance ratio 2 (lambda=2)
  print("Deming with lambda=1")
## [1] "Deming with lambda=1"
  Deming(x,y)
## Intercept     Slope   sigma.x   sigma.y 
## 0.2779725 3.7795841 1.1728933 1.1728933
  print("Deming with lambda=2")
## [1] "Deming with lambda=2"
  Deming(x,y,vr=2)
## Intercept     Slope   sigma.x   sigma.y 
## 0.7878712 3.5786397 1.1344922 1.6044142
  ### Bootstrap the values
  print("Deming with lambda=1 and bootstrap")
## [1] "Deming with lambda=1 and bootstrap"
  Deming(x,y,boot=TRUE)
## y  = alpha + beta* x
##            Estimate S.e.(boot)      50%       2.5%    97.5%
## Intercept 0.2779725 1.09806363 0.308637 -2.3602776 1.908483
## Slope     3.7795841 0.37777012 3.780435  3.2462964 4.689469
## sigma.x   1.1728933 0.08847811 1.158598  0.9718944 1.312712
## sigma.y   1.1728933 0.08847811 1.158598  0.9718944 1.312712
  print("Bootstrap with saved values")
## [1] "Bootstrap with saved values"
  bb <- Deming(x,y,boot=TRUE,keep.boot=TRUE)
##            Estimate S.e.(boot)       50%       2.5%    97.5%
## Intercept 0.2779725 1.07153103 0.3329016 -2.4646799 1.872697
## Slope     3.7795841 0.37168094 3.7746125  3.1994811 4.701918
## sigma.x   1.1728933 0.09235565 1.1551580  0.9817681 1.334027
## sigma.y   1.1728933 0.09235565 1.1551580  0.9817681 1.334027
  str(bb)
##  num [1:1000, 1:4] -1.911 0.536 0.219 0.286 -0.304 ...
##  - attr(*, "dimnames")=List of 2
##   ..$ : NULL
##   ..$ : chr [1:4] "Intercept" "Slope" "sigma.x" "sigma.y"
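  ### Percentile intervals can also be computed directly from the saved
  ### bootstrap samples in bb; these should match the table above
  apply(bb, 2, quantile, probs = c(0.025, 0.5, 0.975))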
  ### Plot data with the two classical regression lines plus two Deming lines (sdr=2 and sdr=10)
  plot(x,y)
  abline(lm(y~x), col="green")
  ir <- coef(lm(x~y))
  abline(-ir[1]/ir[2],1/ir[2], col="orange")
  abline(Deming(x,y,sdr=2)[1:2],col="red")
  abline(Deming(x,y,sdr=10)[1:2],col="blue")
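  ### Legend for the four lines drawn above
  legend("topleft", c("OLS y~x", "OLS x~y (inverted)", "Deming sdr=2", "Deming sdr=10"),
         col=c("green","orange","red","blue"), lty=1)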

  ### Comparing classical regression and "Deming extreme"
  print("OLS")
## [1] "OLS"
  summary(lm(y~x))
## 
## Call:
## lm(formula = y ~ x)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -10.9249  -2.5712   0.2958   2.1817   6.9821 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)   4.7449     0.6188   7.668 1.29e-11 ***
## x             2.0192     0.2025   9.969  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 3.446 on 98 degrees of freedom
## Multiple R-squared:  0.5035, Adjusted R-squared:  0.4985 
## F-statistic: 99.39 on 1 and 98 DF,  p-value: < 2.2e-16
  print("Deming with lambda=1000000")
## [1] "Deming with lambda=1000000"
  Deming(x,y,vr=1000000)
##  Intercept      Slope    sigma.x    sigma.y 
## 4.74485276 2.01924495 0.00344596 3.44595965
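  ### Sanity check: the closed-form Deming estimate, with lambda = vr = var(y errors)/var(x errors);
  ### beta = (Syy - lambda*Sxx + sqrt((Syy - lambda*Sxx)^2 + 4*lambda*Sxy^2)) / (2*Sxy)
  ### and alpha = mean(y) - beta*mean(x). This should reproduce Deming(x,y) above.
  lambda <- 1
  Sxx <- var(x); Syy <- var(y); Sxy <- cov(x, y)
  beta <- (Syy - lambda*Sxx + sqrt((Syy - lambda*Sxx)^2 + 4*lambda*Sxy^2)) / (2*Sxy)
  alpha <- mean(y) - beta*mean(x)
  c(Intercept = alpha, Slope = beta)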

Another discussion of Deming and Passing-Bablok regression is at https://www.r-bloggers.com/2015/09/deming-and-passing-bablok-regression-in-r/.

set.seed(20)
x <- runif(100,0,100)
y <- 1.10*x - 0.001*x^2 + rnorm(100,0,1)*(2 + 0.05*x) + 15

plot(x,y, main = "Regression Comparison", xlab = "Current Method", ylab = "New Method")

plot(x,y, main = "Regression Comparison", xlab = "Current Method", ylab = "New Method")
lin.reg <- lm(y~x)
abline(lin.reg, col="blue")

p_load(mcr)
dem.reg <- mcreg(x,y, method.reg = "Deming")

str(dem.reg)
## Formal class 'MCResultResampling' [package "mcr"] with 21 slots
##   ..@ glob.coef  : num [1:2] 15.58 1.04
##   ..@ glob.sigma : num [1:2] 0.8165 0.0147
##   ..@ xmean      : num 46.8
##   ..@ nsamples   : int 999
##   ..@ nnested    : num 25
##   ..@ B0         : num [1:999] 15.9 15.8 15.5 15.8 16.5 ...
##   ..@ B1         : num [1:999] 1.04 1.03 1.03 1.04 1.02 ...
##   ..@ sigmaB0    : num [1:999] 0.703 0.799 0.779 0.87 0.874 ...
##   ..@ sigmaB1    : num [1:999] 0.0127 0.0139 0.0135 0.0153 0.0152 ...
##   ..@ MX         : num [1:999] 46.8 48.6 49.1 47.6 49.2 ...
##   ..@ bootcimeth : chr "quantile"
##   ..@ rng.seed   : num NA
##   ..@ rng.kind   : chr NA
##   ..@ data       :'data.frame':  100 obs. of  3 variables:
##   .. ..$ sid: chr [1:100] "S1" "S2" "S3" "S4" ...
##   .. ..$ x  : num [1:100] 87.8 76.9 27.9 52.9 96.3 ...
##   .. ..$ y  : num [1:100] 110.8 93.5 45.6 76.6 116.6 ...
##   ..@ para       : num [1:2, 1:4] 15.58 1.04 NA NA 14.42 ...
##   .. ..- attr(*, "dimnames")=List of 2
##   .. .. ..$ : chr [1:2] "Intercept" "Slope"
##   .. .. ..$ : chr [1:4] "EST" "SE" "LCI" "UCI"
##   ..@ mnames     : chr [1:2] "Method1" "Method2"
##   ..@ regmeth    : chr "Deming"
##   ..@ cimeth     : chr "bootstrap"
##   ..@ error.ratio: num 1
##   ..@ alpha      : num 0.05
##   ..@ weight     : Named num [1:100] 1 1 1 1 1 1 1 1 1 1 ...
##   .. ..- attr(*, "names")= chr [1:100] "S1" "S2" "S3" "S4" ...
dem.reg@para
##                EST SE       LCI       UCI
## Intercept 15.57790 NA 14.415158 16.817795
## Slope      1.03658 NA  1.007993  1.068619
plot(x,y, main = "Regression Comparison", xlab = "Current Method", ylab = "New Method")
abline(dem.reg@para[1:2], col = "blue")
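# The same estimates are available through mcr's accessor functions
# (a sketch: getCoefficients() returns the @para matrix, printSummary() a fuller report)
getCoefficients(dem.reg)
printSummary(dem.reg)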

mcreg(x,y, method.reg = "Deming", error.ratio = 1.2)@para
##                 EST SE       LCI       UCI
## Intercept 15.534921 NA 14.375421 16.798091
## Slope      1.037499 NA  1.006823  1.066961
w.dem.reg <- mcreg(x,y, method.reg = "WDeming")
## The global.sigma is calculated with Linnet's method
w.dem.reg@para
##                 EST SE       LCI       UCI
## Intercept 13.788450 NA 12.852150 14.842971
## Slope      1.088119 NA  1.056867  1.119852
plot(x,y, main = "Regression Comparison", xlab = "Current Method", ylab = "New Method")
abline(dem.reg@para[1:2], col = "blue")
abline(w.dem.reg@para[1:2], col = "green")
legend("topleft", c("Deming","Weighted Deming"), lty=c(1,1), col = c("blue","green"))

PB.reg <- mcreg(x,y, method.reg = "PaBa")
PB.reg@para
##                 EST SE       LCI       UCI
## Intercept 14.684463 NA 13.671919 16.581452
## Slope      1.046021 NA  1.011216  1.075311
plot(PB.reg)

MCResult.plot(PB.reg, equal.axis = TRUE, x.lab = "x method", y.lab = "y method", points.col = "#FF7F5060", points.pch = 19, ci.area = TRUE, ci.area.col = "#0000FF50", main = "My Passing Bablok Regression", sub = "", add.grid = FALSE, points.cex = 1)

x <- 1:20
y <- c(1:19,10) + rnorm(20,0,0.5)
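# The x and y just defined put an outlier in the last point; as a sketch, refit OLS and
# Passing-Bablok to see how each reacts (PB.out is just an illustrative name)
plot(x, y, main = "Effect of an Outlier", xlab = "Current Method", ylab = "New Method")
abline(lm(y~x), col = "blue")
PB.out <- mcreg(x, y, method.reg = "PaBa")
abline(PB.out@para[1:2], col = "red")
legend("topleft", c("OLS", "Passing-Bablok"), lty = c(1,1), col = c("blue","red"))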

RFID-like Data

Now try some RFID-type data.

  p_load(mcr)

  nreps <- 50    # replicates of each level per trial
  ntrials <- 10  # number of trials
  ### True values: three levels (1, 2, 3)
  x <- rep(rep(1:3,nreps),ntrials)
  ### Reference measurement d: true value plus a small per-trial offset plus noise
  d <- x + rnorm(3*ntrials*nreps,
                 mean = rep(seq(-0.045,0.045,length.out=ntrials), nreps),
                 sd = 0.01)
  trial <- rep(1:(3*ntrials),nreps)
  samp <- rep(1:nreps,3*ntrials)
  ### RFID measurement: true value plus noise
  rfid <- rep(rep(1:3,nreps),ntrials) + rnorm(3*ntrials*nreps, sd=0.1)
  
  lm.rfid.d <- lm(rfid~d)
  summary(lm.rfid.d)
## 
## Call:
## lm(formula = rfid ~ d)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.36701 -0.06829  0.00132  0.06472  0.30086 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -0.005553   0.006802  -0.816    0.414    
## d            1.001538   0.003149 318.080   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.09958 on 1498 degrees of freedom
## Multiple R-squared:  0.9854, Adjusted R-squared:  0.9854 
## F-statistic: 1.012e+05 on 1 and 1498 DF,  p-value: < 2.2e-16
  betahat <- coef(lm.rfid.d)
  lm.d.rfid <- lm(d~rfid)
  summary(lm.d.rfid)
## 
## Call:
## lm(formula = d ~ rfid)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.29644 -0.06494 -0.00077  0.06792  0.34662 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 0.034646   0.006684   5.183 2.48e-07 ***
## rfid        0.983897   0.003093 318.080  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.0987 on 1498 degrees of freedom
## Multiple R-squared:  0.9854, Adjusted R-squared:  0.9854 
## F-statistic: 1.012e+05 on 1 and 1498 DF,  p-value: < 2.2e-16
  b <- coef(lm.d.rfid)
  b[1] <- -b[1]/b[2]
  b[2] <- 1/b[2]
  names(b)[2] <- "d"
  b
## (Intercept)           d 
## -0.03521293  1.01636699
  betahat
##  (Intercept)            d 
## -0.005553211  1.001538143
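  ### A Deming fit (MethComp, loaded earlier) treats d and rfid symmetrically;
  ### with comparable error variances its slope should fall between the two OLS fits above
  Deming(d, rfid)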
  lm.rfid.d.trial <- lm(rfid~d*trial)
  summary(lm.rfid.d.trial)
## 
## Call:
## lm(formula = rfid ~ d * trial)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.36374 -0.06770  0.00059  0.06499  0.30585 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -1.079e-02  1.359e-02  -0.794    0.427    
## d            1.000e+00  6.473e-03 154.565   <2e-16 ***
## trial        3.975e-04  7.876e-04   0.505    0.614    
## d:trial      3.894e-05  3.644e-04   0.107    0.915    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.09956 on 1496 degrees of freedom
## Multiple R-squared:  0.9854, Adjusted R-squared:  0.9854 
## F-statistic: 3.374e+04 on 3 and 1496 DF,  p-value: < 2.2e-16
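  ### Equivalent nested-model comparison: the trial terms should add essentially nothing
  anova(lm.rfid.d, lm.rfid.d.trial)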
  ### Plot the fitted calibration line (red) against the identity line (black)
  x <- data.frame(d=seq(0,10,by=0.1))
  rfid.hat <- predict(lm.rfid.d, newdata=x)
  plot(x$d,rfid.hat, type="l", col="red")
  abline(0, 1, col="black")

  PB.reg <- mcreg(rfid, d, method.reg = "PaBaLarge")
  PB.reg@para
##                 EST SE       LCI       UCI
## Intercept 0.1208764 NA 0.1070690 0.1355024
## Slope     0.9402870 NA 0.9342067 0.9466839
  MCResult.plot(PB.reg, equal.axis = TRUE, x.lab = "x method", y.lab = "y method", points.col = "#FF7F5060", points.pch = 19, ci.area = TRUE, ci.area.col = "#0000FF50", main = "My Passing Bablok Regression", sub = "", add.grid = FALSE, points.cex = 1)
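  ### Estimated bias of the RFID method at the three true levels
  ### (a sketch using mcr's calcBias() accessor for MCResult objects)
  calcBias(PB.reg, x.levels = c(1, 2, 3))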