# Input the data.
> x <- c(10,14,2,10)
> y <- c(82,94,50,70)

# The "lm" command does simple linear regression. The "y~x"
# argument specifies the model "yhat = beta0hat + beta1hat x".
# We store the regression results in the object "study.results".
> study.results <- lm(y~x)

# Let's check out the results.
> summary(study.results)

Call:
lm(formula = y ~ x)

Residuals:
     1      2      3      4
 4.421  2.105  1.053 -7.579

Coefficients:
            Estimate Std. Error t value Pr(>|t|)
(Intercept)  41.7895     7.3684   5.671   0.0297
x             3.5789     0.7368   4.857   0.0399

Residual standard error: 6.424 on 2 degrees of freedom
Multiple R-squared:  0.9219,    Adjusted R-squared:  0.8828
F-statistic: 23.59 on 1 and 2 DF,  p-value: 0.03987

# The Analysis of Variance (ANOVA) table summarizing the regression.
> anova(study.results)

Analysis of Variance Table

Response: y
          Df Sum Sq Mean Sq F value  Pr(>F)
x          1 973.47  973.47  23.592 0.03987
Residuals  2  82.53   41.26

# We want predictions and confidence intervals at x = 4 and x = 11.
# The "fit" column gives the yhat values, and "lwr" and "upr" give
# the (default 95%) confidence limits.
> predict(study.results, list(x=c(4,11)), interval="confidence", se.fit=TRUE)
$fit
       fit      lwr      upr
1 56.10526 35.07537 77.13516
2 81.15789 65.95331 96.36248

$se.fit
[1] 4.89 3.53
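
# The sketch below is not part of the original session; it is a minimal
# hand check, assuming the same four data points, showing where the
# slope, intercept, and the x = 4 confidence limits come from. The
# object names (Sxx, Sxy, b1, b0, s, x0, yhat, se) are introduced here
# for illustration only.
> Sxx <- sum((x - mean(x))^2)                               # 76
> Sxy <- sum((x - mean(x)) * (y - mean(y)))                 # 272
> b1 <- Sxy / Sxx                                           # slope: 3.5789
> b0 <- mean(y) - b1 * mean(x)                              # intercept: 41.7895
> s <- sqrt(sum((y - (b0 + b1 * x))^2) / (length(x) - 2))   # residual SE: 6.424
> x0 <- 4
> yhat <- b0 + b1 * x0                                      # fit: 56.105
> se <- s * sqrt(1/length(x) + (x0 - mean(x))^2 / Sxx)      # se.fit: 4.89
> yhat + c(-1, 1) * qt(0.975, df = length(x) - 2) * se      # lwr, upr: 35.08 77.14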