Vous êtes sur la page 1 sur 5

Projet sur le modèle linéaire généralisé

Mahamoud Ibrahim Abdek


09 Décembre 2018

> # Les observations


> Dose=c(1.6907, 1.7242, 1.7552, 1.7842, 1.8113, 1.8369, 1.8610, 1.8839)
> Dose

[1] 1.6907 1.7242 1.7552 1.7842 1.8113 1.8369 1.8610 1.8839

> NScarab=c(59,60,62,56,63,59,62,60)
> NScarab

[1] 59 60 62 56 63 59 62 60

> NTouch =c(6 ,13,18,28,52,53,61,60)


> NTouch

[1] 6 13 18 28 52 53 61 60

> # Les notations acceptées dans ce programme


>
> X=Dose
> Y=NTouch
> n=NScarab
> p=Y/n
> N=length(Y)
> # introduce the XX matrix
> X1=rep(1,N)
> X2=X
> m=matrix(c(X1,X2),ncol=2)
> #XX.hat = t(XX)%*%XX
>
> # la fonction lien
> # les fonctions logistique et logit
>
>
> logit =function(p) log(p/(1-p))
> logistique=function(x) (exp(x)/(1+exp(x)))

1
> # Visualiser les observations
>
>
> plot(X,p,ylim=c(0,1))
> # On trace une droite quelconque
> a=4.8
> b=0.15
> Ylin=a*(X-1.7)+b
> lines(X,Ylin,col="red")
> betaline= p%*%m %*% solve(t(m)%*%m)
> predline= betaline %*% t(m)
> lines(X,predline,col="blue")
> # On trace un ajustement non-linéaire quelconque
>
> lines(X, logistique( 40.2 * X - 71 ),col="green")
>
1.0
0.8
0.6
p

0.4
0.2
0.0

1.70 1.75 1.80 1.85

1 Modèle GLM avec réponse binomiale, link=logit


On ajuste d'abord le modèle avec la fonction de lien probit, puis avec la fonction de lien logit :
> REZ=glm(cbind(Y,n-Y) ~ 1 + X, family = binomial(link=probit))
> plot(X,p,ylim=c(0,1))
> REZ=glm(cbind(Y,n-Y) ~ 1 + X, family = binomial(link=logit))
> # Analyser les résultats
>

2
> REZ=glm(cbind(Y,n-Y) ~ X, family = binomial(link=logit))
> summary(REZ)

Call:
glm(formula = cbind(Y, n - Y) ~ X, family = binomial(link = logit))

Deviance Residuals:
Min 1Q Median 3Q Max
-1.5941 -0.3944 0.8329 1.2592 1.5940

Coefficients:
Estimate Std. Error z value Pr(>|z|)
(Intercept) -60.717 5.181 -11.72 <2e-16 ***
X 34.270 2.912 11.77 <2e-16 ***
---
Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

(Dispersion parameter for binomial family taken to be 1)

Null deviance: 284.202 on 7 degrees of freedom


Residual deviance: 11.232 on 6 degrees of freedom
AIC: 41.43

Number of Fisher Scoring iterations: 4

> names(REZ)

[1] "coefficients" "residuals" "fitted.values"


[4] "effects" "R" "rank"
[7] "qr" "family" "linear.predictors"
[10] "deviance" "aic" "null.deviance"
[13] "iter" "weights" "prior.weights"
[16] "df.residual" "df.null" "y"
[19] "converged" "boundary" "model"
[22] "call" "formula" "terms"
[25] "data" "offset" "control"
[28] "method" "contrasts" "xlevels"

> z1=REZ$coefficients[1]/5.181
> # tester l'hypothèse H0 : beta1 = Intercept = 0
>
> alpha=0.95
> CritVal = qnorm( (alpha +1)/2 )
> abs(z1) <= CritVal

(Intercept)
FALSE

3
> # on récupère les estimateurs de beta
>
> beta.est = as.vector(REZ$coefficients)
> beta.est

[1] -60.71745 34.27033

> beta1 = beta.est[1]


> beta2 = beta.est[2]
> eta = beta1 + beta2 * X
> pest = exp(eta)/(1+exp(eta))
> lines(X,pest, col="red")
> xx=seq(1.6,2,len=100)
> yy=logistique(beta1+beta2*xx)
> lines(xx,yy,col="magenta")
> # prédiction des parties linéaires
> predict(REZ) #

1 2 3 4 5 6 7
-2.7766148 -1.6285589 -0.5661788 0.4276606 1.3563864 2.2337068 3.0596216
8
3.8444121

> predict.glm(REZ)

1 2 3 4 5 6 7
-2.7766148 -1.6285589 -0.5661788 0.4276606 1.3563864 2.2337068 3.0596216
8
3.8444121

> # prédiction des proportions p_i


>
> predict.glm(REZ,type="link")

1 2 3 4 5 6 7
-2.7766148 -1.6285589 -0.5661788 0.4276606 1.3563864 2.2337068 3.0596216
8
3.8444121

> predict(REZ,type="link")

1 2 3 4 5 6 7
-2.7766148 -1.6285589 -0.5661788 0.4276606 1.3563864 2.2337068 3.0596216
8
3.8444121

4
> predict.glm(REZ,type="response")

1 2 3 4 5 6 7
0.05860103 0.16402787 0.36211901 0.60531491 0.79517177 0.90323582 0.95519611
8
0.97904934

> predict(REZ,type="response")

1 2 3 4 5 6 7
0.05860103 0.16402787 0.36211901 0.60531491 0.79517177 0.90323582 0.95519611
8
0.97904934

> pest

[1] 0.05860103 0.16402787 0.36211901 0.60531491 0.79517177 0.90323582 0.95519611


[8] 0.97904934

> yy = predict.glm(REZ, newdata = as.data.frame(X),type="response")


> M=20
> beta=matrix(NA,nrow=2,ncol=20)
> beta[,1] =c(0,0)
>
1.0
0.8
0.6
p

0.4
0.2
0.0

1.70 1.75 1.80 1.85

Vous aimerez peut-être aussi