mon master2 ISIFAR
Vous souhaitez réagir à ce message ? Créez un compte en quelques clics ou connectez-vous pour continuer.
mon master2 ISIFAR

ISIFAR
 
Accueil · Rechercher · Dernières images · S'enregistrer · Connexion
Le Deal du moment : -29%
PC portable – MEDION 15,6″ FHD Intel i7 ...
Voir le deal
499.99 €

 

 tp1

Aller en bas 
AuteurMessage
Admin
Admin



Nombre de messages : 418
Date d'inscription : 27/09/2005

tp1 Empty
MessageSujet: tp1   tp1 Icon_minitimeMar 15 Nov à 14:01

# Generate synthetic data.
#
# Step 1: draw the mixture-component means for each class.
# Class 1 means scatter around (0, 1); class 0 means around (1, 0).
nmix <- 10 # number of mixture components per class
x <- rnorm(nmix, 0, 1)
y <- rnorm(nmix, 1, 1)
means_1 <- rbind(x, y) # 2 x nmix matrix; column k is the mean of component k
x <- rnorm(nmix, 1, 1)
y <- rnorm(nmix, 0, 1)
means_0 <- rbind(x, y)

# Generate n examples per class.
# All mixture components get equal weight 1/nmix (1 x nmix row of weights).
prop <- array(1 / nmix, c(1, nmix))
n <- 100

# Draw, for every example of each class, the index of the mixture component
# it comes from, then pre-allocate the 2 x n coordinate matrices.
m_0 <- sample(seq_len(nmix), n, replace = TRUE, prob = prop)
m_1 <- sample(seq_len(nmix), n, replace = TRUE, prob = prop)
x_0 <- array(0, c(2, n))
x_1 <- array(0, c(2, n))

# Place each example at the mean of its sampled component, then jitter every
# coordinate with N(0, (1/5)^2) noise, so each point follows N(m_k, I/25).
# (The original comment said N(mk, I/5); sd = 1/5 implies variance 1/25.)
# The element-wise loop is replaced by vectorized column indexing, which
# yields exactly the same values; unname() drops the rownames inherited from
# means_* so x_0/x_1 keep the attribute-free shape the original loop produced.
x_0 <- unname(means_0[, m_0, drop = FALSE])
x_1 <- unname(means_1[, m_1, drop = FALSE])
x_0[1, ] <- x_0[1, ] + rnorm(n, 0, 1/5)
x_0[2, ] <- x_0[2, ] + rnorm(n, 0, 1/5)
x_1[1, ] <- x_1[1, ] + rnorm(n, 0, 1/5)
x_1[2, ] <- x_1[2, ] + rnorm(n, 0, 1/5)

# We now have two classes of n random points each: class 0 scattered around
# the columns of means_0, class 1 around the columns of means_1.

# Plot the data: draw all points first (this sets up the axes), then overdraw
# each class in its own colour.
x <- cbind(x_0, x_1) # 2 x 2n matrix: class 0 columns followed by class 1
plot(x[1, ], x[2, ])
points(x_0[1, ], x_0[2, ], col = "blue")
points(x_1[1, ], x_1[2, ], col = "red")



# Least-squares linear classifier fit via (co)variances:
#   w = Var(x)^{-1} Cov(x, y),  b = mean(y) - w' mean(x).
# Labels are -1 for class 0 and +1 for class 1; since both classes have n
# points, the labels can be a plain vector (the 1 x 2n matrix was overhead).
x <- cbind(x_0, x_1)
y <- rep(c(-1, 1), each = n)

mu_x <- rowMeans(x)                           # per-coordinate mean of the inputs
mu_y <- mean(y)                               # 0 here: classes are balanced
var_xx <- var(t(x))                           # 2 x 2 sample covariance of inputs
var_xy <- c(cov(x[1, ], y), cov(x[2, ], y))   # covariance of each input with y

w <- solve(var_xx, var_xy)                    # regression weights
b <- mu_y - sum(w * mu_x)                     # intercept

# Draw the decision boundary w1*x1 + w2*x2 + b = 0 on the current plot,
# solving for x2 at the two endpoints x1 = a1 and x1 = a2.
a1 <- -5
a2 <- 5
lines(c(a1, a2),
      c((-b - w[1] * a1) / w[2], (-b - w[1] * a2) / w[2]),
      type = "l")
Revenir en haut Aller en bas
https://mastertwo.jeun.fr
Admin
Admin



Nombre de messages : 418
Date d'inscription : 27/09/2005

tp1 Empty
MessageSujet: Re: tp1   tp1 Icon_minitimeMar 15 Nov à 14:16

# Generate synthetic data for a train/test experiment.
#
# Draw nmix component means per class: class 1 scatters around (0, 1),
# class 0 around (1, 0). A large nmix approximates a smooth class density.
nmix <- 5000 # number of mixture components per class
x <- rnorm(nmix, 0, 1)
y <- rnorm(nmix, 1, 1)
means_1 <- rbind(x, y)
x <- rnorm(nmix, 1, 1)
y <- rnorm(nmix, 0, 1)
means_0 <- rbind(x, y)

# Training-set size; every mixture component gets equal weight.
prop <- array(1, c(1, nmix))
prop <- prop / nmix
ntrain <- 100
n <- ntrain

# Build the training set: draw which mixture component each example comes
# from, place it at that component's mean, then add N(0, (1/5)^2) noise per
# coordinate. The original element-wise loop is replaced by vectorized column
# indexing (identical values, identical RNG stream); unname() keeps x_0/x_1
# attribute-free, as the loop version produced.
m_0 <- sample(seq_len(nmix), n, replace = TRUE, prob = prop)
m_1 <- sample(seq_len(nmix), n, replace = TRUE, prob = prop)
x_0 <- unname(means_0[, m_0, drop = FALSE])
x_1 <- unname(means_1[, m_1, drop = FALSE])
x_0[1, ] <- x_0[1, ] + rnorm(n, 0, 1/5)
x_0[2, ] <- x_0[2, ] + rnorm(n, 0, 1/5)
x_1[1, ] <- x_1[1, ] + rnorm(n, 0, 1/5)
x_1[2, ] <- x_1[2, ] + rnorm(n, 0, 1/5)

# Stack into a 2 x 2n design matrix and a 1 x 2n label row (-1 / +1),
# class 0 columns first.
xtrain <- cbind(x_0, x_1)
ytrain <- cbind(array(-1, c(1, n)), array(1, c(1, n)))





# Generate ntest examples for TESTING (the original comment wrongly said
# "training"). The mixture weights `prop` are unchanged from the training
# setup, so the redundant re-computation of `prop` was dropped.
ntest <- 500
n <- ntest

# Same construction as for the training data: component draw, vectorized
# mean assignment, then per-coordinate N(0, (1/5)^2) noise.
m_0 <- sample(seq_len(nmix), n, replace = TRUE, prob = prop)
m_1 <- sample(seq_len(nmix), n, replace = TRUE, prob = prop)
x_0 <- unname(means_0[, m_0, drop = FALSE])
x_1 <- unname(means_1[, m_1, drop = FALSE])
x_0[1, ] <- x_0[1, ] + rnorm(n, 0, 1/5)
x_0[2, ] <- x_0[2, ] + rnorm(n, 0, 1/5)
x_1[1, ] <- x_1[1, ] + rnorm(n, 0, 1/5)
x_1[2, ] <- x_1[2, ] + rnorm(n, 0, 1/5)

# 2 x 2n test design matrix and its 1 x 2n label row (-1 / +1).
xtest <- cbind(x_0, x_1)
ytest <- cbind(array(-1, c(1, n)), array(1, c(1, n)))



# Fit the linear classifier on the training data via covariances:
#   w = Var(x)^{-1} Cov(x, y),  b = mean(y) - w' mean(x).
x <- xtrain
y <- ytrain

mu_x <- rowMeans(x)
mu_y <- mean(y)
var_xx <- var(t(x))
var_xy <- c(var(x[1, ], t(y)), var(x[2, ], t(y)))

w <- solve(var_xx, var_xy)
b <- mu_y - sum(w * mu_x)

# Misclassification rates. The prediction is sign(w'x + b); |sign - label|
# equals 2 for a wrong-side point and 0 otherwise, hence the division by 2.
# (A point exactly on the boundary, sign == 0, counts as half an error.)
test_error_linear_regression <- mean(abs(sign(xtest[1, ] * w[1] + xtest[2, ] * w[2] + b) - ytest)) / 2
test_error_linear_regression

train_error_linear_regression <- mean(abs(sign(xtrain[1, ] * w[1] + xtrain[2, ] * w[2] + b) - ytrain)) / 2
train_error_linear_regression




# k-nearest-neighbour classification for k = 1..20, recording the test and
# training (resubstitution) error for each k.
library(class) # provides knn(); the original script never loaded it

ks <- 1:20

test_error_knn <- array(0, c(1, length(ks)))
train_error_knn <- array(0, c(1, length(ks)))

for (i in seq_along(ks)) {
  # Error on the held-out test set.
  rs_knn <- knn(t(xtrain), t(xtest), ytrain, k = ks[i], l = 0,
                prob = FALSE, use.all = TRUE)
  test_error_knn[i] <- 1 - mean(rs_knn == factor(ytest))

  # Training error: classify the training points against themselves.
  rs_knn <- knn(t(xtrain), t(xtrain), ytrain, k = ks[i], l = 0,
                prob = FALSE, use.all = TRUE)
  train_error_knn[i] <- 1 - mean(rs_knn == factor(ytrain))
}

# Training error (black) vs test error (red) as k varies.
# NOTE(review): ylim is capped at max(test_error_knn); a training error above
# that would be clipped — confirm this is acceptable.
plot(ks, train_error_knn, type = "b", ylim = c(0, max(test_error_knn)))
points(ks, test_error_knn, type = "b", col = "red")
test_error_linear_regression
train_error_linear_regression
Revenir en haut Aller en bas
https://mastertwo.jeun.fr
 
tp1
Revenir en haut 
Page 1 sur 1

Permission de ce forum:Vous ne pouvez pas répondre aux sujets dans ce forum
mon master2 ISIFAR :: 1er semestre :: Data Mining : Classification-
Sauter vers:  
Ne ratez plus aucun deal !
Abonnez-vous pour recevoir par notification une sélection des meilleurs deals chaque jour.
Ignorer · Autoriser