1 Review

1.1 Reading in the files

1.1.1 Preparation: where are we now?

  • From the menu: Session > Set Working Directory > Choose Directory, and open the target folder
  • Knit ▼ > Knit Directory > Current Working Directory

1.1.2 Checking the current location: getwd()

getwd()

[1] "C:/(omitted)/NICER1_3_2/2020-11-24NICER1_3_2/NICER_NNS"

2 A variety of linguistic feature indices

  1. Total number of sentences (NoS): how many sentences make up the essay
  2. Average sentence length (ASL): the "length" of the sentences
  3. Average word length (AWL): the "length" of the words
  4. Write a function that outputs all seven linguistic features at once

2.1 Average Word Length (AWL)

  • The number of words and the number of characters
    • The number of words is the Token count
    • The command that counts characters: nchar()
    • Join all the words into one long string and count the characters in that string
      • Concatenate the words with nothing ("") between them (a toy example follows below)
    paste(token, collapse="")
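
A minimal toy sketch (the token vector here is made up) shows the idea:

token <- c("this", "is", "a", "pen")      # hypothetical token vector
mojiretu <- paste(token, collapse="")     # "thisisapen"
nchar(mojiretu)                           # 10 characters
nchar(mojiretu) / length(token)           # AWL = 10 / 4 = 2.5
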
myAWL.df <- function(){                             # give your own function a distinct name

  fileV   <- NULL
  typeV   <- NULL
  tokenV  <- NULL
  TTRV    <- NULL
  GIV     <- NULL
  NoSV    <- NULL
  ASLV    <- NULL
  AWLV    <- NULL                                   # for AWL

  file.zenbu <- list.files()                        # all files in the working directory
  ruiseki <- ""                                     # accumulator string (not used in this function)

  for (i in file.zenbu){                            # loop over the files
    yomikomi <- readLines(i, warn=F)                # read one file, line by line
    tmp1 <- grep("\\*(JPN|NS)", yomikomi, value=T)  # keep only the essay lines (*JPN / *NS)
    tmp2 <- gsub("\\*(JPN|NS)...:\t", "", tmp1)     # remove the participant tag (e.g. *JPN501:)
    tmp2b <- gsub("[[:punct:]]", "", tmp2)          # remove punctuation
    tmp2c <- tolower(tmp2b)                         # convert to lower case
    tmp3 <- strsplit(tmp2c, " ")                    # split into words at spaces
    tmp4 <- unlist(tmp3)                            # flatten the list into one vector
    tmp4 <- tmp4[tmp4 != ""]                        # drop empty strings
    token.list <- sort(tmp4)                        # sorted list of tokens
    type.list <- unique(token.list)                 # list of types (unique tokens)
    token <- length(token.list)                     # number of tokens
    type <- length(type.list)                       # number of types
    TTR <- type/token                               # type-token ratio
    GI <- type/sqrt(token)                          # Guiraud index
    NoS <- length(tmp1)                             # number of sentences (one per essay line)
    ASL <- token/NoS                                # average sentence length

    mojiretu <- paste(token.list, collapse="")      # one long string
    mojisuu <- nchar(mojiretu)                      # number of characters

    AWL <- mojisuu/token                            # characters / tokens


    # build one vector per measure

    fileV   <- c(fileV, i)
    tokenV  <- c(tokenV, token)
    typeV   <- c(typeV, type)
    TTRV    <- c(TTRV, TTR)
    GIV     <- c(GIV, GI)
    NoSV    <- c(NoSV, NoS)
    ASLV    <- c(ASLV, ASL)
    AWLV    <- c(AWLV, AWL)                         # add AWL
  }

  data.frame(fileV, tokenV, typeV, TTRV, GIV, NoSV, ASLV, AWLV)  # AWL column added

}

2.1.1 Trying out myAWL.df()

setwd("NICER_NNS")

NNS.Index.df <- myAWL.df()

names(NNS.Index.df) <- c("ID", "Token", "Type", "TTR", "GI", "NoS", "ASL", "AWL")     # rename the column headers as well

head(NNS.Index.df )
##           ID Token Type       TTR       GI NoS      ASL      AWL
## 1 JPN501.txt   319  134 0.4200627 7.502560  30 10.63333 4.304075
## 2 JPN502.txt   351  158 0.4501425 8.433416  29 12.10345 4.293447
## 3 JPN503.txt   201  121 0.6019900 8.534682  13 15.46154 4.746269
## 4 JPN504.txt   260  139 0.5346154 8.620414  27  9.62963 4.765385
## 5 JPN505.txt   417  174 0.4172662 8.520817  25 16.68000 4.023981
## 6 JPN506.txt   260  123 0.4730769 7.628136  20 13.00000 4.088462

2.2 Correlations among the linguistic indices

  • Drop the first column (the file names), which is not a linguistic index
    • data.frame[rows, columns]
    • data.frame[ , -1]  (a quick check follows after the plot call below)
pairs(NNS.Index.df[,-1])
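
As a quick check that a negative index really drops a column, here is a tiny made-up data frame (the names are hypothetical):

d <- data.frame(ID = c("a", "b"), x = 1:2, y = 3:4)   # toy data frame
d[ , -1]                                              # keeps only the numeric columns x and y
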

3 Adding the other attribute information to the data frame

3.1 Topic

  • Where is the topic recorded?
    • Check the format of the data:
    @Topic: education
    @EnglishEssay:  4
    @SelfEval:  3
    @TopicEase: 3
    @EssayTraining: 5
    @Proctor:   1
    @Criterion: 6
  • The @Topic: line
  • How do we get it into our data?
  grep("@Topic:", lines, value=T)      # "lines" = the character vector read in with readLines()
  • Then post-process: delete the unneeded part (a small sketch follows below)
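
A minimal sketch of the two steps, applied to a made-up header line (the variable names are just placeholders):

yomikomi <- c("@Topic:\teducation", "@Criterion:\t6")   # hypothetical file contents
topic.tmp <- grep("@Topic:", yomikomi, value=T)         # keep only the @Topic: line
gsub("@Topic:\t", "", topic.tmp)                        # -> "education"
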

3.2 Score

  • Where is the score recorded?
  • Convert the data type: from character to numeric (a one-line example follows below)
  as.integer()
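
A one-line sketch of the conversion (the value is made up):

score <- "6"          # hypothetical score read from the file (character)
as.integer(score)     # -> 6, an integer that can be summarized numerically
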

3.3 Building an index data frame that includes Topic and Score: myIndex.df()

myIndex.df <- function(){                           # give your own function a distinct name

  topicV  <- NULL                                   # for Topic
  scoreV  <- NULL                                   # for Score

  fileV   <- NULL
  typeV   <- NULL
  tokenV  <- NULL
  TTRV    <- NULL
  GIV     <- NULL
  NoSV    <- NULL
  ASLV    <- NULL
  AWLV    <- NULL

  file.zenbu <- list.files()                        # all files in the working directory
  ruiseki <- ""                                     # accumulator string (not used in this function)

  for (i in file.zenbu){                            # loop over the files
    yomikomi <- readLines(i, warn=F)                # read one file, line by line

    topic.tmp <- grep("@Topic:", yomikomi, value=T)     # the @Topic: line
    topic <- gsub("@Topic:\t", "", topic.tmp)           # remove the unneeded part

    score.tmp <- grep("@Criterion", yomikomi, value=T)  # the score (@Criterion) line
    score <- gsub("@Criterion:\t", "", score.tmp)       # remove the unneeded part


    tmp1 <- grep("\\*(JPN|NS)", yomikomi, value=T)  # keep only the essay lines (*JPN / *NS)
    tmp2 <- gsub("\\*(JPN|NS)...:\t", "", tmp1)     # remove the participant tag (e.g. *JPN501:)
    tmp2b <- gsub("[[:punct:]]", "", tmp2)          # remove punctuation
    tmp2c <- tolower(tmp2b)                         # convert to lower case
    tmp3 <- strsplit(tmp2c, " ")                    # split into words at spaces
    tmp4 <- unlist(tmp3)                            # flatten the list into one vector
    tmp4 <- tmp4[tmp4 != ""]                        # drop empty strings
    token.list <- sort(tmp4)                        # sorted list of tokens
    type.list <- unique(token.list)                 # list of types (unique tokens)
    token <- length(token.list)                     # number of tokens
    type <- length(type.list)                       # number of types
    TTR <- type/token                               # type-token ratio
    GI <- type/sqrt(token)                          # Guiraud index
    NoS <- length(tmp1)                             # number of sentences (one per essay line)
    ASL <- token/NoS                                # average sentence length

    mojiretu <- paste(token.list, collapse="")      # one long string
    mojisuu <- nchar(mojiretu)                      # number of characters
    AWL <- mojisuu/token                            # average word length

    score <- as.integer(score)                      # convert score to an integer


    # build one vector per measure

    topicV  <- c(topicV, topic)                     # add Topic
    scoreV  <- c(scoreV, score)                     # add Score

    fileV   <- c(fileV, i)
    tokenV  <- c(tokenV, token)
    typeV   <- c(typeV, type)
    TTRV    <- c(TTRV, TTR)
    GIV     <- c(GIV, GI)
    NoSV    <- c(NoSV, NoS)
    ASLV    <- c(ASLV, ASL)
    AWLV    <- c(AWLV, AWL)
  }

  data.frame(fileV, topicV, scoreV, tokenV, typeV, TTRV, GIV, NoSV, ASLV, AWLV)  # Topic and Score columns added
}

3.3.1 Trying out myIndex.df()

setwd("NICER_NNS")

NNS.Index.df <- myIndex.df()

names(NNS.Index.df) <- c("ID", "Topic", "Score", "Token", "Type", "TTR", "GI", "NoS", "ASL", "AWL")     # rename the column headers as well

head(NNS.Index.df)
##           ID     Topic Score Token Type       TTR       GI NoS      ASL
## 1 JPN501.txt    sports     4   319  134 0.4200627 7.502560  30 10.63333
## 2 JPN502.txt education     4   351  158 0.4501425 8.433416  29 12.10345
## 3 JPN503.txt education     3   201  121 0.6019900 8.534682  13 15.46154
## 4 JPN504.txt    sports     4   260  139 0.5346154 8.620414  27  9.62963
## 5 JPN505.txt    sports     4   417  174 0.4172662 8.520817  25 16.68000
## 6 JPN506.txt     money     3   260  123 0.4730769 7.628136  20 13.00000
##        AWL
## 1 4.304075
## 2 4.293447
## 3 4.746269
## 4 4.765385
## 5 4.023981
## 6 4.088462

3.3.2 Looking at the whole data set

3.3.2.1 Overview: summary()

summary(NNS.Index.df)
##       ID               Topic               Score           Token      
##  Length:381         Length:381         Min.   :1.000   Min.   : 85.0  
##  Class :character   Class :character   1st Qu.:3.000   1st Qu.:209.0  
##  Mode  :character   Mode  :character   Median :3.000   Median :262.0  
##                                        Mean   :3.522   Mean   :275.4  
##                                        3rd Qu.:4.000   3rd Qu.:323.0  
##                                        Max.   :5.000   Max.   :728.0  
##                                        NA's   :2                      
##       Type            TTR               GI              NoS       
##  Min.   : 49.0   Min.   :0.2531   Min.   : 4.566   Min.   : 7.00  
##  1st Qu.:101.0   1st Qu.:0.4230   1st Qu.: 6.947   1st Qu.:17.00  
##  Median :122.0   Median :0.4699   Median : 7.502   Median :21.00  
##  Mean   :125.6   Mean   :0.4697   Mean   : 7.582   Mean   :22.07  
##  3rd Qu.:146.0   3rd Qu.:0.5141   3rd Qu.: 8.279   3rd Qu.:26.00  
##  Max.   :251.0   Max.   :0.6581   Max.   :10.443   Max.   :51.00  
##                                                                   
##       ASL             AWL       
##  Min.   : 6.96   Min.   :3.507  
##  1st Qu.:10.82   1st Qu.:4.163  
##  Median :12.20   Median :4.395  
##  Mean   :12.70   Mean   :4.419  
##  3rd Qu.:14.08   3rd Qu.:4.652  
##  Max.   :24.00   Max.   :5.415  
## 

3.3.2.2 Data structure: str()

str(NNS.Index.df)
## 'data.frame':    381 obs. of  10 variables:
##  $ ID   : chr  "JPN501.txt" "JPN502.txt" "JPN503.txt" "JPN504.txt" ...
##  $ Topic: chr  "sports" "education" "education" "sports" ...
##  $ Score: int  4 4 3 4 4 3 4 3 4 3 ...
##  $ Token: int  319 351 201 260 417 260 355 195 260 183 ...
##  $ Type : int  134 158 121 139 174 123 149 97 103 99 ...
##  $ TTR  : num  0.42 0.45 0.602 0.535 0.417 ...
##  $ GI   : num  7.5 8.43 8.53 8.62 8.52 ...
##  $ NoS  : int  30 29 13 27 25 20 26 20 19 14 ...
##  $ ASL  : num  10.63 12.1 15.46 9.63 16.68 ...
##  $ AWL  : num  4.3 4.29 4.75 4.77 4.02 ...

3.3.3 Converting data types: categorical variables become factors: as.factor()

  • Numeric: num, int
  • Character: chr
  • Categorical: factor
dataframe$variable <- as.factor(dataframe$variable)

3.3.3.1 Converting ID and Topic

NNS.Index.df$ID <- as.factor(NNS.Index.df$ID)
NNS.Index.df$Topic <- as.factor(NNS.Index.df$Topic)
summary(NNS.Index.df)
##           ID            Topic         Score           Token      
##  JPN501.txt:  1   education:145   Min.   :1.000   Min.   : 85.0  
##  JPN502.txt:  1   money    : 77   1st Qu.:3.000   1st Qu.:209.0  
##  JPN503.txt:  1   sports   :159   Median :3.000   Median :262.0  
##  JPN504.txt:  1                   Mean   :3.522   Mean   :275.4  
##  JPN505.txt:  1                   3rd Qu.:4.000   3rd Qu.:323.0  
##  JPN506.txt:  1                   Max.   :5.000   Max.   :728.0  
##  (Other)   :375                   NA's   :2                      
##       Type            TTR               GI              NoS       
##  Min.   : 49.0   Min.   :0.2531   Min.   : 4.566   Min.   : 7.00  
##  1st Qu.:101.0   1st Qu.:0.4230   1st Qu.: 6.947   1st Qu.:17.00  
##  Median :122.0   Median :0.4699   Median : 7.502   Median :21.00  
##  Mean   :125.6   Mean   :0.4697   Mean   : 7.582   Mean   :22.07  
##  3rd Qu.:146.0   3rd Qu.:0.5141   3rd Qu.: 8.279   3rd Qu.:26.00  
##  Max.   :251.0   Max.   :0.6581   Max.   :10.443   Max.   :51.00  
##                                                                   
##       ASL             AWL       
##  Min.   : 6.96   Min.   :3.507  
##  1st Qu.:10.82   1st Qu.:4.163  
##  Median :12.20   Median :4.395  
##  Mean   :12.70   Mean   :4.419  
##  3rd Qu.:14.08   3rd Qu.:4.652  
##  Max.   :24.00   Max.   :5.415  
## 
str(NNS.Index.df)
## 'data.frame':    381 obs. of  10 variables:
##  $ ID   : Factor w/ 381 levels "JPN501.txt","JPN502.txt",..: 1 2 3 4 5 6 7 8 9 10 ...
##  $ Topic: Factor w/ 3 levels "education","money",..: 3 1 1 3 3 2 1 3 3 1 ...
##  $ Score: int  4 4 3 4 4 3 4 3 4 3 ...
##  $ Token: int  319 351 201 260 417 260 355 195 260 183 ...
##  $ Type : int  134 158 121 139 174 123 149 97 103 99 ...
##  $ TTR  : num  0.42 0.45 0.602 0.535 0.417 ...
##  $ GI   : num  7.5 8.43 8.53 8.62 8.52 ...
##  $ NoS  : int  30 29 13 27 25 20 26 20 19 14 ...
##  $ ASL  : num  10.63 12.1 15.46 9.63 16.68 ...
##  $ AWL  : num  4.3 4.29 4.75 4.77 4.02 ...

3.4 Working with individual columns of the data frame

3.4.1 Example: distribution of the scores

hist(NNS.Index.df$Score)

3.4.2 Example: relationship between the number of sentences and the number of tokens

3.4.2.1 Scatter plot

plot(NNS.Index.df$NoS, NNS.Index.df$Token)

3.4.2.2 Correlation analysis: cor.test()

cor.test(NNS.Index.df$NoS, NNS.Index.df$Token)
## 
##  Pearson's product-moment correlation
## 
## data:  NNS.Index.df$NoS and NNS.Index.df$Token
## t = 23.697, df = 379, p-value < 2.2e-16
## alternative hypothesis: true correlation is not equal to 0
## 95 percent confidence interval:
##  0.7287991 0.8102583
## sample estimates:
##       cor 
## 0.7726906
  • The correlation coefficient is 0.77 (a strong correlation)
  • The p-value is close to zero (the chance that there is no correlation is essentially zero); a way to list all the pairwise correlations at once is sketched below
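
The Pearson correlations for every pair of indices can also be printed at once with cor() (a sketch; the column range assumes the data frame built above and skips the factor columns and Score, which contains NAs):

round(cor(NNS.Index.df[ , 4:10]), 2)
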

4 Building the linguistic index data frame for the native-speaker data: NS.Index.df

  1. myIndex.df()
  2. Rename the headers
  3. Convert the types
setwd("NICER_NS")

NS.Index.df <- myIndex.df()

names(NS.Index.df) <- c("ID", "Topic", "Score", "Token", "Type", "TTR", "GI", "NoS", "ASL", "AWL")     # rename the column headers as well

NS.Index.df$ID <- as.factor(NS.Index.df$ID)
NS.Index.df$Topic <- as.factor(NS.Index.df$Topic)

head(NS.Index.df)
##          ID     Topic Score Token Type       TTR       GI NoS      ASL      AWL
## 1 NS501.txt education     5   736  359 0.4877717 13.23292  39 18.87179 4.592391
## 2 NS502.txt education     6   636  340 0.5345912 13.48188  26 24.46154 5.201258
## 3 NS503.txt education     6   834  353 0.4232614 12.22339  22 37.90909 5.565947
## 4 NS504.txt education     6   824  336 0.4077670 11.70511  30 27.46667 5.276699
## 5 NS505.txt    sports     6   898  393 0.4376392 13.11458  39 23.02564 4.749443
## 6 NS506.txt education     6   829  339 0.4089264 11.77396  31 26.74194 4.460796

5 Looking at all the correlations, including Score: pairs()

5.1 The learners

pairs(NNS.Index.df[,3:10])

5.2 The native speakers

pairs(NS.Index.df[3:10])

5.3 A package that reports all the correlation coefficients at once (PerformanceAnalytics)

If you are using it for the first time, you need to install it on your computer.

install.packages("PerformanceAnalytics")

Once it is installed, all you need to do before using it is run library(PerformanceAnalytics).

library(PerformanceAnalytics)

5.3.1 chart.Correlation(data frame)

  • If the data frame contains any non-numeric variables, remove them first

5.3.1.1 Checking the data structure

str(NNS.Index.df)
## 'data.frame':    381 obs. of  10 variables:
##  $ ID   : Factor w/ 381 levels "JPN501.txt","JPN502.txt",..: 1 2 3 4 5 6 7 8 9 10 ...
##  $ Topic: Factor w/ 3 levels "education","money",..: 3 1 1 3 3 2 1 3 3 1 ...
##  $ Score: int  4 4 3 4 4 3 4 3 4 3 ...
##  $ Token: int  319 351 201 260 417 260 355 195 260 183 ...
##  $ Type : int  134 158 121 139 174 123 149 97 103 99 ...
##  $ TTR  : num  0.42 0.45 0.602 0.535 0.417 ...
##  $ GI   : num  7.5 8.43 8.53 8.62 8.52 ...
##  $ NoS  : int  30 29 13 27 25 20 26 20 19 14 ...
##  $ ASL  : num  10.63 12.1 15.46 9.63 16.68 ...
##  $ AWL  : num  4.3 4.29 4.75 4.77 4.02 ...

5.3.1.2 Learner data

chart.Correlation(NNS.Index.df[3:10])

5.3.1.3 Native-speaker data

chart.Correlation(NS.Index.df[3:10])

6 Correlation and causation

6.1 For causation, use regression analysis

y = ax + b
  • y is the outcome (the effect)

  • x is the cause (the predictor)

  • Draw a straight line through the scatter plot

plot(NNS.Index.df$Token, NNS.Index.df$Score)
abline(lm(Score ~ Token, data=NNS.Index.df))

  • The "equation" used to draw the line is called a "model"; since it is a straight line, this is a linear model

7 A single cause: simple regression analysis

  lm(outcome ~ cause, data = dataframe)

7.1 Building a model

model.1 <- lm(Score ~ Token, data = NNS.Index.df)

7.2 Looking at the model summary: summary()

summary(model.1)
## 
## Call:
## lm(formula = Score ~ Token, data = NNS.Index.df)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -1.57666 -0.27899  0.00791  0.30202  1.31169 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 1.662214   0.070355   23.63   <2e-16 ***
## Token       0.006751   0.000242   27.90   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.4369 on 377 degrees of freedom
##   ( 2 個の観測値が欠損のため削除されました )
## Multiple R-squared:  0.6737, Adjusted R-squared:  0.6728 
## F-statistic: 778.3 on 1 and 377 DF,  p-value: < 2.2e-16
  • Under Coefficients, Token is significant: Token has an effect
    • The Estimate for (Intercept) is the intercept, the b in y = ax + b
    • The Estimate for Token is the coefficient of Token, the a in y = ax + b
  • R^2 is the proportion of variance explained (coefficient of determination): how much of the outcome this model accounts for
    • Multiple R^2: the proportion explained by the simple regression
    • Adjusted R^2: the proportion explained adjusted for degrees of freedom (used for multiple regression)
    • The F-statistic tests the probability that the coefficients are 0 (= no effect); a p-value near zero means "no effect at all" is essentially impossible
  • These values can also be extracted directly from the summary object, as sketched below
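
A sketch of pulling the explained variance out of the summary object:

summary(model.1)$r.squared        # Multiple R-squared
summary(model.1)$adj.r.squared    # Adjusted R-squared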

7.3 Visualizing the effect with a graph: plot(allEffects())

install.packages("effects")
library(effects)

plot(allEffects(model.1))

8 Two or more causes: multiple regression (just add the predictors)

For example, a writer who writes a lot within the time limit (high Token) and uses difficult words (high AWL):

model.2 <- lm(Score ~ Token + AWL, data = NNS.Index.df)

summary(model.2)
## 
## Call:
## lm(formula = Score ~ Token + AWL, data = NNS.Index.df)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -1.45033 -0.25813  0.00101  0.26967  1.18702 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -0.7612546  0.2840958  -2.680   0.0077 ** 
## Token        0.0069188  0.0002217  31.214   <2e-16 ***
## AWL          0.5377749  0.0614107   8.757   <2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.3987 on 376 degrees of freedom
##   ( 2 個の観測値が欠損のため削除されました )
## Multiple R-squared:  0.729,  Adjusted R-squared:  0.7275 
## F-statistic: 505.6 on 2 and 376 DF,  p-value: < 2.2e-16
plot(allEffects(model.2))

8.1 Multiple regression

As an experiment, put in all of the predictors.

model.3 <- lm(Score ~ Token + Type + TTR + GI + NoS + ASL + AWL, data = NNS.Index.df)

summary(model.3)
## 
## Call:
## lm(formula = Score ~ Token + Type + TTR + GI + NoS + ASL + AWL, 
##     data = NNS.Index.df)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -1.23548 -0.25526 -0.00781  0.26377  1.48044 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -0.489784   0.625095  -0.784  0.43381    
## Token        0.002668   0.002036   1.310  0.19089    
## Type        -0.010717   0.009695  -1.105  0.26966    
## TTR         -6.650822   1.236955  -5.377 1.34e-07 ***
## GI           0.625503   0.216334   2.891  0.00406 ** 
## NoS          0.013683   0.013825   0.990  0.32294    
## ASL          0.043424   0.024018   1.808  0.07142 .  
## AWL          0.486207   0.060223   8.073 9.64e-15 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.3701 on 371 degrees of freedom
##   ( 2 個の観測値が欠損のため削除されました )
## Multiple R-squared:  0.7696, Adjusted R-squared:  0.7653 
## F-statistic:   177 on 7 and 371 DF,  p-value: < 2.2e-16

8.1.1 The problem of VIF (Variance Inflation Factor)

  • A VIF of 10 or more is not acceptable.

  • It means the predictors are highly correlated with each other, so their effects come out inflated.

  • The predictors are assumed to be "independent" of one another.

  • Drop one of the highly correlated predictors; think carefully about which one to drop.

  • A correlation coefficient of about 0.8 or below is a rough guideline.

      https://www.sugiura-ken.org/wiki/wiki.cgi/exp?page=VIF
  • Remove the unnecessary predictors, rebuild the model, check the VIFs again, and get them below 10.

  • Check them with vif() from the car package.

install.packages("car")
library(car)

vif(model.3)
##     Token      Type       TTR        GI       NoS       ASL       AWL 
##  98.72870 269.93935  18.13305 123.04640  25.66229  11.97882   1.12481

The VIFs here are far too large, so there is a problem; think it through carefully (and decide rationally, based on evidence).

  • Getting a good score simply by writing many words (high Token) is not very interesting, so we decide to drop Token.
  • TTR and GI are both calculated from Type, so they are correlated with it in any case; we decide to drop them and keep Type.

8.1.2 Trying the model we arrived at

model.4 <- lm(Score ~ Type + NoS + ASL + AWL, data = NNS.Index.df)

summary(model.4)
## 
## Call:
## lm(formula = Score ~ Type + NoS + ASL + AWL, data = NNS.Index.df)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -1.36867 -0.26243 -0.00216  0.25720  1.22204 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -2.163820   0.320158  -6.759 5.36e-11 ***
## Type         0.005424   0.001347   4.025 6.89e-05 ***
## NoS          0.063868   0.006150  10.385  < 2e-16 ***
## ASL          0.127821   0.011484  11.130  < 2e-16 ***
## AWL          0.445741   0.061970   7.193 3.49e-12 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.3867 on 374 degrees of freedom
##   ( 2 個の観測値が欠損のため削除されました )
## Multiple R-squared:  0.7465, Adjusted R-squared:  0.7437 
## F-statistic: 275.3 on 4 and 374 DF,  p-value: < 2.2e-16

8.1.3 Checking the VIFs

vif(model.4)
##     Type      NoS      ASL      AWL 
## 4.776499 4.651624 2.508681 1.090959

8.1.4 Visualizing the effects

plot(allEffects(model.4))

8.1.5 In other words

  Score = 0.005424*Type + 0.063868*NoS + 0.127821*ASL + 0.445741*AWL -2.163820

This is the relationship we obtain.

To see what it means, take JPN501 as an example.

  • Type 134
  • NoS 30
  • ASL 10.6
  • AWL 4.3

Given these values, computing

  Score = 0.005424*134 + 0.063868*30 + 0.127821*10.6 + 0.445741*4.3 -2.163820

gives the predicted Score: 3.750625
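
The same prediction can be obtained with predict() (a sketch, assuming model.4 is still in the workspace):

predict(model.4, newdata = data.frame(Type = 134, NoS = 30, ASL = 10.6, AWL = 4.3))
# should be close to the 3.750625 computed by hand above
# (small differences come from rounding the coefficients)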

8.2 Assessing the validity of the analysis: the easystats package

library(easystats)

8.2.1 All the model checks at once

check_model(model.4)

8.2.2 Checking multicollinearity

check_collinearity(model.4)
## # Check for Multicollinearity
## 
## Low Correlation
## 
##  Term  VIF   VIF 95% CI Increased SE Tolerance Tolerance 95% CI
##  Type 4.78 [4.02, 5.72]         2.19      0.21     [0.17, 0.25]
##   NoS 4.65 [3.92, 5.57]         2.16      0.21     [0.18, 0.26]
##   ASL 2.51 [2.16, 2.96]         1.58      0.40     [0.34, 0.46]
##   AWL 1.09 [1.03, 1.33]         1.04      0.92     [0.75, 0.98]
plot(check_collinearity(model.4))
## Variable `Component` is not in your data frame :/

8.2.3 Checking normality

check_normality(model.4)
## OK: residuals appear as normally distributed (p = 0.350).
plot(check_normality(model.4))

8.2.4 Reporting the results

report(model.4)
## We fitted a linear model (estimated using OLS) to predict Score with Type, NoS,
## ASL and AWL (formula: Score ~ Type + NoS + ASL + AWL). The model explains a
## statistically significant and substantial proportion of variance (R2 = 0.75,
## F(4, 374) = 275.27, p < .001, adj. R2 = 0.74). The model's intercept,
## corresponding to Type = 0, NoS = 0, ASL = 0 and AWL = 0, is at -2.16 (95% CI
## [-2.79, -1.53], t(374) = -6.76, p < .001). Within this model:
## 
##   - The effect of Type is statistically significant and positive (beta =
## 5.42e-03, 95% CI [2.77e-03, 8.07e-03], t(374) = 4.03, p < .001; Std. beta =
## 0.23, 95% CI [0.12, 0.34])
##   - The effect of NoS is statistically significant and positive (beta = 0.06, 95%
## CI [0.05, 0.08], t(374) = 10.39, p < .001; Std. beta = 0.58, 95% CI [0.47,
## 0.69])
##   - The effect of ASL is statistically significant and positive (beta = 0.13, 95%
## CI [0.11, 0.15], t(374) = 11.13, p < .001; Std. beta = 0.46, 95% CI [0.38,
## 0.54])
##   - The effect of AWL is statistically significant and positive (beta = 0.45, 95%
## CI [0.32, 0.57], t(374) = 7.19, p < .001; Std. beta = 0.20, 95% CI [0.14,
## 0.25])
## 
## Standardized parameters were obtained by fitting the model on a standardized
## version of the dataset. 95% Confidence Intervals (CIs) and p-values were
## computed using a Wald t-distribution approximation.

8.2.5 A summary table of the results

report_table(model.4)
## Parameter   | Coefficient |         95% CI | t(374) |      p | Std. Coef. | Std. Coef. 95% CI |    Fit
## ------------------------------------------------------------------------------------------------------
## (Intercept) |       -2.16 | [-2.79, -1.53] |  -6.76 | < .001 |   1.33e-15 |     [-0.05, 0.05] |       
## Type        |    5.42e-03 | [ 0.00,  0.01] |   4.03 | < .001 |       0.23 |     [ 0.12, 0.34] |       
## NoS         |        0.06 | [ 0.05,  0.08] |  10.39 | < .001 |       0.58 |     [ 0.47, 0.69] |       
## ASL         |        0.13 | [ 0.11,  0.15] |  11.13 | < .001 |       0.46 |     [ 0.38, 0.54] |       
## AWL         |        0.45 | [ 0.32,  0.57] |   7.19 | < .001 |       0.20 |     [ 0.14, 0.25] |       
##             |             |                |        |        |            |                   |       
## AIC         |             |                |        |        |            |                   | 362.31
## AICc        |             |                |        |        |            |                   | 362.54
## BIC         |             |                |        |        |            |                   | 385.94
## R2          |             |                |        |        |            |                   |   0.75
## R2 (adj.)   |             |                |        |        |            |                   |   0.74
## Sigma       |             |                |        |        |            |                   |   0.39

9 GLM: Generalized Linear Model

Each participant contributes only one observation (so no random effects are needed).

Check the distribution of the response variable:

  1. If it is normally distributed: gaussian (multiple regression corresponds to this case)
  2. If it is a positive continuous value: Gamma
  3. If it is a positive integer (a count): poisson
  4. If it is binary, such as correct/incorrect: binomial

9.1 Usage: an "extension" of multiple regression

Specify the distribution of the response variable in the form family = distribution (a toy sketch follows below).
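
A toy example of the pattern (the data here are made up, purely to show where family= goes):

toy <- data.frame(x = 1:10, y = c(1, 0, 2, 1, 3, 2, 4, 3, 5, 4))   # hypothetical counts
toy.glm <- glm(y ~ x, data = toy, family = poisson)                # Poisson GLM for a count response
summary(toy.glm)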

9.2 Packages used

library(tidyverse)
library(openxlsx)
library(lme4)
library(lmerTest)
library(MuMIn)
library(effects)
library(ggplot2)
library(easystats)

9.3 The score is a positive integer, so use the poisson distribution

  • Use glm() instead of lm()
  • Specify the distribution with family = poisson
model.glm.1 <- glm(Score ~ Type + NoS + ASL + AWL, data = NNS.Index.df, family = poisson)

summary(model.glm.1)
## 
## Call:
## glm(formula = Score ~ Type + NoS + ASL + AWL, family = poisson, 
##     data = NNS.Index.df)
## 
## Coefficients:
##              Estimate Std. Error z value Pr(>|z|)  
## (Intercept) -0.323186   0.439331  -0.736   0.4620  
## Type         0.001548   0.001759   0.880   0.3790  
## NoS          0.016669   0.007929   2.102   0.0355 *
## ASL          0.033741   0.015088   2.236   0.0253 *
## AWL          0.130021   0.085635   1.518   0.1289  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for poisson family taken to be 1)
## 
##     Null deviance: 64.327  on 378  degrees of freedom
## Residual deviance: 19.251  on 374  degrees of freedom
##   ( 2 個の観測値が欠損のため削除されました )
## AIC: 1212.2
## 
## Number of Fisher Scoring iterations: 4

9.4 Plotting the results

plot(allEffects(model.glm.1))

9.5 Keeping only the significant variables to build the best model

model.glm.1.best <- glm(Score ~ NoS + ASL, data = NNS.Index.df, family = poisson)

summary(model.glm.1.best)
## 
## Call:
## glm(formula = Score ~ NoS + ASL, family = poisson, data = NNS.Index.df)
## 
## Coefficients:
##             Estimate Std. Error z value Pr(>|z|)    
## (Intercept) 0.189812   0.174077   1.090    0.276    
## NoS         0.021647   0.003790   5.712 1.12e-08 ***
## ASL         0.045374   0.009862   4.601 4.21e-06 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for poisson family taken to be 1)
## 
##     Null deviance: 64.327  on 378  degrees of freedom
## Residual deviance: 23.001  on 376  degrees of freedom
##   ( 2 個の観測値が欠損のため削除されました )
## AIC: 1211.9
## 
## Number of Fisher Scoring iterations: 4

9.6 Plotting the best model

plot(allEffects(model.glm.1.best))

10 GLMM: Generalized Linear Mixed Model

summary(NNS.Index.df)
##           ID            Topic         Score           Token      
##  JPN501.txt:  1   education:145   Min.   :1.000   Min.   : 85.0  
##  JPN502.txt:  1   money    : 77   1st Qu.:3.000   1st Qu.:209.0  
##  JPN503.txt:  1   sports   :159   Median :3.000   Median :262.0  
##  JPN504.txt:  1                   Mean   :3.522   Mean   :275.4  
##  JPN505.txt:  1                   3rd Qu.:4.000   3rd Qu.:323.0  
##  JPN506.txt:  1                   Max.   :5.000   Max.   :728.0  
##  (Other)   :375                   NA's   :2                      
##       Type            TTR               GI              NoS       
##  Min.   : 49.0   Min.   :0.2531   Min.   : 4.566   Min.   : 7.00  
##  1st Qu.:101.0   1st Qu.:0.4230   1st Qu.: 6.947   1st Qu.:17.00  
##  Median :122.0   Median :0.4699   Median : 7.502   Median :21.00  
##  Mean   :125.6   Mean   :0.4697   Mean   : 7.582   Mean   :22.07  
##  3rd Qu.:146.0   3rd Qu.:0.5141   3rd Qu.: 8.279   3rd Qu.:26.00  
##  Max.   :251.0   Max.   :0.6581   Max.   :10.443   Max.   :51.00  
##                                                                   
##       ASL             AWL       
##  Min.   : 6.96   Min.   :3.507  
##  1st Qu.:10.82   1st Qu.:4.163  
##  Median :12.20   Median :4.395  
##  Mean   :12.70   Mean   :4.419  
##  3rd Qu.:14.08   3rd Qu.:4.652  
##  Max.   :24.00   Max.   :5.415  
## 

10.1 Examining the distribution

install.packages("fitdistplus")
library(fitdistrplus)
##  要求されたパッケージ MASS をロード中です
## 
##  次のパッケージを付け加えます: 'MASS'
##  以下のオブジェクトは 'package:dplyr' からマスクされています:
## 
##     select
##  要求されたパッケージ survival をロード中です

10.1.1 Checking the data

summary(NNS.Index.df)
##           ID            Topic         Score           Token      
##  JPN501.txt:  1   education:145   Min.   :1.000   Min.   : 85.0  
##  JPN502.txt:  1   money    : 77   1st Qu.:3.000   1st Qu.:209.0  
##  JPN503.txt:  1   sports   :159   Median :3.000   Median :262.0  
##  JPN504.txt:  1                   Mean   :3.522   Mean   :275.4  
##  JPN505.txt:  1                   3rd Qu.:4.000   3rd Qu.:323.0  
##  JPN506.txt:  1                   Max.   :5.000   Max.   :728.0  
##  (Other)   :375                   NA's   :2                      
##       Type            TTR               GI              NoS       
##  Min.   : 49.0   Min.   :0.2531   Min.   : 4.566   Min.   : 7.00  
##  1st Qu.:101.0   1st Qu.:0.4230   1st Qu.: 6.947   1st Qu.:17.00  
##  Median :122.0   Median :0.4699   Median : 7.502   Median :21.00  
##  Mean   :125.6   Mean   :0.4697   Mean   : 7.582   Mean   :22.07  
##  3rd Qu.:146.0   3rd Qu.:0.5141   3rd Qu.: 8.279   3rd Qu.:26.00  
##  Max.   :251.0   Max.   :0.6581   Max.   :10.443   Max.   :51.00  
##                                                                   
##       ASL             AWL       
##  Min.   : 6.96   Min.   :3.507  
##  1st Qu.:10.82   1st Qu.:4.163  
##  Median :12.20   Median :4.395  
##  Mean   :12.70   Mean   :4.419  
##  3rd Qu.:14.08   3rd Qu.:4.652  
##  Max.   :24.00   Max.   :5.415  
## 
  • Score contains two NAs.

10.1.2 Removing the NAs

NNS.Index.df2 <- na.omit(NNS.Index.df)

summary(NNS.Index.df2)
##           ID            Topic         Score           Token      
##  JPN501.txt:  1   education:145   Min.   :1.000   Min.   : 85.0  
##  JPN502.txt:  1   money    : 77   1st Qu.:3.000   1st Qu.:209.0  
##  JPN503.txt:  1   sports   :157   Median :3.000   Median :262.0  
##  JPN504.txt:  1                   Mean   :3.522   Mean   :275.6  
##  JPN505.txt:  1                   3rd Qu.:4.000   3rd Qu.:322.5  
##  JPN506.txt:  1                   Max.   :5.000   Max.   :728.0  
##  (Other)   :373                                                  
##       Type            TTR               GI              NoS       
##  Min.   : 49.0   Min.   :0.2531   Min.   : 4.566   Min.   : 7.00  
##  1st Qu.:101.0   1st Qu.:0.4232   1st Qu.: 6.952   1st Qu.:17.00  
##  Median :122.0   Median :0.4699   Median : 7.503   Median :21.00  
##  Mean   :125.7   Mean   :0.4698   Mean   : 7.586   Mean   :22.08  
##  3rd Qu.:146.0   3rd Qu.:0.5137   3rd Qu.: 8.283   3rd Qu.:26.00  
##  Max.   :251.0   Max.   :0.6581   Max.   :10.443   Max.   :51.00  
##                                                                   
##       ASL             AWL       
##  Min.   : 6.96   Min.   :3.507  
##  1st Qu.:10.81   1st Qu.:4.163  
##  Median :12.21   Median :4.395  
##  Mean   :12.71   Mean   :4.420  
##  3rd Qu.:14.11   3rd Qu.:4.652  
##  Max.   :24.00   Max.   :5.415  
## 

10.2 Checking the distribution: descdist()

  • Add the option boot=500
descdist(NNS.Index.df2$Score, boot=500)

## summary statistics
## ------
## min:  1   max:  5 
## median:  3 
## mean:  3.522427 
## estimated sd:  0.7638654 
## estimated skewness:  0.01361058 
## estimated kurtosis:  3.021945
  • The scores can be treated as normally distributed (an optional cross-check follows below)
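
As an optional cross-check (a sketch; this step is not part of the original workflow), fitdist() from the same fitdistrplus package fits a normal distribution explicitly and plots the diagnostic panels:

fit.norm <- fitdist(NNS.Index.df2$Score, "norm")   # fit a normal distribution to Score
plot(fit.norm)                                     # histogram/density, CDF, Q-Q and P-P plots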

10.3 No random effects are included, so glm() is enough

  • For a normal distribution: family = gaussian
model.glm.1 <- glm(Score ~ Type + NoS + ASL + AWL, data = NNS.Index.df2, family = gaussian)

summary(model.glm.1)
## 
## Call:
## glm(formula = Score ~ Type + NoS + ASL + AWL, family = gaussian, 
##     data = NNS.Index.df2)
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -2.163820   0.320158  -6.759 5.36e-11 ***
## Type         0.005424   0.001347   4.025 6.89e-05 ***
## NoS          0.063868   0.006150  10.385  < 2e-16 ***
## ASL          0.127821   0.011484  11.130  < 2e-16 ***
## AWL          0.445741   0.061970   7.193 3.49e-12 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for gaussian family taken to be 0.1495256)
## 
##     Null deviance: 220.559  on 378  degrees of freedom
## Residual deviance:  55.923  on 374  degrees of freedom
## AIC: 362.31
## 
## Number of Fisher Scoring iterations: 2

10.3.1 Adding interactions:
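
In R's formula syntax, a * between predictors adds the main effects plus all of their interactions. A sketch with two predictors (m.a and m.b are throwaway names) confirms the equivalence:

m.a <- glm(Score ~ NoS * ASL, data = NNS.Index.df2, family = gaussian)
m.b <- glm(Score ~ NoS + ASL + NoS:ASL, data = NNS.Index.df2, family = gaussian)
all.equal(coef(m.a), coef(m.b))    # TRUE: the two formulas fit the same model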

model.glm.2 <- glm(Score ~ Type * NoS * ASL * AWL, data = NNS.Index.df2, family = gaussian)

summary(model.glm.2)
## 
## Call:
## glm(formula = Score ~ Type * NoS * ASL * AWL, family = gaussian, 
##     data = NNS.Index.df2)
## 
## Coefficients:
##                    Estimate Std. Error t value Pr(>|t|)  
## (Intercept)      -2.519e+01  1.403e+01  -1.796   0.0733 .
## Type              1.536e-01  1.204e-01   1.276   0.2026  
## NoS               1.393e+00  6.831e-01   2.040   0.0421 *
## ASL               1.857e+00  1.019e+00   1.823   0.0691 .
## AWL               5.377e+00  3.244e+00   1.657   0.0983 .
## Type:NoS         -7.408e-03  4.973e-03  -1.490   0.1371  
## Type:ASL         -9.109e-03  8.453e-03  -1.078   0.2819  
## NoS:ASL          -1.127e-01  5.283e-02  -2.133   0.0336 *
## Type:AWL         -2.989e-02  2.767e-02  -1.080   0.2807  
## NoS:AWL          -2.969e-01  1.570e-01  -1.891   0.0595 .
## ASL:AWL          -3.863e-01  2.355e-01  -1.641   0.1017  
## Type:NoS:ASL      5.509e-04  3.650e-04   1.510   0.1320  
## Type:NoS:AWL      1.567e-03  1.143e-03   1.371   0.1712  
## Type:ASL:AWL      1.889e-03  1.946e-03   0.971   0.3323  
## NoS:ASL:AWL       2.614e-02  1.211e-02   2.158   0.0316 *
## Type:NoS:ASL:AWL -1.221e-04  8.400e-05  -1.453   0.1470  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for gaussian family taken to be 0.1369537)
## 
##     Null deviance: 220.559  on 378  degrees of freedom
## Residual deviance:  49.714  on 363  degrees of freedom
## AIC: 339.71
## 
## Number of Fisher Scoring iterations: 2

10.4 Model selection: dredge() from the MuMIn package

library(MuMIn)
options(na.action = "na.fail")

dredge(model.glm.2, rank="AIC")
## Fixed term is "(Intercept)"
## Global model call: glm(formula = Score ~ Type * NoS * ASL * AWL, family = gaussian, 
##     data = NNS.Index.df2)
## ---
## Model selection table 
##            (Int)     ASL       AWL        NoS        Typ    ASL:AWL   ASL:NoS
## 624    -3.182000 0.11930  0.464700  0.0619000  0.0196000             0.004805
## 752    -2.158000 0.12410  0.230100  0.0141800  0.0189200             0.004523
## 1792   -8.813000 0.63180  1.765000  0.3610000  0.0183700 -0.1157000 -0.022530
## 880    -2.262000 0.12160  0.258100  0.0599100  0.0119400             0.004868
## 2048   -8.944000 0.60020  1.798000  0.3423000  0.0355900 -0.1089000 -0.026990
## 4720   -3.659000 0.15550  0.464400  0.0865900  0.0231900             0.002877
## 640    -3.484000 0.14280  0.531700  0.0630400  0.0194400 -0.0051990  0.004712
## 5888   -9.252000 0.67280  1.716000  0.3890000  0.0232700 -0.1137000 -0.025100
## 4848   -2.725000 0.17330  0.212200  0.0438900  0.0237100             0.001904
## 768    -1.920000 0.10880  0.177100  0.0115900  0.0190000  0.0034120  0.004573
## 1008   -2.139000 0.12410  0.226100  0.0154000  0.0185500             0.004534
## 4976   -2.776000 0.16650  0.241000  0.0901800  0.0157500             0.002497
## 896    -2.799000 0.17580  0.376800  0.0621800  0.0103200 -0.0118800  0.004666
## 4096   -6.916000 0.44180  1.330000  0.4551000  0.0005696 -0.0724600 -0.035980
## 6144   -9.443000 0.64350  1.746000  0.3721000  0.0423700 -0.1061000 -0.030200
## 10240  -8.481000 0.60280  1.688000  0.3207000  0.0323000 -0.1093000 -0.027460
## 4736   -3.911000 0.17520  0.522800  0.0869800  0.0229600 -0.0045310  0.002843
## 1728   -9.121000 0.65940  1.955000  0.3938000  0.0106000 -0.1301000 -0.025370
## 560    -2.582000 0.07901  0.462500  0.0807200  0.0114900             0.002929
## 4864   -2.385000 0.15190  0.131300  0.0410300  0.0239900  0.0051710  0.001887
## 5104   -2.695000 0.17340  0.205400  0.0460200  0.0230900             0.001916
## 688    -1.475000 0.08547  0.204900  0.0277500  0.0109900             0.002674
## 8192   -7.267000 0.47400  1.225000  0.4994000  0.0045930 -0.0657600 -0.040430
## 9200   -2.695000 0.12320  0.358300  0.0419200  0.0229900             0.004535
## 1024   -1.781000 0.09227  0.145400  0.0008868  0.0214800  0.0071270  0.004557
## 4992   -3.283000 0.21780  0.356000  0.0916800  0.0140800 -0.0114800  0.002356
## 1984   -9.270000 0.61850  1.988000  0.3688000  0.0324600 -0.1209000 -0.030810
## 2944   -5.108000 0.34880  0.909100  0.0608300  0.0299500 -0.0515200  0.004738
## 12288  -5.971000 0.43570  1.106000  0.4249000 -0.0075300 -0.0707800 -0.037410
## 14336  -9.144000 0.64450  1.677000  0.3580000  0.0402000 -0.1064000 -0.030450
## 576    -3.520000 0.15170  0.662400  0.0829800  0.0115200 -0.0155100  0.002765
## 816    -1.906000 0.07954  0.314500  0.0798400  0.0057820             0.002920
## 944    -1.754000 0.08716  0.260700  0.0105500  0.0163600             0.002596
## 13296  -3.496000 0.17460  0.388300  0.0844200  0.0295000             0.001787
## 5120   -2.192000 0.12880  0.084060  0.0261200  0.0277100  0.0106200  0.001803
## 704    -1.992000 0.11850  0.316700  0.0328200  0.0110400 -0.0071610  0.002618
## 3072   -4.432000 0.28990  0.757200 -0.0100600  0.0470400 -0.0381600  0.004628
## 32768 -25.190000 1.85700  5.377000  1.3930000  0.1536000 -0.3863000 -0.112700
## 16384  -6.500000 0.46830  1.048000  0.4741000 -0.0020090 -0.0645600 -0.041470
## 7040   -5.362000 0.37330  0.844200  0.0884800  0.0317900 -0.0477500  0.002576
## 9216   -2.320000 0.09700  0.273000  0.0261800  0.0247900  0.0058920  0.004554
## 832    -2.911000 0.18130  0.526900  0.0825900  0.0032420 -0.0216600  0.002687
## 10176  -9.418000 0.61760  2.023000  0.3757000  0.0335400 -0.1207000 -0.030650
## 7168   -4.655000 0.31210  0.660500  0.0145400  0.0513400 -0.0320000  0.002013
## 9136   -2.966000 0.08600  0.546400  0.0680900  0.0260400             0.002641
## 13312  -2.953000 0.13660  0.258800  0.0619600  0.0325000  0.0090190  0.001710
## 960    -1.649000 0.07787  0.237300  0.0063300  0.0172100  0.0020700  0.002600
## 11264  -5.047000 0.29670  0.902300  0.0176400  0.0508800 -0.0398900  0.004625
## 656    -1.618000 0.13530  0.107900  0.0354600  0.0112800                     
## 528    -3.132000 0.13300  0.443400  0.1080000  0.0120000                     
## 912    -2.120000 0.13570  0.215200  0.0035790  0.0210900                     
## 15360  -5.479000 0.32190  0.849800  0.0521900  0.0566000 -0.0340700  0.001917
## 672    -2.752000 0.20590  0.359200  0.0462700  0.0113800 -0.0158000          
## 9152   -3.027000 0.09027  0.560000  0.0706100  0.0257500 -0.0009527  0.002640
## 544    -4.694000 0.25300  0.789700  0.1092000  0.0119900 -0.0267300          
## 720    -1.743000 0.14690  0.101200  0.0341400  0.0125800                     
## 800    -3.921000 0.28520  0.622300  0.1079000  0.0020130 -0.0337400          
## 784    -2.420000 0.13330  0.288000  0.1070000  0.0059970                     
## 592    -3.245000 0.14190  0.442200  0.1078000  0.0130000                     
## 976    -2.212000 0.14590  0.205600  0.0035290  0.0218900                     
## 9104   -2.786000 0.13550  0.370800  0.0349400  0.0264200                     
## 928    -2.275000 0.14940  0.250000  0.0098660  0.0198200 -0.0030740          
## 736    -2.823000 0.21390  0.343800  0.0446700  0.0125500 -0.0152100          
## 608    -4.777000 0.25950  0.785400  0.1091000  0.0128600 -0.0264800          
## 864    -4.006000 0.29470  0.611500  0.1076000  0.0028450 -0.0336300          
## 848    -2.515000 0.14400  0.277900  0.1067000  0.0068570                     
## 9168   -2.733000 0.14510  0.329500  0.0283700  0.0260600                     
## 992    -2.359000 0.15880  0.238600  0.0094800  0.0206900 -0.0029090          
## 9120   -3.098000 0.15740  0.441600  0.0480300  0.0249100 -0.0049140          
## 2912   -4.950000 0.36560  0.828800  0.1074000  0.0107600 -0.0498300          
## 3040   -3.974000 0.27950  0.611000  0.0029130  0.0362000 -0.0305100          
## 9184   -3.013000 0.16450  0.393400  0.0401800  0.0247100 -0.0044010          
## 11232  -4.688000 0.28740  0.779700  0.0350900  0.0406800 -0.0325100          
## 1280   -9.217000 0.80090  2.071000  0.4228000  0.0093440 -0.1539000 -0.034260
## 1216   -9.410000 0.81100  2.188000  0.4423000  0.0044300 -0.1619000 -0.035610
## 1536   -9.343000 0.77100  2.103000  0.4051000  0.0257200 -0.1475000 -0.038530
## 1472   -9.546000 0.77510  2.218000  0.4201000  0.0240800 -0.1538000 -0.040560
## 3584   -7.514000 0.62850  1.682000  0.5071000 -0.0059070 -0.1147000 -0.046670
## 240    -0.255800 0.12610  0.023360 -0.0572800  0.0090730             0.002749
## 176     0.090520 0.10090  0.017580 -0.0447600  0.0044180             0.001637
## 144    -0.105000 0.13160 -0.031820 -0.0350000  0.0050420                     
## 368    -0.137400 0.12230  0.007903  0.0269600 -0.0065260             0.003397
## 496     0.004104 0.12590 -0.034920 -0.0384200  0.0036140             0.002929
## 256     0.482000 0.07798 -0.142300 -0.0650100  0.0093730  0.0107500  0.002918
## 192     0.315800 0.08611 -0.031590 -0.0468000  0.0044150  0.0031820  0.001665
## 432     0.170000 0.10030  0.001223 -0.0392500  0.0028030             0.001665
## 208    -0.200300 0.14080 -0.037340 -0.0361700  0.0060520                     
## 304     0.019650 0.09136  0.061530  0.0432100 -0.0101500             0.002039
## 400    -0.184900 0.13170 -0.013760 -0.0409600  0.0067270                     
## 160    -0.322700 0.14560  0.017270 -0.0331600  0.0050350 -0.0031230          
## 384    -1.053000 0.21220  0.207700  0.0311100 -0.0090180 -0.0197100  0.003078
## 320    -1.215000 0.21430  0.321100  0.0469100 -0.0130600 -0.0262000  0.001766
## 112    -2.061000 0.11690  0.460100  0.0286000  0.0094030             0.003121
## 2432   -5.394000 0.53430  1.204000  0.0291900  0.0279900 -0.0935200  0.003242
## 48     -1.725000 0.09093  0.458900  0.0423700  0.0046300             0.001984
## 288    -2.003000 0.28270  0.398200  0.0662300 -0.0129000 -0.0341200          
## 512     0.274600 0.10200 -0.095720 -0.0493600  0.0058140  0.0053640  0.002946
## 272    -0.482700 0.12920  0.059930  0.0652600 -0.0088900                     
## 448     0.268600 0.09154 -0.020810 -0.0432300  0.0036010  0.0019500  0.001668
## 464    -0.263700 0.14060 -0.022360 -0.0410400  0.0074130                     
## 224    -0.380000 0.15220  0.003764 -0.0346100  0.0060250 -0.0026070          
## 416    -0.257800 0.13810  0.002659 -0.0379900  0.0061270 -0.0014520          
## 2560   -4.465000 0.45130  0.992500 -0.0675500  0.0514100 -0.0747300  0.003110
## 16     -2.164000 0.12780  0.445700  0.0638700  0.0054240                     
## 1208   -9.227000 0.80860  2.165000  0.4357000            -0.1599000 -0.034390
## 128    -2.394000 0.14280  0.533700  0.0298600  0.0092320 -0.0057130  0.003018
## 64     -2.464000 0.14850  0.617000  0.0440300  0.0046270 -0.0122700  0.001851
## 352    -2.076000 0.29110  0.388600  0.0659800 -0.0121900 -0.0340100          
## 336    -0.564500 0.13860  0.050830  0.0650000 -0.0081480                     
## 32     -3.329000 0.21830  0.707000  0.0642200  0.0053320 -0.0201600          
## 480    -0.329200 0.14640 -0.007571 -0.0383800  0.0068720 -0.0013060          
## 80     -2.224000 0.13270  0.445100  0.0637200  0.0059630                     
## 2400   -5.251000 0.52680  1.115000  0.0659400  0.0144200 -0.0878700          
## 2528   -4.139000 0.42720  0.864800 -0.0524000  0.0433100 -0.0655200          
## 96     -3.370000 0.22160  0.704700  0.0641000  0.0057620 -0.0200200          
## 168     0.141900 0.10750  0.023920 -0.0481900                        0.002671
## 184     0.452300 0.08714 -0.043860 -0.0510000             0.0043860  0.002709
## 40     -1.842000 0.09696  0.506900  0.0469200                        0.003104
## 136    -0.195300 0.16420 -0.062960 -0.0315900                                
## 56     -2.596000 0.15570  0.668400  0.0486100            -0.0125300  0.002968
## 152    -0.652100 0.19340  0.040200 -0.0277400            -0.0065570          
## 8      -2.635000 0.16270  0.498400  0.0855100                                
## 24     -4.159000 0.28100  0.841100  0.0854900            -0.0265300          
## 622    -1.230000 0.12720            0.0610000  0.0215500             0.003734
## 4718   -1.782000 0.16910            0.0895100  0.0257000             0.001509
## 558    -0.668900 0.08893            0.0788900  0.0138500             0.001957
## 526    -1.095000 0.12520            0.0974200  0.0141200                     
## 590    -1.354000 0.14470            0.0971300  0.0163000                     
## 14     -0.102700 0.11990            0.0526400  0.0074700                     
## 46      0.157900 0.10060            0.0412400  0.0070870             0.001036
## 110    -0.150800 0.12480            0.0283900  0.0115500             0.002095
## 78     -0.303700 0.13540            0.0522200  0.0091660                     
## 38      0.289200 0.11200            0.0484100                        0.002691
## 6      -0.431500 0.16880            0.0819200                                
## 12     -0.452700 0.03999  0.282400             0.0176600                     
## 76     -0.979900 0.08136  0.280200             0.0218200                     
## 268    -0.121300 0.03990  0.207300             0.0149500                     
## 28     -0.795300 0.06666  0.359500             0.0176500 -0.0059730          
## 332    -0.489300 0.08277  0.164500             0.0178100                     
## 92     -1.257000 0.10290  0.343200             0.0218000 -0.0048760          
## 284    -0.508900 0.07882  0.294400             0.0140100 -0.0087270          
## 348    -0.875200 0.12150  0.251200             0.0168800 -0.0086900          
## 2396   -4.488000 0.38980  1.078000             0.0471300 -0.0699800          
## 527    -1.233000          0.376300  0.0450000  0.0219900                     
## 655    -0.874200          0.298200  0.0279300  0.0218600                     
## 783    -1.131000          0.354200  0.0448300  0.0211400                     
## 911    -1.045000          0.335100  0.0170200  0.0252300                     
## 9103   -3.635000          0.938700  0.1387000  0.0459100                     
## 15     -0.556200          0.380000  0.0134700  0.0167200                     
## 143     0.306400          0.183700 -0.0276300  0.0167000                     
## 271     0.342900          0.175400  0.0139100  0.0092150                     
## 74      0.164400 0.08942                       0.0220500                     
## 10      0.739200 0.04512                       0.0175900                     
## 399     0.436000          0.154200 -0.0179500  0.0139600                     
## 11     -0.276800          0.325600             0.0187800                     
## 267     0.245100          0.207000             0.0145100                     
## 525     0.416500                    0.0390900  0.0233100                     
## 9       1.151000                               0.0188700                     
## 13      1.132000                    0.0064760  0.0178900                     
## 7      -0.795400          0.627900  0.0698500                                
## 135     0.396700          0.356300  0.0129900                                
## 5       2.098000                    0.0645400                                
## 4       1.040000 0.11010  0.245200                                           
## 20     -0.662800 0.24220  0.627900                       -0.0296300          
## 2       2.070000 0.11430                                                     
## 3       1.879000          0.371700                                           
## 1       3.522000                                                             
##          ASL:Typ   AWL:NoS    AWL:Typ    NoS:Typ ASL:AWL:NoS ASL:AWL:Typ
## 624   -5.933e-04                      -3.595e-04                        
## 752   -5.776e-04  0.011230            -3.418e-04                        
## 1792  -5.701e-04 -0.068730            -3.198e-04    0.006166            
## 880   -6.173e-04            1.711e-03 -3.460e-04                        
## 2048  -5.488e-04 -0.064300 -3.930e-03 -3.203e-04    0.007155            
## 4720  -8.582e-04                      -5.340e-04                        
## 640   -5.812e-04                      -3.594e-04                        
## 5888  -9.352e-04 -0.067470            -5.571e-04    0.006151            
## 4848  -9.334e-04  0.012060            -5.757e-04                        
## 768   -5.849e-04  0.011660            -3.412e-04                        
## 1008  -5.792e-04  0.010910  8.766e-05 -3.416e-04                        
## 4976  -9.457e-04            1.850e-03 -5.601e-04                        
## 896   -5.936e-04            1.995e-03 -3.437e-04                        
## 4096   2.150e-03 -0.089660  4.031e-03 -3.212e-04    0.009170  -0.0006108
## 6144  -9.543e-04 -0.062550 -4.232e-03 -5.850e-04    0.007214            
## 10240 -5.541e-04 -0.059230 -3.145e-03 -1.388e-04    0.007262            
## 4736  -8.411e-04                      -5.296e-04                        
## 1728             -0.071770            -2.953e-04    0.006379            
## 560                                   -3.333e-04                        
## 4864  -9.571e-04  0.012750            -5.831e-04                        
## 5104  -9.370e-04  0.011540  1.467e-04 -5.760e-04                        
## 688               0.012330            -3.147e-04                        
## 8192   1.970e-03 -0.090250  4.491e-03 -6.144e-04    0.009436  -0.0006718
## 9200  -5.734e-04  0.004685 -9.640e-04 -5.413e-04                        
## 1024  -5.822e-04  0.014140 -5.626e-04 -3.418e-04                        
## 4992  -9.153e-04            2.121e-03 -5.529e-04                        
## 1984             -0.066090 -4.908e-03 -2.971e-04    0.007604            
## 2944  -2.046e-03           -2.491e-03 -3.401e-04               0.0003295
## 12288  2.321e-03 -0.082500  5.937e-03 -3.851e-06    0.009492  -0.0006518
## 14336 -9.520e-04 -0.059370 -3.731e-03 -4.665e-04    0.007281            
## 576                                   -3.347e-04                        
## 816                         1.225e-03 -3.230e-04                        
## 944               0.016540 -1.189e-03 -3.185e-04                        
## 13296 -9.467e-04  0.002911 -1.313e-03 -8.654e-04                        
## 5120  -9.615e-04  0.016380 -8.186e-04 -5.894e-04                        
## 704               0.011400            -3.168e-04                        
## 3072  -2.345e-03  0.016290 -6.403e-03 -3.372e-04               0.0004005
## 32768 -9.109e-03 -0.296900 -2.989e-02 -7.408e-03    0.026140   0.0018890
## 16384  2.112e-03 -0.084490  6.012e-03 -3.526e-04    0.009689  -0.0007033
## 7040  -2.222e-03           -1.990e-03 -5.357e-04               0.0003013
## 9216  -5.766e-04  0.008231 -1.353e-03 -5.132e-04                        
## 832                         1.777e-03 -3.202e-04                        
## 10176            -0.067720 -5.159e-03 -3.558e-04    0.007568            
## 7168  -2.594e-03  0.018280 -6.278e-03 -5.722e-04               0.0003752
## 9136              0.002920 -3.438e-03 -7.515e-04                        
## 13312 -9.661e-04  0.008271 -1.922e-03 -8.349e-04                        
## 960               0.017480 -1.380e-03 -3.185e-04                        
## 11264 -2.353e-03  0.009811 -7.319e-03 -5.256e-04               0.0004037
## 656               0.016160            -2.818e-04                        
## 528                                   -3.027e-04                        
## 912               0.023650 -2.175e-03 -2.904e-04                        
## 15360 -2.613e-03  0.009751 -7.489e-03 -8.307e-04               0.0003786
## 672               0.013920            -2.879e-04                        
## 9152              0.002349 -3.373e-03 -7.558e-04                        
## 544                                   -3.081e-04                        
## 720   -9.240e-05  0.016410            -2.825e-04                        
## 800                         2.139e-03 -2.915e-04                        
## 784                         1.287e-03 -2.919e-04                        
## 592   -7.061e-05                      -3.035e-04                        
## 976   -8.102e-05  0.023610 -2.099e-03 -2.907e-04                        
## 9104              0.016280 -3.413e-03 -5.267e-04                        
## 928               0.022230 -1.890e-03 -2.905e-04                        
## 736   -8.382e-05  0.014230            -2.883e-04                        
## 608   -6.072e-05                      -3.087e-04                        
## 864   -8.003e-05            2.205e-03 -2.918e-04                        
## 848   -8.416e-05            1.359e-03 -2.922e-04                        
## 9168  -7.541e-05  0.017770 -3.085e-03 -4.778e-04                        
## 992   -8.084e-05  0.022260 -1.830e-03 -2.908e-04                        
## 9120              0.013300 -3.077e-03 -5.497e-04                        
## 2912  -6.655e-04            3.871e-04 -2.901e-04               0.0001336
## 3040  -1.147e-03  0.023640 -5.388e-03 -2.875e-04               0.0002432
## 9184  -7.451e-05  0.015080 -2.788e-03 -4.990e-04                        
## 11232 -1.157e-03  0.016110 -6.453e-03 -5.065e-04               0.0002470
## 1280  -3.859e-04 -0.089400                          0.008491            
## 1216             -0.090400                          0.008516            
## 1536  -3.652e-04 -0.085210 -3.742e-03               0.009436            
## 1472             -0.085390 -4.419e-03               0.009630            
## 3584   2.071e-03 -0.108200  3.443e-03               0.011260  -0.0005513
## 240   -3.649e-04  0.020920                                              
## 176               0.021140                                              
## 144               0.023000                                              
## 368   -4.447e-04            3.748e-03                                   
## 496   -3.915e-04  0.016190  1.302e-03                                   
## 256   -3.893e-04  0.022230                                              
## 192               0.021530                                              
## 432               0.019820  3.628e-04                                   
## 208   -7.259e-05  0.023200                                              
## 304                         3.291e-03                                   
## 400               0.024340 -3.809e-04                                   
## 160               0.022580                                              
## 384   -4.074e-04            4.196e-03                                   
## 320                         3.938e-03                                   
## 112   -3.743e-04                                                        
## 2432  -3.120e-03           -4.215e-03                          0.0006147
## 48                                                                      
## 288                         4.052e-03                                   
## 512   -3.937e-04  0.018620  8.129e-04                                   
## 272                         3.193e-03                                   
## 448               0.020720  1.831e-04                                   
## 464   -7.080e-05  0.024310 -3.132e-04                                   
## 224   -7.104e-05  0.022850                                              
## 416               0.023670 -2.460e-04                                   
## 2560  -3.517e-03  0.022310 -9.553e-03                          0.0007086
## 16                                                                      
## 1208             -0.087840                          0.008475            
## 128   -3.611e-04                                                        
## 64                                                                      
## 352   -6.982e-05            4.111e-03                                   
## 336   -7.397e-05            3.258e-03                                   
## 32                                                                      
## 480   -7.072e-05  0.023700 -1.919e-04                                   
## 80    -3.866e-05                                                        
## 2400  -2.016e-03           -1.968e-03                          0.0004440
## 2528  -2.549e-03  0.026880 -8.509e-03                          0.0005655
## 96    -3.076e-05                                                        
## 168               0.023020                                              
## 184               0.023560                                              
## 40                                                                      
## 136               0.026820                                              
## 56                                                                      
## 152               0.025940                                              
## 8                                                                       
## 24                                                                      
## 622   -5.636e-04                      -3.521e-04                        
## 4718  -8.695e-04                      -5.536e-04                        
## 558                                   -3.273e-04                        
## 526                                   -3.068e-04                        
## 590   -1.541e-04                      -3.084e-04                        
## 14                                                                      
## 46                                                                      
## 110   -3.493e-04                                                        
## 78    -1.221e-04                                                        
## 38                                                                      
## 6                                                                       
## 12                                                                      
## 76    -3.153e-04                                                        
## 268                         6.158e-04                                   
## 28                                                                      
## 332   -3.272e-04            9.485e-04                                   
## 92    -3.137e-04                                                        
## 284                         8.258e-04                                   
## 348   -3.271e-04            1.158e-03                                   
## 2396  -2.541e-03           -5.755e-03                          0.0005051
## 527                                   -2.268e-04                        
## 655               0.003747            -2.216e-04                        
## 783                         1.825e-04 -2.252e-04                        
## 911               0.006295 -7.441e-04 -2.245e-04                        
## 9103             -0.022230 -5.558e-03 -1.142e-03                        
## 15                                                                      
## 143               0.009417                                              
## 271                         1.690e-03                                   
## 74    -3.379e-04                                                        
## 10                                                                      
## 399               0.007236  6.191e-04                                   
## 11                                                                      
## 267                         9.711e-04                                   
## 525                                   -2.340e-04                        
## 9                                                                       
## 13                                                                      
## 7                                                                       
## 135               0.013010                                              
## 5                                                                       
## 4                                                                       
## 20                                                                      
## 2                                                                       
## 3                                                                       
## 1                                                                       
##       ASL:NoS:Typ AWL:NoS:Typ ASL:AWL:NoS:Typ df   logLik   AIC  delta weight
## 624                                            9 -157.854 333.7   0.00  0.106
## 752                                           10 -157.073 334.1   0.44  0.085
## 1792                                          12 -155.074 334.1   0.44  0.085
## 880                                           10 -157.419 334.8   1.13  0.060
## 2048                                          13 -154.691 335.4   1.67  0.046
## 4720    1.329e-05                             10 -157.721 335.4   1.73  0.045
## 640                                           10 -157.825 335.7   1.94  0.040
## 5888    1.819e-05                             13 -154.828 335.7   1.95  0.040
## 4848    1.791e-05                             11 -156.834 335.7   1.96  0.040
## 768                                           11 -157.061 336.1   2.41  0.032
## 1008                                          11 -157.072 336.1   2.44  0.031
## 4976    1.638e-05                             11 -157.218 336.4   2.73  0.027
## 896                                           11 -157.280 336.6   2.85  0.026
## 4096                                          14 -154.361 336.7   3.01  0.024
## 6144    2.029e-05                             14 -154.387 336.8   3.07  0.023
## 10240              -4.245e-05                 14 -154.673 337.3   3.64  0.017
## 4736    1.296e-05                             11 -157.699 337.4   3.69  0.017
## 1728                                          11 -157.723 337.4   3.74  0.016
## 560                                            8 -160.804 337.6   3.90  0.015
## 4864    1.855e-05                             12 -156.808 337.6   3.91  0.015
## 5104    1.796e-05                             12 -156.832 337.7   3.96  0.015
## 688                                            9 -159.873 337.7   4.04  0.014
## 8192    2.247e-05                             15 -153.990 338.0   4.27  0.013
## 9200                4.679e-05                 12 -157.049 338.1   4.39  0.012
## 1024                                          12 -157.053 338.1   4.40  0.012
## 4992    1.601e-05                             12 -157.089 338.2   4.47  0.011
## 1984                                          12 -157.128 338.3   4.55  0.011
## 2944                                          12 -157.128 338.3   4.55  0.011
## 12288              -7.425e-05                 15 -154.308 338.6   4.91  0.009
## 14336   2.001e-05  -2.685e-05                 15 -154.379 338.8   5.05  0.009
## 576                                            9 -160.541 339.1   5.37  0.007
## 816                                            9 -160.582 339.2   5.46  0.007
## 944                                           10 -159.771 339.5   5.83  0.006
## 13296   1.885e-05   6.507e-05                 13 -156.788 339.6   5.87  0.006
## 5120    1.897e-05                             13 -156.790 339.6   5.87  0.006
## 704                                           10 -159.822 339.6   5.94  0.005
## 3072                                          13 -156.833 339.7   5.96  0.005
## 32768   5.509e-04   1.567e-03      -0.0001221 17 -152.857 339.7   6.01  0.005
## 16384   2.194e-05  -5.965e-05                 16 -153.956 339.9   6.20  0.005
## 7040    1.494e-05                             13 -156.963 339.9   6.22  0.005
## 9216                4.019e-05                 13 -157.036 340.1   6.36  0.004
## 832                                           10 -160.114 340.2   6.52  0.004
## 10176               1.371e-05                 13 -157.126 340.3   6.54  0.004
## 7168    1.798e-05                             14 -156.597 341.2   7.49  0.003
## 9136                1.013e-04                 11 -159.663 341.3   7.62  0.002
## 13312   1.958e-05   5.566e-05                 14 -156.758 341.5   7.81  0.002
## 960                                           11 -159.769 341.5   7.83  0.002
## 11264               4.417e-05                 14 -156.813 341.6   7.92  0.002
## 656                                            8 -162.889 341.8   8.07  0.002
## 528                                            7 -164.508 343.0   9.31  0.001
## 912                                            9 -162.546 343.1   9.38  0.001
## 15360   1.861e-05   5.863e-05                 15 -156.562 343.1   9.42  0.001
## 672                                            9 -162.639 343.3   9.57  0.001
## 9152                1.024e-04                 12 -159.663 343.3   9.62  0.001
## 544                                            8 -163.709 343.4   9.71  0.001
## 720                                            9 -162.783 343.6   9.86  0.001
## 800                                            9 -163.096 344.2  10.48  0.001
## 784                                            8 -164.268 344.5  10.83  0.000
## 592                                            8 -164.446 344.9  11.19  0.000
## 976                                           10 -162.464 344.9  11.22  0.000
## 9104                5.536e-05                 10 -162.514 345.0  11.32  0.000
## 928                                           10 -162.542 345.1  11.38  0.000
## 736                                           10 -162.552 345.1  11.40  0.000
## 608                                            9 -163.663 345.3  11.62  0.000
## 864                                           10 -163.017 346.0  12.33  0.000
## 848                                            9 -164.181 346.4  12.65  0.000
## 9168                4.385e-05                 11 -162.444 346.9  13.18  0.000
## 992                                           11 -162.461 346.9  13.21  0.000
## 9120                6.073e-05                 11 -162.505 347.0  13.30  0.000
## 2912                                          11 -162.992 348.0  14.28  0.000
## 3040                                          12 -162.381 348.8  15.05  0.000
## 9184                4.879e-05                 12 -162.437 348.9  15.17  0.000
## 11232               5.131e-05                 13 -162.355 350.7  17.00  0.000
## 1280                                          11 -166.006 354.0  20.30  0.000
## 1216                                          10 -167.185 354.4  20.66  0.000
## 1536                                          12 -165.679 355.4  21.65  0.000
## 1472                                          11 -166.726 355.5  21.74  0.000
## 3584                                          13 -165.425 356.8  23.14  0.000
## 240                                            9 -169.745 357.5  23.78  0.000
## 176                                            8 -170.827 357.7  23.95  0.000
## 144                                            7 -171.945 357.9  24.18  0.000
## 368                                            9 -170.350 358.7  24.99  0.000
## 496                                           10 -169.633 359.3  25.56  0.000
## 256                                           10 -169.640 359.3  25.57  0.000
## 192                                            9 -170.817 359.6  25.93  0.000
## 432                                            9 -170.818 359.6  25.93  0.000
## 208                                            8 -171.882 359.8  26.06  0.000
## 304                                            8 -171.919 359.8  26.13  0.000
## 400                                            8 -171.935 359.9  26.16  0.000
## 160                                            8 -171.935 359.9  26.16  0.000
## 384                                           10 -169.993 360.0  26.28  0.000
## 320                                            9 -171.274 360.5  26.84  0.000
## 112                                            8 -172.387 360.8  27.07  0.000
## 2432                                          11 -169.492 361.0  27.28  0.000
## 48                                             7 -173.510 361.0  27.31  0.000
## 288                                            8 -172.532 361.1  27.36  0.000
## 512                                           11 -169.623 361.2  27.54  0.000
## 272                                            7 -173.672 361.3  27.64  0.000
## 448                                           10 -170.816 361.6  27.93  0.000
## 464                                            9 -171.875 361.8  28.04  0.000
## 224                                            9 -171.876 361.8  28.04  0.000
## 416                                            9 -171.934 361.9  28.16  0.000
## 2560                                          12 -168.971 361.9  28.23  0.000
## 16                                             6 -175.157 362.3  28.61  0.000
## 1208                                           9 -172.253 362.5  28.80  0.000
## 128                                            9 -172.355 362.7  29.00  0.000
## 64                                             8 -173.356 362.7  29.00  0.000
## 352                                            9 -172.475 362.9  29.24  0.000
## 336                                            8 -173.608 363.2  29.51  0.000
## 32                                             7 -174.725 363.5  29.74  0.000
## 480                                           10 -171.875 363.7  30.04  0.000
## 80                                             7 -175.139 364.3  30.57  0.000
## 2400                                          10 -172.213 364.4  30.72  0.000
## 2528                                          11 -171.460 364.9  31.21  0.000
## 96                                             8 -174.714 365.4  31.72  0.000
## 168                                            7 -175.774 365.5  31.84  0.000
## 184                                            8 -175.757 367.5  33.81  0.000
## 40                                             6 -178.884 369.8  36.06  0.000
## 136                                            6 -178.940 369.9  36.17  0.000
## 56                                             7 -178.728 371.5  37.75  0.000
## 152                                            7 -178.899 371.8  38.09  0.000
## 8                                              5 -183.193 376.4  42.68  0.000
## 24                                             6 -182.472 376.9  43.24  0.000
## 622                                            8 -186.379 388.8  55.05  0.000
## 4718    1.535e-05                              9 -186.226 390.5  56.74  0.000
## 558                                            7 -188.673 391.3  57.64  0.000
## 526                                            6 -190.129 392.3  58.55  0.000
## 590                                            7 -189.871 393.7  60.03  0.000
## 14                                             5 -199.709 409.4  75.71  0.000
## 46                                             6 -199.310 410.6  76.91  0.000
## 110                                            7 -198.457 410.9  77.21  0.000
## 78                                             6 -199.554 411.1  77.40  0.000
## 38                                             5 -210.757 431.5  97.81  0.000
## 6                                              4 -213.511 435.0 101.32  0.000
## 12                                             5 -223.174 456.3 122.64  0.000
## 76                                             6 -222.249 456.5 122.79  0.000
## 268                                            6 -223.131 458.3 124.55  0.000
## 28                                             6 -223.145 458.3 124.58  0.000
## 332                                            7 -222.147 458.3 124.59  0.000
## 92                                             7 -222.230 458.5 124.75  0.000
## 284                                            7 -223.073 460.1 126.44  0.000
## 348                                            8 -222.089 460.2 126.47  0.000
## 2396                                           9 -221.829 461.7 127.95  0.000
## 527                                            6 -224.912 461.8 128.12  0.000
## 655                                            7 -224.848 463.7 129.99  0.000
## 783                                            7 -224.909 463.8 130.11  0.000
## 911                                            8 -224.819 465.6 131.93  0.000
## 9103                2.149e-04                  9 -224.471 466.9 133.23  0.000
## 15                                             5 -229.371 468.7 135.03  0.000
## 143                                            6 -228.963 469.9 136.22  0.000
## 271                                            6 -229.058 470.1 136.41  0.000
## 74                                             5 -230.690 471.4 137.67  0.000
## 10                                             4 -231.707 471.4 137.71  0.000
## 399                                            7 -228.943 471.9 138.18  0.000
## 11                                             4 -233.317 474.6 140.93  0.000
## 267                                            5 -233.214 476.4 142.72  0.000
## 525                                            5 -238.877 487.8 154.05  0.000
## 9                                              3 -244.215 494.4 160.72  0.000
## 13                                             4 -243.291 494.6 160.87  0.000
## 7                                              4 -331.686 671.4 337.66  0.000
## 135                                            5 -331.231 672.5 338.75  0.000
## 5                                              3 -354.349 714.7 380.99  0.000
## 4                                              4 -397.645 803.3 469.58  0.000
## 20                                             5 -397.356 804.7 471.00  0.000
## 2                                              3 -400.251 806.5 472.79  0.000
## 3                                              3 -430.077 866.2 532.45  0.000
## 1                                              2 -435.188 874.4 540.67  0.000
## Models ranked by AIC(x)

10.4.1 Best model

## Fixed term is "(Intercept)"
## Global model call: glm(formula = Score ~ Type * NoS * ASL * AWL, family = gaussian, 
##     data = NNS.Index.df2)
## ---
## Model selection table 
##            (Int)     ASL       AWL        NoS        Typ    ASL:AWL   ASL:NoS    ASL:Typ   AWL:NoS    AWL:Typ    NoS:Typ
## 624    -3.182000 0.11930  0.464700  0.0619000  0.0196000             0.004805 -5.933e-04                      -3.595e-04
model.glm.2.best <- glm(Score ~ Type + NoS + ASL + AWL + ASL:NoS + ASL:Type + NoS:Type, data = NNS.Index.df2, family = gaussian)

summary(model.glm.2.best)
## 
## Call:
## glm(formula = Score ~ Type + NoS + ASL + AWL + ASL:NoS + ASL:Type + 
##     NoS:Type, family = gaussian, data = NNS.Index.df2)
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -3.1819233  0.4886938  -6.511 2.43e-10 ***
## Type         0.0195963  0.0038642   5.071 6.26e-07 ***
## NoS          0.0619002  0.0168319   3.678  0.00027 ***
## ASL          0.1192692  0.0281447   4.238 2.85e-05 ***
## AWL          0.4647475  0.0598652   7.763 8.14e-14 ***
## NoS:ASL      0.0048050  0.0013258   3.624  0.00033 ***
## Type:ASL    -0.0005933  0.0002459  -2.413  0.01633 *  
## Type:NoS    -0.0003594  0.0000661  -5.438 9.79e-08 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for gaussian family taken to be 0.1375812)
## 
##     Null deviance: 220.559  on 378  degrees of freedom
## Residual deviance:  51.043  on 371  degrees of freedom
## AIC: 333.71
## 
## Number of Fisher Scoring iterations: 2
plot(allEffects(model.glm.2.best))

10.5 Checking model validity

check_model(model.glm.2.best)

check_normality(model.glm.2.best)
## There's no formal statistical test for normality for generalized linear
##   model.
##   Please use `plot()` on the return value of this function:
##   `plot(check_normality(model))`
plot(check_normality(model.glm.2.best))
## There's no formal statistical test for normality for generalized linear
##   model.
##   Please use `plot()` on the return value of this function:
##   `plot(check_normality(model))`
## For confidence bands, please install `qqplotr`.
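
As a quick manual alternative (a sketch, not part of the performance-package workflow shown above), the residuals of the gaussian GLM can also be inspected with a base-R Q-Q plot:

res.glm <- residuals(model.glm.2.best)   # raw residuals of the gaussian GLM
qqnorm(res.glm)                          # Q-Q plot against the normal distribution
qqline(res.glm)                          # reference line; points close to it suggest approximate normality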

10.6 "Multiple regression analysis": adding the interactions with lm()

  • The distribution is assumed to be normal: family=gaussian

  • No random effects are included

  • glm(family=gaussian) = lm() (a quick numerical check of this equivalence appears at the end of this section)

model.lm.2.best <- lm(Score ~ Type + NoS + ASL + AWL + ASL:NoS + ASL:Type + NoS:Type, data = NNS.Index.df2)

summary(model.lm.2.best)
## 
## Call:
## lm(formula = Score ~ Type + NoS + ASL + AWL + ASL:NoS + ASL:Type + 
##     NoS:Type, data = NNS.Index.df2)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -1.22198 -0.24595 -0.00821  0.27040  1.29764 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept) -3.1819233  0.4886938  -6.511 2.43e-10 ***
## Type         0.0195963  0.0038642   5.071 6.26e-07 ***
## NoS          0.0619002  0.0168319   3.678  0.00027 ***
## ASL          0.1192692  0.0281447   4.238 2.85e-05 ***
## AWL          0.4647475  0.0598652   7.763 8.14e-14 ***
## NoS:ASL      0.0048050  0.0013258   3.624  0.00033 ***
## Type:ASL    -0.0005933  0.0002459  -2.413  0.01633 *  
## Type:NoS    -0.0003594  0.0000661  -5.438 9.79e-08 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.3709 on 371 degrees of freedom
## Multiple R-squared:  0.7686, Adjusted R-squared:  0.7642 
## F-statistic:   176 on 7 and 371 DF,  p-value: < 2.2e-16
plot(allEffects(model.lm.2.best))
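
Because a gaussian glm() and lm() both estimate the same least-squares coefficients, the two fitted models should agree numerically. A minimal check:

all.equal(coef(model.glm.2.best), coef(model.lm.2.best))   # expected: TRUE (identical coefficients)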

10.7 Checking model validity

check_model(model.lm.2.best)

check_normality(model.lm.2.best)
## OK: residuals appear as normally distributed (p = 0.296).
plot(check_normality(model.lm.2.best))
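
For reference, a plain base-R normality test of the residuals gives the same kind of information (a sketch; this is not necessarily the exact test that check_normality() runs internally):

shapiro.test(residuals(model.lm.2.best))   # Shapiro-Wilk test; a non-significant p-value is consistent with normal residuals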