Free Statistics


Author's title:
Author: *Unverified author*
R Software Module: rwasp_regression_trees1.wasp
Title produced by software: Recursive Partitioning (Regression Trees)
Date of computation: Wed, 11 Dec 2013 11:19:14 -0500
Cite this page as follows: Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?v=date/2013/Dec/11/t1386778903e6qlr55bh14gsve.htm/, Retrieved Fri, 29 Mar 2024 12:08:10 +0000
Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?pk=232087, Retrieved Fri, 29 Mar 2024 12:08:10 +0000

Original text written by user:
IsPrivate? No (this computation is public)
User-defined keywords:
Estimated Impact: 115
Family? (F = Feedback message, R = changed R code, M = changed R Module, P = changed Parameters, D = changed Data)
-     [Recursive Partitioning (Regression Trees)] [] [2010-12-05 18:59:57] [b98453cac15ba1066b407e146608df68]
- R PD  [Recursive Partitioning (Regression Trees)] [WS 10 - Recursive...] [2013-12-10 09:48:52] [e1b6e5c15a370139a1f66dc7648af660]
- R P       [Recursive Partitioning (Regression Trees)] [Ws 10 - Recursive...] [2013-12-11 16:19:14] [d41d8cd98f00b204e9800998ecf8427e] [Current]
Dataseries X:
1 119.992 157.302 74.997 0.00784 0.00007 0.0037 0.00554 0.04374 0.426 0.02971
1 122.4 148.65 113.819 0.00968 0.00008 0.00465 0.00696 0.06134 0.626 0.04368
1 116.682 131.111 111.555 0.0105 0.00009 0.00544 0.00781 0.05233 0.482 0.0359
1 116.676 137.871 111.366 0.00997 0.00009 0.00502 0.00698 0.05492 0.517 0.03772
1 116.014 141.781 110.655 0.01284 0.00011 0.00655 0.00908 0.06425 0.584 0.04465
1 120.552 131.162 113.787 0.00968 0.00008 0.00463 0.0075 0.04701 0.456 0.03243
1 120.267 137.244 114.82 0.00333 0.00003 0.00155 0.00202 0.01608 0.14 0.01351
1 107.332 113.84 104.315 0.0029 0.00003 0.00144 0.00182 0.01567 0.134 0.01256
1 95.73 132.068 91.754 0.00551 0.00006 0.00293 0.00332 0.02093 0.191 0.01717
1 95.056 120.103 91.226 0.00532 0.00006 0.00268 0.00332 0.02838 0.255 0.02444
1 88.333 112.24 84.072 0.00505 0.00006 0.00254 0.0033 0.02143 0.197 0.01892
1 91.904 115.871 86.292 0.0054 0.00006 0.00281 0.00336 0.02752 0.249 0.02214
1 136.926 159.866 131.276 0.00293 0.00002 0.00118 0.00153 0.01259 0.112 0.0114
1 139.173 179.139 76.556 0.0039 0.00003 0.00165 0.00208 0.01642 0.154 0.01797
1 152.845 163.305 75.836 0.00294 0.00002 0.00121 0.00149 0.01828 0.158 0.01246
1 142.167 217.455 83.159 0.00369 0.00003 0.00157 0.00203 0.01503 0.126 0.01359
1 144.188 349.259 82.764 0.00544 0.00004 0.00211 0.00292 0.02047 0.192 0.02074
1 168.778 232.181 75.603 0.00718 0.00004 0.00284 0.00387 0.03327 0.348 0.0343
1 153.046 175.829 68.623 0.00742 0.00005 0.00364 0.00432 0.05517 0.542 0.05767
1 156.405 189.398 142.822 0.00768 0.00005 0.00372 0.00399 0.03995 0.348 0.0431
1 153.848 165.738 65.782 0.0084 0.00005 0.00428 0.0045 0.0381 0.328 0.04055
1 153.88 172.86 78.128 0.0048 0.00003 0.00232 0.00267 0.04137 0.37 0.04525
1 167.93 193.221 79.068 0.00442 0.00003 0.0022 0.00247 0.04351 0.377 0.04246
1 173.917 192.735 86.18 0.00476 0.00003 0.00221 0.00258 0.04192 0.364 0.03772
1 163.656 200.841 76.779 0.00742 0.00005 0.0038 0.0039 0.01659 0.164 0.01497
1 104.4 206.002 77.968 0.00633 0.00006 0.00316 0.00375 0.03767 0.381 0.0378
1 171.041 208.313 75.501 0.00455 0.00003 0.0025 0.00234 0.01966 0.186 0.01872
1 146.845 208.701 81.737 0.00496 0.00003 0.0025 0.00275 0.01919 0.198 0.01826
1 155.358 227.383 80.055 0.0031 0.00002 0.00159 0.00176 0.01718 0.161 0.01661
1 162.568 198.346 77.63 0.00502 0.00003 0.0028 0.00253 0.01791 0.168 0.01799
0 197.076 206.896 192.055 0.00289 0.00001 0.00166 0.00168 0.01098 0.097 0.00802
0 199.228 209.512 192.091 0.00241 0.00001 0.00134 0.00138 0.01015 0.089 0.00762
0 198.383 215.203 193.104 0.00212 0.00001 0.00113 0.00135 0.01263 0.111 0.00951
0 202.266 211.604 197.079 0.0018 0.000009 0.00093 0.00107 0.00954 0.085 0.00719
0 203.184 211.526 196.16 0.00178 0.000009 0.00094 0.00106 0.00958 0.085 0.00726
0 201.464 210.565 195.708 0.00198 0.00001 0.00105 0.00115 0.01194 0.107 0.00957
1 177.876 192.921 168.013 0.00411 0.00002 0.00233 0.00241 0.02126 0.189 0.01612
1 176.17 185.604 163.564 0.00369 0.00002 0.00205 0.00218 0.01851 0.168 0.01491
1 180.198 201.249 175.456 0.00284 0.00002 0.00153 0.00166 0.01444 0.131 0.0119
1 187.733 202.324 173.015 0.00316 0.00002 0.00168 0.00182 0.01663 0.151 0.01366
1 186.163 197.724 177.584 0.00298 0.00002 0.00165 0.00175 0.01495 0.135 0.01233
1 184.055 196.537 166.977 0.00258 0.00001 0.00134 0.00147 0.01463 0.132 0.01234
0 237.226 247.326 225.227 0.00298 0.00001 0.00169 0.00182 0.01752 0.164 0.01133
0 241.404 248.834 232.483 0.00281 0.00001 0.00157 0.00173 0.0176 0.154 0.01251
0 243.439 250.912 232.435 0.0021 0.000009 0.00109 0.00137 0.01419 0.126 0.01033
0 242.852 255.034 227.911 0.00225 0.000009 0.00117 0.00139 0.01494 0.134 0.01014
0 245.51 262.09 231.848 0.00235 0.00001 0.00127 0.00148 0.01608 0.141 0.01149
0 252.455 261.487 182.786 0.00185 0.000007 0.00092 0.00113 0.01152 0.103 0.0086
0 122.188 128.611 115.765 0.00524 0.00004 0.00169 0.00203 0.01613 0.143 0.01433
0 122.964 130.049 114.676 0.00428 0.00003 0.00124 0.00155 0.01681 0.154 0.014
0 124.445 135.069 117.495 0.00431 0.00003 0.00141 0.00167 0.02184 0.197 0.01685
0 126.344 134.231 112.773 0.00448 0.00004 0.00131 0.00169 0.02033 0.185 0.01614
0 128.001 138.052 122.08 0.00436 0.00003 0.00137 0.00166 0.02297 0.21 0.01677
0 129.336 139.867 118.604 0.0049 0.00004 0.00165 0.00183 0.02498 0.228 0.01947
1 108.807 134.656 102.874 0.00761 0.00007 0.00349 0.00486 0.02719 0.255 0.02067
1 109.86 126.358 104.437 0.00874 0.00008 0.00398 0.00539 0.03209 0.307 0.02454
1 110.417 131.067 103.37 0.00784 0.00007 0.00352 0.00514 0.03715 0.334 0.02802
1 117.274 129.916 110.402 0.00752 0.00006 0.00299 0.00469 0.02293 0.221 0.01948
1 116.879 131.897 108.153 0.00788 0.00007 0.00334 0.00493 0.02645 0.265 0.02137
1 114.847 271.314 104.68 0.00867 0.00008 0.00373 0.0052 0.03225 0.35 0.02519
0 209.144 237.494 109.379 0.00282 0.00001 0.00147 0.00152 0.01861 0.17 0.01382
0 223.365 238.987 98.664 0.00264 0.00001 0.00154 0.00151 0.01906 0.165 0.0134
0 222.236 231.345 205.495 0.00266 0.00001 0.00152 0.00144 0.01643 0.145 0.012
0 228.832 234.619 223.634 0.00296 0.00001 0.00175 0.00155 0.01644 0.145 0.01179
0 229.401 252.221 221.156 0.00205 0.000009 0.00114 0.00113 0.01457 0.129 0.01016
0 228.969 239.541 113.201 0.00238 0.00001 0.00136 0.0014 0.01745 0.154 0.01234
1 140.341 159.774 67.021 0.00817 0.00006 0.0043 0.0044 0.03198 0.313 0.02428
1 136.969 166.607 66.004 0.00923 0.00007 0.00507 0.00463 0.03111 0.308 0.02603
1 143.533 162.215 65.809 0.01101 0.00008 0.00647 0.00467 0.05384 0.478 0.03392
1 148.09 162.824 67.343 0.00762 0.00005 0.00467 0.00354 0.05428 0.497 0.03635
1 142.729 162.408 65.476 0.00831 0.00006 0.00469 0.00419 0.03485 0.365 0.02949
1 136.358 176.595 65.75 0.00971 0.00007 0.00534 0.00478 0.04978 0.483 0.03736
1 120.08 139.71 111.208 0.00405 0.00003 0.0018 0.0022 0.01706 0.152 0.01345
1 112.014 588.518 107.024 0.00533 0.00005 0.00268 0.00329 0.02448 0.226 0.01956
1 110.793 128.101 107.316 0.00494 0.00004 0.0026 0.00283 0.02442 0.216 0.01831
1 110.707 122.611 105.007 0.00516 0.00005 0.00277 0.00289 0.02215 0.206 0.01715
1 112.876 148.826 106.981 0.005 0.00004 0.0027 0.00289 0.03999 0.35 0.02704
1 110.568 125.394 106.821 0.00462 0.00004 0.00226 0.0028 0.02199 0.197 0.01636
1 95.385 102.145 90.264 0.00608 0.00006 0.00331 0.00332 0.03202 0.263 0.02455
1 100.77 115.697 85.545 0.01038 0.0001 0.00622 0.00576 0.03121 0.361 0.02139
1 96.106 108.664 84.51 0.00694 0.00007 0.00389 0.00415 0.04024 0.364 0.02876
1 95.605 107.715 87.549 0.00702 0.00007 0.00428 0.00371 0.03156 0.296 0.0219
1 100.96 110.019 95.628 0.00606 0.00006 0.00351 0.00348 0.02427 0.216 0.01751
1 98.804 102.305 87.804 0.00432 0.00004 0.00247 0.00258 0.02223 0.202 0.01552
1 176.858 205.56 75.344 0.00747 0.00004 0.00418 0.0042 0.04795 0.435 0.0351
1 180.978 200.125 155.495 0.00406 0.00002 0.0022 0.00244 0.03852 0.331 0.02877
1 178.222 202.45 141.047 0.00321 0.00002 0.00163 0.00194 0.03759 0.327 0.02784
1 176.281 227.381 125.61 0.0052 0.00003 0.00287 0.00312 0.06511 0.58 0.04683
1 173.898 211.35 74.677 0.00448 0.00003 0.00237 0.00254 0.06727 0.65 0.04802
1 179.711 225.93 144.878 0.00709 0.00004 0.00391 0.00419 0.04313 0.442 0.03455
1 166.605 206.008 78.032 0.00742 0.00004 0.00387 0.00453 0.0664 0.634 0.05114
1 151.955 163.335 147.226 0.00419 0.00003 0.00224 0.00227 0.07959 0.772 0.0569
1 148.272 164.989 142.299 0.00459 0.00003 0.0025 0.00256 0.0419 0.383 0.03051
1 152.125 161.469 76.596 0.00382 0.00003 0.00191 0.00226 0.05925 0.637 0.04398
1 157.821 172.975 68.401 0.00358 0.00002 0.00196 0.00196 0.03716 0.307 0.02764
1 157.447 163.267 149.605 0.00369 0.00002 0.00201 0.00197 0.03272 0.283 0.02571
1 159.116 168.913 144.811 0.00342 0.00002 0.00178 0.00184 0.03381 0.307 0.02809
1 125.036 143.946 116.187 0.0128 0.0001 0.00743 0.00623 0.03886 0.342 0.03088
1 125.791 140.557 96.206 0.01378 0.00011 0.00826 0.00655 0.04689 0.422 0.03908
1 126.512 141.756 99.77 0.01936 0.00015 0.01159 0.0099 0.06734 0.659 0.05783
1 125.641 141.068 116.346 0.03316 0.00026 0.02144 0.01522 0.09178 0.891 0.06196
1 128.451 150.449 75.632 0.01551 0.00012 0.00905 0.00909 0.0617 0.584 0.05174
1 139.224 586.567 66.157 0.03011 0.00022 0.01854 0.01628 0.09419 0.93 0.06023
1 150.258 154.609 75.349 0.00248 0.00002 0.00105 0.00136 0.01131 0.107 0.01009
1 154.003 160.267 128.621 0.00183 0.00001 0.00076 0.001 0.0103 0.094 0.00871
1 149.689 160.368 133.608 0.00257 0.00002 0.00116 0.00134 0.01346 0.126 0.01059
1 155.078 163.736 144.148 0.00168 0.00001 0.00068 0.00092 0.01064 0.097 0.00928
1 151.884 157.765 133.751 0.00258 0.00002 0.00115 0.00122 0.0145 0.137 0.01267
1 151.989 157.339 132.857 0.00174 0.00001 0.00075 0.00096 0.01024 0.093 0.00993
1 193.03 208.9 80.297 0.00766 0.00004 0.0045 0.00389 0.03044 0.275 0.02084
1 200.714 223.982 89.686 0.00621 0.00003 0.00371 0.00337 0.02286 0.207 0.01852
1 208.519 220.315 199.02 0.00609 0.00003 0.00368 0.00339 0.01761 0.155 0.01307
1 204.664 221.3 189.621 0.00841 0.00004 0.00502 0.00485 0.02378 0.21 0.01767
1 210.141 232.706 185.258 0.00534 0.00003 0.00321 0.0028 0.0168 0.149 0.01301
1 206.327 226.355 92.02 0.00495 0.00002 0.00302 0.00246 0.02105 0.209 0.01604
1 151.872 492.892 69.085 0.00856 0.00006 0.00404 0.00385 0.01843 0.235 0.01271
1 158.219 442.557 71.948 0.00476 0.00003 0.00214 0.00207 0.01458 0.148 0.01312
1 170.756 450.247 79.032 0.00555 0.00003 0.00244 0.00261 0.01725 0.175 0.01652
1 178.285 442.824 82.063 0.00462 0.00003 0.00157 0.00194 0.01279 0.129 0.01151
1 217.116 233.481 93.978 0.00404 0.00002 0.00127 0.00128 0.01299 0.124 0.01075
1 128.94 479.697 88.251 0.00581 0.00005 0.00241 0.00314 0.02008 0.221 0.01734
1 176.824 215.293 83.961 0.0046 0.00003 0.00209 0.00221 0.01169 0.117 0.01104
1 138.19 203.522 83.34 0.00704 0.00005 0.00406 0.00398 0.04479 0.441 0.0322
1 182.018 197.173 79.187 0.00842 0.00005 0.00506 0.00449 0.02503 0.231 0.01931
1 156.239 195.107 79.82 0.00694 0.00004 0.00403 0.00395 0.02343 0.224 0.0172
1 145.174 198.109 80.637 0.00733 0.00005 0.00414 0.00422 0.02362 0.233 0.01944
1 138.145 197.238 81.114 0.00544 0.00004 0.00294 0.00327 0.02791 0.246 0.02259
1 166.888 198.966 79.512 0.00638 0.00004 0.00368 0.00351 0.02857 0.257 0.02301
1 119.031 127.533 109.216 0.0044 0.00004 0.00214 0.00192 0.01033 0.098 0.00811
1 120.078 126.632 105.667 0.0027 0.00002 0.00116 0.00135 0.01022 0.09 0.00903
1 120.289 128.143 100.209 0.00492 0.00004 0.00269 0.00238 0.01412 0.125 0.01194
1 120.256 125.306 104.773 0.00407 0.00003 0.00224 0.00205 0.01516 0.138 0.0131
1 119.056 125.213 86.795 0.00346 0.00003 0.00169 0.0017 0.01201 0.106 0.00915
1 118.747 123.723 109.836 0.00331 0.00003 0.00168 0.00171 0.01043 0.099 0.00903
1 106.516 112.777 93.105 0.00589 0.00006 0.00291 0.00319 0.04932 0.441 0.03651
1 110.453 127.611 105.554 0.00494 0.00004 0.00244 0.00315 0.04128 0.379 0.03316
1 113.4 133.344 107.816 0.00451 0.00004 0.00219 0.00283 0.04879 0.431 0.0437
1 113.166 130.27 100.673 0.00502 0.00004 0.00257 0.00312 0.05279 0.476 0.04134
1 112.239 126.609 104.095 0.00472 0.00004 0.00238 0.0029 0.05643 0.517 0.04451
1 116.15 131.731 109.815 0.00381 0.00003 0.00181 0.00232 0.03026 0.267 0.0277
1 170.368 268.796 79.543 0.00571 0.00003 0.00232 0.00269 0.03273 0.281 0.02824
1 208.083 253.792 91.802 0.00757 0.00004 0.00428 0.00428 0.06725 0.571 0.04464
1 198.458 219.29 148.691 0.00376 0.00002 0.00182 0.00215 0.03527 0.297 0.0253
1 202.805 231.508 86.232 0.0037 0.00002 0.00189 0.00211 0.01997 0.18 0.01506
1 202.544 241.35 164.168 0.00254 0.00001 0.001 0.00133 0.02662 0.228 0.02006
1 223.361 263.872 87.638 0.00352 0.00002 0.00169 0.00188 0.02536 0.225 0.01909
1 169.774 191.759 151.451 0.01568 0.00009 0.00863 0.00946 0.08143 0.821 0.08808
1 183.52 216.814 161.34 0.01466 0.00008 0.00849 0.00819 0.0605 0.618 0.06359
1 188.62 216.302 165.982 0.01719 0.00009 0.00996 0.01027 0.07118 0.722 0.06824
1 202.632 565.74 177.258 0.01627 0.00008 0.00919 0.00963 0.0717 0.833 0.0646
1 186.695 211.961 149.442 0.01872 0.0001 0.01075 0.01154 0.0583 0.784 0.06259
1 192.818 224.429 168.793 0.03107 0.00016 0.018 0.01958 0.11908 1.302 0.13778
1 198.116 233.099 174.478 0.02714 0.00014 0.01568 0.01699 0.08684 1.018 0.08318
1 121.345 139.644 98.25 0.00684 0.00006 0.00388 0.00332 0.02534 0.241 0.02056
1 119.1 128.442 88.833 0.00692 0.00006 0.00393 0.003 0.02682 0.236 0.02018
1 117.87 127.349 95.654 0.00647 0.00005 0.00356 0.003 0.03087 0.276 0.02402
1 122.336 142.369 94.794 0.00727 0.00006 0.00415 0.00339 0.02293 0.223 0.01771
1 117.963 134.209 100.757 0.01813 0.00015 0.01117 0.00718 0.04912 0.438 0.02916
1 126.144 154.284 97.543 0.00975 0.00008 0.00593 0.00454 0.02852 0.266 0.02157
1 127.93 138.752 112.173 0.00605 0.00005 0.00321 0.00318 0.03235 0.339 0.03105
1 114.238 124.393 77.022 0.00581 0.00005 0.00299 0.00316 0.04009 0.406 0.04114
1 115.322 135.738 107.802 0.00619 0.00005 0.00352 0.00329 0.03273 0.325 0.02931
1 114.554 126.778 91.121 0.00651 0.00006 0.00366 0.0034 0.03658 0.369 0.03091
1 112.15 131.669 97.527 0.00519 0.00005 0.00291 0.00284 0.01756 0.155 0.01363
1 102.273 142.83 85.902 0.00907 0.00009 0.00493 0.00461 0.02814 0.272 0.02073
0 236.2 244.663 102.137 0.00277 0.00001 0.00154 0.00153 0.02448 0.217 0.01621
0 237.323 243.709 229.256 0.00303 0.00001 0.00173 0.00159 0.01242 0.116 0.00882
0 260.105 264.919 237.303 0.00339 0.00001 0.00205 0.00186 0.0203 0.197 0.01367
0 197.569 217.627 90.794 0.00803 0.00004 0.0049 0.00448 0.02177 0.189 0.01439
0 240.301 245.135 219.783 0.00517 0.00002 0.00316 0.00283 0.02018 0.212 0.01344
0 244.99 272.21 239.17 0.00451 0.00002 0.00279 0.00237 0.01897 0.181 0.01255
0 112.547 133.374 105.715 0.00355 0.00003 0.00166 0.0019 0.01358 0.129 0.0114
0 110.739 113.597 100.139 0.00356 0.00003 0.0017 0.002 0.01484 0.133 0.01285
0 113.715 116.443 96.913 0.00349 0.00003 0.00171 0.00203 0.01472 0.133 0.01148
0 117.004 144.466 99.923 0.00353 0.00003 0.00176 0.00218 0.01657 0.145 0.01318
0 115.38 123.109 108.634 0.00332 0.00003 0.0016 0.00199 0.01503 0.137 0.01133
0 116.388 129.038 108.97 0.00346 0.00003 0.00169 0.00213 0.01725 0.155 0.01331
1 151.737 190.204 129.859 0.00314 0.00002 0.00135 0.00162 0.01469 0.132 0.0123
1 148.79 158.359 138.99 0.00309 0.00002 0.00152 0.00186 0.01574 0.142 0.01309
1 148.143 155.982 135.041 0.00392 0.00003 0.00204 0.00231 0.0145 0.131 0.01263
1 150.44 163.441 144.736 0.00396 0.00003 0.00206 0.00233 0.02551 0.237 0.02148
1 148.462 161.078 141.998 0.00397 0.00003 0.00202 0.00235 0.01831 0.163 0.01559
1 149.818 163.417 144.786 0.00336 0.00002 0.00174 0.00198 0.02145 0.198 0.01666
0 117.226 123.925 106.656 0.00417 0.00004 0.00186 0.0027 0.01909 0.171 0.01949
0 116.848 217.552 99.503 0.00531 0.00005 0.0026 0.00346 0.01795 0.163 0.01756
0 116.286 177.291 96.983 0.00314 0.00003 0.00134 0.00192 0.01564 0.136 0.01691
0 116.556 592.03 86.228 0.00496 0.00004 0.00254 0.00263 0.0166 0.154 0.01491
0 116.342 581.289 94.246 0.00267 0.00002 0.00115 0.00148 0.013 0.117 0.01144
0 114.563 119.167 86.647 0.00327 0.00003 0.00146 0.00184 0.01185 0.106 0.01095
0 201.774 262.707 78.228 0.00694 0.00003 0.00412 0.00396 0.02574 0.255 0.01758
0 174.188 230.978 94.261 0.00459 0.00003 0.00263 0.00259 0.04087 0.405 0.02745
0 209.516 253.017 89.488 0.00564 0.00003 0.00331 0.00292 0.02751 0.263 0.01879
0 174.688 240.005 74.287 0.0136 0.00008 0.00624 0.00564 0.02308 0.256 0.01667
0 198.764 396.961 74.904 0.0074 0.00004 0.0037 0.0039 0.02296 0.241 0.01588
0 214.289 260.277 77.973 0.00567 0.00003 0.00295 0.00317 0.01884 0.19 0.01373
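
The series above is what the module receives as the matrix y; the first statements of the R code below transpose it into a data frame x. As a minimal loading sketch for interactive use, assuming the default read.table names V1..V11 (the original variable names are not given on this page):

# Paste all rows of Dataseries X between the quotes; only the first two are shown here
x <- read.table(text = "
1 119.992 157.302 74.997 0.00784 0.00007 0.0037 0.00554 0.04374 0.426 0.02971
1 122.4 148.65 113.819 0.00968 0.00008 0.00465 0.00696 0.06134 0.626 0.04368
")
str(x)  # 11 numeric columns; V1 is the 0/1 endogenous variable selected by par1 = 1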




Summary of computational transaction
Raw Input: view raw input (R code)
Raw Output: view raw output of R engine
Computing time: 19 seconds
R Server: 'Sir Maurice George Kendall' @ kendall.wessa.net

\begin{tabular}{lllllllll}
\hline
Summary of computational transaction \tabularnewline
Raw Input & view raw input (R code)  \tabularnewline
Raw Output & view raw output of R engine  \tabularnewline
Computing time & 19 seconds \tabularnewline
R Server & 'Sir Maurice George Kendall' @ kendall.wessa.net \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=232087&T=0

[TABLE]
[ROW][C]Summary of computational transaction[/C][/ROW]
[ROW][C]Raw Input[/C][C]view raw input (R code) [/C][/ROW]
[ROW][C]Raw Output[/C][C]view raw output of R engine [/C][/ROW]
[ROW][C]Computing time[/C][C]19 seconds[/C][/ROW]
[ROW][C]R Server[/C][C]'Sir Maurice George Kendall' @ kendall.wessa.net[/C][/ROW]
[/TABLE]
Source: https://freestatistics.org/blog/index.php?pk=232087&T=0

Globally Unique Identifier (entire table): ba.freestatistics.org/blog/index.php?pk=232087&T=0








10-Fold Cross Validation
        | Prediction (training)  | Prediction (testing)
Actual  | C1  | C2   | CV        | C1 | C2  | CV
C1      | 203 | 220  | 0.4799    | 23 | 34  | 0.4035
C2      | 37  | 1273 | 0.9718    | 5  | 155 | 0.9688
Overall | -   | -    | 0.8517    | -  | -   | 0.8203

\begin{tabular}{lllllllll}
\hline
10-Fold Cross Validation \tabularnewline
 & Prediction (training) & Prediction (testing) \tabularnewline
Actual & C1 & C2 & CV & C1 & C2 & CV \tabularnewline
C1 & 203 & 220 & 0.4799 & 23 & 34 & 0.4035 \tabularnewline
C2 & 37 & 1273 & 0.9718 & 5 & 155 & 0.9688 \tabularnewline
Overall & - & - & 0.8517 & - & - & 0.8203 \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=232087&T=1

[TABLE]
[ROW][C]10-Fold Cross Validation[/C][/ROW]
[ROW][C][/C][C]Prediction (training)[/C][C]Prediction (testing)[/C][/ROW]
[ROW][C]Actual[/C][C]C1[/C][C]C2[/C][C]CV[/C][C]C1[/C][C]C2[/C][C]CV[/C][/ROW]
[ROW][C]C1[/C][C]203[/C][C]220[/C][C]0.4799[/C][C]23[/C][C]34[/C][C]0.4035[/C][/ROW]
[ROW][C]C2[/C][C]37[/C][C]1273[/C][C]0.9718[/C][C]5[/C][C]155[/C][C]0.9688[/C][/ROW]
[ROW][C]Overall[/C][C]-[/C][C]-[/C][C]0.8517[/C][C]-[/C][C]-[/C][C]0.8203[/C][/ROW]
[/TABLE]
Source: https://freestatistics.org/blog/index.php?pk=232087&T=1
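
Each CV entry in this table is the diagonal count of its row divided by the row total (the per-class classification rate over the pooled splits), and the Overall entries are the sum of the diagonal divided by the grand total; this is how m.ct.i.cp and m.ct.x.cp are computed in the R code below. A quick recomputation from the counts in the table:

train <- matrix(c(203, 220, 37, 1273), nrow = 2, byrow = TRUE,
                dimnames = list(actual = c('C1','C2'), predicted = c('C1','C2')))
test <- matrix(c(23, 34, 5, 155), nrow = 2, byrow = TRUE,
               dimnames = list(actual = c('C1','C2'), predicted = c('C1','C2')))
rate <- function(tab) c(diag(tab) / rowSums(tab), overall = sum(diag(tab)) / sum(tab))
round(rate(train), 4)  # 0.4799 0.9718 0.8517
round(rate(test), 4)   # 0.4035 0.9688 0.8203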

Globally Unique Identifier (entire table): ba.freestatistics.org/blog/index.php?pk=232087&T=1








Confusion Matrix (predicted in columns / actuals in rows)
   | C1 | C2
C1 | 36 | 12
C2 | 6  | 141

\begin{tabular}{lllllllll}
\hline
Confusion Matrix (predicted in columns / actuals in rows) \tabularnewline
 & C1 & C2 \tabularnewline
C1 & 36 & 12 \tabularnewline
C2 & 6 & 141 \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=232087&T=2

[TABLE]
[ROW][C]Confusion Matrix (predicted in columns / actuals in rows)[/C][/ROW]
[ROW][C][/C][C]C1[/C][C]C2[/C][/ROW]
[ROW][C]C1[/C][C]36[/C][C]12[/C][/ROW]
[ROW][C]C2[/C][C]6[/C][C]141[/C][/ROW]
[/TABLE]
Source: https://freestatistics.org/blog/index.php?pk=232087&T=2
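
The matrix above comes from the tree fitted on the full sample of 195 observations. The module does not report derived rates for it, but they follow directly from the counts, for instance:

cm <- matrix(c(36, 12, 6, 141), nrow = 2, byrow = TRUE,
             dimnames = list(actual = c('C1','C2'), predicted = c('C1','C2')))
sum(diag(cm)) / sum(cm)  # overall accuracy: 177/195, about 0.908
diag(cm) / rowSums(cm)   # per-class rate: C1 = 36/48 = 0.75, C2 = 141/147, about 0.959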

Globally Unique Identifier (entire table): ba.freestatistics.org/blog/index.php?pk=232087&T=2




Parameters (Session):
par1 = 1 ; par2 = equal ; par3 = 2 ; par4 = yes ;
Parameters (R input):
par1 = 1 ; par2 = equal ; par3 = 2 ; par4 = yes ;
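Reading the R code below: par1 selects the column used as endogenous variable (column 1, the 0/1 indicator), par2 chooses how that variable is discretized ('equal' = equal-width intervals via cut(), here mapping the 0 values to C1 and the 1 values to C2; the other options are 'kmeans', 'quantiles', 'hclust', and 'none' for a regression tree on the raw values), par3 is the number of classes (2), and par4 = 'yes' requests the cross-validation table shown above.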
R code (references can be found in the software module):
library(party)            # ctree(): conditional inference (classification/regression) trees
library(Hmisc)            # cut2(): quantile-based binning (used when par2 = 'quantiles')
par1 <- as.numeric(par1)  # column index of the endogenous variable
par3 <- as.numeric(par3)  # number of classes for the discretization
x <- data.frame(t(y))     # y is supplied (transposed) by the FreeStatistics.org framework
is.data.frame(x)
x <- x[!is.na(x[,par1]),]
k <- length(x[1,])
n <- length(x[,1])
colnames(x)[par1]
x[,par1]
# Discretize the endogenous variable into par3 classes with the method chosen by par2
if (par2 == 'kmeans') {
cl <- kmeans(x[,par1], par3)
print(cl)
clm <- matrix(cbind(cl$centers,1:par3),ncol=2)
clm <- clm[sort.list(clm[,1]),]
for (i in 1:par3) {
cl$cluster[cl$cluster==clm[i,2]] <- paste('C',i,sep='')
}
cl$cluster <- as.factor(cl$cluster)
print(cl$cluster)
x[,par1] <- cl$cluster
}
if (par2 == 'quantiles') {
x[,par1] <- cut2(x[,par1],g=par3)
}
if (par2 == 'hclust') {
hc <- hclust(dist(x[,par1])^2, 'cen')
print(hc)
memb <- cutree(hc, k = par3)
dum <- c(mean(x[memb==1,par1]))
for (i in 2:par3) {
dum <- c(dum, mean(x[memb==i,par1]))
}
hcm <- matrix(cbind(dum,1:par3),ncol=2)
hcm <- hcm[sort.list(hcm[,1]),]
for (i in 1:par3) {
memb[memb==hcm[i,2]] <- paste('C',i,sep='')
}
memb <- as.factor(memb)
print(memb)
x[,par1] <- memb
}
if (par2=='equal') { # equal-width intervals over the variable's range; the method used in this run
ed <- cut(as.numeric(x[,par1]),par3,labels=paste('C',1:par3,sep=''))
x[,par1] <- as.factor(ed)
}
table(x[,par1])
colnames(x)
colnames(x)[par1]
x[,par1]
if (par2 == 'none') {
m <- ctree(as.formula(paste(colnames(x)[par1],' ~ .',sep='')),data = x)
}
load(file='createtable')
if (par2 != 'none') {
# Fit the conditional inference tree with the discretized endogenous variable as target
m <- ctree(as.formula(paste('as.factor(',colnames(x)[par1],') ~ .',sep='')),data = x)
if (par4=='yes') {
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'10-Fold Cross Validation',3+2*par3,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'',1,TRUE)
a<-table.element(a,'Prediction (training)',par3+1,TRUE)
a<-table.element(a,'Prediction (testing)',par3+1,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'Actual',1,TRUE)
for (jjj in 1:par3) a<-table.element(a,paste('C',jjj,sep=''),1,TRUE)
a<-table.element(a,'CV',1,TRUE)
for (jjj in 1:par3) a<-table.element(a,paste('C',jjj,sep=''),1,TRUE)
a<-table.element(a,'CV',1,TRUE)
a<-table.row.end(a)
# 10-fold cross validation: ten rounds, each drawing a random ~90/10 train/test split; predictions are pooled across rounds
for (i in 1:10) {
ind <- sample(2, nrow(x), replace=T, prob=c(0.9,0.1))
m.ct <- ctree(as.formula(paste('as.factor(',colnames(x)[par1],') ~ .',sep='')),data =x[ind==1,])
if (i==1) {
m.ct.i.pred <- predict(m.ct, newdata=x[ind==1,])
m.ct.i.actu <- x[ind==1,par1]
m.ct.x.pred <- predict(m.ct, newdata=x[ind==2,])
m.ct.x.actu <- x[ind==2,par1]
} else {
m.ct.i.pred <- c(m.ct.i.pred,predict(m.ct, newdata=x[ind==1,]))
m.ct.i.actu <- c(m.ct.i.actu,x[ind==1,par1])
m.ct.x.pred <- c(m.ct.x.pred,predict(m.ct, newdata=x[ind==2,]))
m.ct.x.actu <- c(m.ct.x.actu,x[ind==2,par1])
}
}
print(m.ct.i.tab <- table(m.ct.i.actu,m.ct.i.pred))
numer <- 0
for (i in 1:par3) {
print(m.ct.i.tab[i,i] / sum(m.ct.i.tab[i,]))
numer <- numer + m.ct.i.tab[i,i]
}
print(m.ct.i.cp <- numer / sum(m.ct.i.tab))
print(m.ct.x.tab <- table(m.ct.x.actu,m.ct.x.pred))
numer <- 0
for (i in 1:par3) {
print(m.ct.x.tab[i,i] / sum(m.ct.x.tab[i,]))
numer <- numer + m.ct.x.tab[i,i]
}
print(m.ct.x.cp <- numer / sum(m.ct.x.tab))
for (i in 1:par3) {
a<-table.row.start(a)
a<-table.element(a,paste('C',i,sep=''),1,TRUE)
for (jjj in 1:par3) a<-table.element(a,m.ct.i.tab[i,jjj])
a<-table.element(a,round(m.ct.i.tab[i,i]/sum(m.ct.i.tab[i,]),4))
for (jjj in 1:par3) a<-table.element(a,m.ct.x.tab[i,jjj])
a<-table.element(a,round(m.ct.x.tab[i,i]/sum(m.ct.x.tab[i,]),4))
a<-table.row.end(a)
}
a<-table.row.start(a)
a<-table.element(a,'Overall',1,TRUE)
for (jjj in 1:par3) a<-table.element(a,'-')
a<-table.element(a,round(m.ct.i.cp,4))
for (jjj in 1:par3) a<-table.element(a,'-')
a<-table.element(a,round(m.ct.x.cp,4))
a<-table.row.end(a)
a<-table.end(a)
table.save(a,file='mytable3.tab')
}
}
m
bitmap(file='test1.png')
plot(m)
dev.off()
bitmap(file='test1a.png')
plot(x[,par1] ~ as.factor(where(m)),main='Response by Terminal Node',xlab='Terminal Node',ylab='Response')
dev.off()
if (par2 == 'none') {
forec <- predict(m)
result <- as.data.frame(cbind(x[,par1],forec,x[,par1]-forec))
colnames(result) <- c('Actuals','Forecasts','Residuals')
print(result)
}
if (par2 != 'none') {
print(cbind(as.factor(x[,par1]),predict(m)))
myt <- table(as.factor(x[,par1]),predict(m)) # confusion matrix: actuals in rows, predictions in columns
print(myt)
}
bitmap(file='test2.png')
if(par2=='none') {
op <- par(mfrow=c(2,2))
plot(density(result$Actuals),main='Kernel Density Plot of Actuals')
plot(density(result$Residuals),main='Kernel Density Plot of Residuals')
plot(result$Forecasts,result$Actuals,main='Actuals versus Predictions',xlab='Predictions',ylab='Actuals')
plot(density(result$Forecasts),main='Kernel Density Plot of Predictions')
par(op)
}
if(par2!='none') {
plot(myt,main='Confusion Matrix',xlab='Actual',ylab='Predicted')
}
dev.off()
if (par2 == 'none') {
detcoef <- cor(result$Forecasts,result$Actuals)
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Goodness of Fit',2,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'Correlation',1,TRUE)
a<-table.element(a,round(detcoef,4))
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'R-squared',1,TRUE)
a<-table.element(a,round(detcoef*detcoef,4))
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'RMSE',1,TRUE)
a<-table.element(a,round(sqrt(mean((result$Residuals)^2)),4))
a<-table.row.end(a)
a<-table.end(a)
table.save(a,file='mytable1.tab')
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Actuals, Predictions, and Residuals',4,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'#',header=TRUE)
a<-table.element(a,'Actuals',header=TRUE)
a<-table.element(a,'Forecasts',header=TRUE)
a<-table.element(a,'Residuals',header=TRUE)
a<-table.row.end(a)
for (i in 1:length(result$Actuals)) {
a<-table.row.start(a)
a<-table.element(a,i,header=TRUE)
a<-table.element(a,result$Actuals[i])
a<-table.element(a,result$Forecasts[i])
a<-table.element(a,result$Residuals[i])
a<-table.row.end(a)
}
a<-table.end(a)
table.save(a,file='mytable.tab')
}
if (par2 != 'none') {
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Confusion Matrix (predicted in columns / actuals in rows)',par3+1,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'',1,TRUE)
for (i in 1:par3) {
a<-table.element(a,paste('C',i,sep=''),1,TRUE)
}
a<-table.row.end(a)
for (i in 1:par3) {
a<-table.row.start(a)
a<-table.element(a,paste('C',i,sep=''),1,TRUE)
for (j in 1:par3) {
a<-table.element(a,myt[i,j])
}
a<-table.row.end(a)
}
a<-table.end(a)
table.save(a,file='mytable2.tab')
}
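
For reference, the core of this run (par1 = 1, par2 = 'equal', par3 = 2) can be reproduced outside the FreeStatistics.org framework, which normally supplies the data as y and the table.* helpers via the 'createtable' file. A minimal standalone sketch, assuming x is the data frame loaded as shown under Dataseries X (default column names V1..V11):

library(party)
x$V1 <- cut(as.numeric(x$V1), 2, labels = c('C1', 'C2'))  # equal-width binning into 2 classes (par2 = 'equal', par3 = 2)
m <- ctree(as.factor(V1) ~ ., data = x)                   # conditional inference tree on the remaining columns
plot(m)                                                   # tree diagram (corresponds to the module's test1.png)
table(actual = x$V1, predicted = predict(m))              # confusion matrix, predicted in columns / actuals in rows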