FreeStatistics.org - Statistical Computations

Author: verified (the author of this computation has been verified)
R Software Module: rwasp_regression_trees1.wasp
Title produced by software: Recursive Partitioning (Regression Trees)
Date of computation: Tue, 11 Dec 2012 11:05:21 -0500
Cite this page as follows: Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?v=date/2012/Dec/11/t1355242044z4tsabkaeuj4kom.htm/, Retrieved Fri, 29 Mar 2024 11:11:44 +0000
Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?pk=198553, Retrieved Fri, 29 Mar 2024 11:11:44 +0000
Original text written by user:
IsPrivate? No (this computation is public)
User-defined keywords:
Estimated Impact: 83
Family? (F = Feedback message, R = changed R code, M = changed R Module, P = changed Parameters, D = changed Data)
-     [Recursive Partitioning (Regression Trees)] [] [2010-12-05 19:50:12] [b98453cac15ba1066b407e146608df68]
- R PD  [Recursive Partitioning (Regression Trees)] [] [2011-12-11 14:45:14] [71981af30b475eff259f311228330cd7]
-           [Recursive Partitioning (Regression Trees)] [ws 10] [2012-12-11 16:05:21] [9b45d6b988914ec0ffbfae116e7bcb98] [Current]
Dataseries X:
210907	56	396	81	3	79	30
120982	56	297	55	4	58	28
176508	54	559	50	12	60	38
179321	89	967	125	2	108	30
123185	40	270	40	1	49	22
52746	25	143	37	3	0	26
385534	92	1562	63	0	121	25
33170	18	109	44	0	1	18
101645	63	371	88	0	20	11
149061	44	656	66	5	43	26
165446	33	511	57	0	69	25
237213	84	655	74	0	78	38
173326	88	465	49	7	86	44
133131	55	525	52	7	44	30
258873	60	885	88	3	104	40
180083	66	497	36	9	63	34
324799	154	1436	108	0	158	47
230964	53	612	43	4	102	30
236785	119	865	75	3	77	31
135473	41	385	32	0	82	23
202925	61	567	44	7	115	36
215147	58	639	85	0	101	36
344297	75	963	86	1	80	30
153935	33	398	56	5	50	25
132943	40	410	50	7	83	39
174724	92	966	135	0	123	34
174415	100	801	63	0	73	31
225548	112	892	81	5	81	31
223632	73	513	52	0	105	33
124817	40	469	44	0	47	25
221698	45	683	113	0	105	33
210767	60	643	39	3	94	35
170266	62	535	73	4	44	42
260561	75	625	48	1	114	43
84853	31	264	33	4	38	30
294424	77	992	59	2	107	33
101011	34	238	41	0	30	13
215641	46	818	69	0	71	32
325107	99	937	64	0	84	36
7176	17	70	1	0	0	0
167542	66	507	59	2	59	28
106408	30	260	32	1	33	14
96560	76	503	129	0	42	17
265769	146	927	37	2	96	32
269651	67	1269	31	10	106	30
149112	56	537	65	6	56	35
175824	107	910	107	0	57	20
152871	58	532	74	5	59	28
111665	34	345	54	4	39	28
116408	61	918	76	1	34	39
362301	119	1635	715	2	76	34
78800	42	330	57	2	20	26
183167	66	557	66	0	91	39
277965	89	1178	106	8	115	39
150629	44	740	54	3	85	33
168809	66	452	32	0	76	28
24188	24	218	20	0	8	4
329267	259	764	71	8	79	39
65029	17	255	21	5	21	18
101097	64	454	70	3	30	14
218946	41	866	112	1	76	29
244052	68	574	66	5	101	44
341570	168	1276	190	1	94	21
103597	43	379	66	1	27	16
233328	132	825	165	5	92	28
256462	105	798	56	0	123	35
206161	71	663	61	12	75	28
311473	112	1069	53	8	128	38
235800	94	921	127	8	105	23
177939	82	858	63	8	55	36
207176	70	711	38	8	56	32
196553	57	503	50	2	41	29
174184	53	382	52	0	72	25
143246	103	464	42	5	67	27
187559	121	717	76	8	75	36
187681	62	690	67	2	114	28
119016	52	462	50	5	118	23
182192	52	657	53	12	77	40
73566	32	385	39	6	22	23
194979	62	577	50	7	66	40
167488	45	619	77	2	69	28
143756	46	479	57	0	105	34
275541	63	817	73	4	116	33
243199	75	752	34	3	88	28
182999	88	430	39	6	73	34
135649	46	451	46	2	99	30
152299	53	537	63	0	62	33
120221	37	519	35	1	53	22
346485	90	1000	106	0	118	38
145790	63	637	43	5	30	26
193339	78	465	47	2	100	35
80953	25	437	31	0	49	8
122774	45	711	162	0	24	24
130585	46	299	57	5	67	29
112611	41	248	36	0	46	20
286468	144	1162	263	1	57	29
241066	82	714	78	0	75	45
148446	91	905	63	1	135	37
204713	71	649	54	1	68	33
182079	63	512	63	2	124	33
140344	53	472	77	6	33	25
220516	62	905	79	1	98	32
243060	63	786	110	4	58	29
162765	32	489	56	2	68	28
182613	39	479	56	3	81	28
232138	62	617	43	0	131	31
265318	117	925	111	10	110	52
85574	34	351	71	0	37	21
310839	92	1144	62	9	130	24
225060	93	669	56	7	93	41
232317	54	707	74	0	118	33
144966	144	458	60	0	39	32
43287	14	214	43	4	13	19
155754	61	599	68	4	74	20
164709	109	572	53	0	81	31
201940	38	897	87	0	109	31
235454	73	819	46	0	151	32
220801	75	720	105	1	51	18
99466	50	273	32	0	28	23
92661	61	508	133	1	40	17
133328	55	506	79	0	56	20
61361	77	451	51	0	27	12
125930	75	699	207	4	37	17
100750	72	407	67	0	83	30
224549	50	465	47	4	54	31
82316	32	245	34	4	27	10
102010	53	370	66	3	28	13
101523	42	316	76	0	59	22
243511	71	603	65	0	133	42
22938	10	154	9	0	12	1
41566	35	229	42	5	0	9
152474	65	577	45	0	106	32
61857	25	192	25	4	23	11
99923	66	617	115	0	44	25
132487	41	411	97	0	71	36
317394	86	975	53	1	116	31
21054	16	146	2	0	4	0
209641	42	705	52	5	62	24
22648	19	184	44	0	12	13
31414	19	200	22	0	18	8
46698	45	274	35	0	14	13
131698	65	502	74	0	60	19
91735	35	382	103	0	7	18
244749	95	964	144	2	98	33
184510	49	537	60	7	64	40
79863	37	438	134	1	29	22
128423	64	369	89	8	32	38
97839	38	417	42	2	25	24
38214	34	276	52	0	16	8
151101	32	514	98	2	48	35
272458	65	822	99	0	100	43
172494	52	389	52	0	46	43
108043	62	466	29	1	45	14
328107	65	1255	125	3	129	41
250579	83	694	106	0	130	38
351067	95	1024	95	3	136	45
158015	29	400	40	0	59	31
98866	18	397	140	0	25	13
85439	33	350	43	0	32	28
229242	247	719	128	4	63	31
351619	139	1277	142	4	95	40
84207	29	356	73	11	14	30
120445	118	457	72	0	36	16
324598	110	1402	128	0	113	37
131069	67	600	61	4	47	30
204271	42	480	73	0	92	35
165543	65	595	148	1	70	32
141722	94	436	64	0	19	27
116048	64	230	45	0	50	20
250047	81	651	58	0	41	18
299775	95	1367	97	9	91	31
195838	67	564	50	1	111	31
173260	63	716	37	3	41	21
254488	83	747	50	10	120	39
104389	45	467	105	5	135	41
136084	30	671	69	0	27	13
199476	70	861	46	2	87	32
92499	32	319	57	0	25	18
224330	83	612	52	1	131	39
135781	31	433	98	2	45	14
74408	67	434	61	4	29	7
81240	66	503	89	0	58	17
14688	10	85	0	0	4	0
181633	70	564	48	2	47	30
271856	103	824	91	1	109	37
7199	5	74	0	0	7	0
46660	20	259	7	0	12	5
17547	5	69	3	0	0	1
133368	36	535	54	1	37	16
95227	34	239	70	0	37	32
152601	48	438	36	2	46	24
98146	40	459	37	0	15	17
79619	43	426	123	3	42	11
59194	31	288	247	6	7	24
139942	42	498	46	0	54	22
118612	46	454	72	2	54	12
72880	33	376	41	0	14	19
65475	18	225	24	2	16	13
99643	55	555	45	1	33	17
71965	35	252	33	1	32	15
77272	59	208	27	2	21	16
49289	19	130	36	1	15	24
135131	66	481	87	0	38	15
108446	60	389	90	1	22	17
89746	36	565	114	3	28	18
44296	25	173	31	0	10	20
77648	47	278	45	0	31	16
181528	54	609	69	0	32	16
134019	53	422	51	0	32	18
124064	40	445	34	1	43	22
92630	40	387	60	4	27	8
121848	39	339	45	0	37	17
52915	14	181	54	0	20	18
81872	45	245	25	0	32	16
58981	36	384	38	7	0	23
53515	28	212	52	2	5	22
60812	44	399	67	0	26	13
56375	30	229	74	7	10	13
65490	22	224	38	3	27	16
80949	17	203	30	0	11	16
76302	31	333	26	0	29	20
104011	55	384	67	6	25	22
98104	54	636	132	2	55	17
67989	21	185	42	0	23	18
30989	14	93	35	0	5	17
135458	81	581	118	3	43	12
73504	35	248	68	0	23	7
63123	43	304	43	1	34	17
61254	46	344	76	1	36	14
74914	30	407	64	0	35	23
31774	23	170	48	1	0	17
81437	38	312	64	0	37	14
87186	54	507	56	0	28	15
50090	20	224	71	0	16	17
65745	53	340	75	0	26	21
56653	45	168	39	0	38	18
158399	39	443	42	0	23	18
46455	20	204	39	0	22	17
73624	24	367	93	0	30	17
38395	31	210	38	0	16	16
91899	35	335	60	0	18	15
139526	151	364	71	0	28	21
52164	52	178	52	0	32	16
51567	30	206	27	2	21	14
70551	31	279	59	0	23	15
84856	29	387	40	1	29	17
102538	57	490	79	1	50	15
86678	40	238	44	0	12	15
85709	44	343	65	0	21	10
34662	25	232	10	0	18	6
150580	77	530	124	0	27	22
99611	35	291	81	0	41	21
19349	11	67	15	0	13	1
99373	63	397	92	1	12	18
86230	44	467	42	0	21	17
30837	19	178	10	0	8	4
31706	13	175	24	0	26	10
89806	42	299	64	0	27	16
62088	38	154	45	1	13	16
40151	29	106	22	0	16	9
27634	20	189	56	0	2	16
76990	27	194	94	0	42	17
37460	20	135	19	0	5	7
54157	19	201	35	0	37	15
49862	37	207	32	0	17	14
84337	26	280	35	0	38	14
64175	42	260	48	0	37	18
59382	49	227	49	0	29	12
119308	30	239	48	0	32	16
76702	49	333	62	0	35	21
103425	67	428	96	1	17	19
70344	28	230	45	0	20	16
43410	19	292	63	0	7	1
104838	49	350	71	1	46	16
62215	27	186	26	0	24	10
69304	30	326	48	6	40	19
53117	22	155	29	3	3	12
19764	12	75	19	1	10	2
86680	31	361	45	2	37	14
84105	20	261	45	0	17	17
77945	20	299	67	0	28	19
89113	39	300	30	0	19	14
91005	29	450	36	3	29	11
40248	16	183	34	1	8	4
64187	27	238	36	0	10	16
50857	21	165	34	0	15	20
56613	19	234	37	1	15	12
62792	35	176	46	0	28	15
72535	14	329	44	0	17	16




Summary of computational transaction
Raw Input        view raw input (R code)
Raw Output       view raw output of R engine
Computing time   7 seconds
R Server         'Gertrude Mary Cox' @ cox.wessa.net
Source: https://freestatistics.org/blog/index.php?pk=198553&T=0


10-Fold Cross Validation
                 Prediction (training)            Prediction (testing)
Actual           C1      C2      CV               C1      C2      CV
C1               1212    105     0.9203           121     12      0.9098
C2               123     1170    0.9049           21      126     0.8571
Overall          -       -       0.9126           -       -       0.8821
Source: https://freestatistics.org/blog/index.php?pk=198553&T=1
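The per-class rates in this table are the row-wise correct-classification fractions of the counts pooled over the ten replications, and the overall rates are the pooled accuracies. A quick check against the counts shown above:

C1 (training):      1212 / (1212 + 105)  = 0.9203
C2 (training):      1170 / (123 + 1170)  = 0.9049
Overall (training): (1212 + 1170) / 2610 = 0.9126
Overall (testing):  (121 + 126) / 280    = 0.8821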


Confusion Matrix (predicted in columns / actuals in rows)
                 C1      C2
C1               134     11
C2               12      132
Source: https://freestatistics.org/blog/index.php?pk=198553&T=2
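Unlike the cross-validation table above, this matrix describes the in-sample fit of the final tree on all 289 observations (the table of actuals versus predict(m), called myt in the R code below). The implied in-sample accuracy is

(134 + 132) / (134 + 11 + 12 + 132) = 266 / 289 = 0.9204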


Parameters (Session):
par1 = 1 ; par2 = quantiles ; par3 = 2 ; par4 = yes ;
Parameters (R input):
par1 = 1 ; par2 = quantiles ; par3 = 2 ; par4 = yes ;
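Read against the R code below, the four parameters have the following roles (an interpretation derived from the code itself, not from separate documentation):

par1 <- 1            # column index of the endogenous (response) variable: the first column of the data series
par2 <- 'quantiles'  # how to discretize the response before classification: quantile bins via Hmisc::cut2
par3 <- 2            # number of classes, i.e. a two-way (median) split into C1 and C2
par4 <- 'yes'        # also run the 10-fold cross-validation block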
R code (references can be found in the software module):
library(party)             # ctree(): conditional inference trees
library(Hmisc)             # cut2(): quantile binning
par1 <- as.numeric(par1)   # column index of the response variable
par3 <- as.numeric(par3)   # number of classes
x <- data.frame(t(y))      # y is supplied by the module; transpose so that rows are observations
is.data.frame(x)
x <- x[!is.na(x[,par1]),]  # drop observations with a missing response
k <- length(x[1,])         # number of variables
n <- length(x[,1])         # number of observations
colnames(x)[par1]
x[,par1]
# par2 = 'kmeans': discretize the response by k-means clustering into par3 classes
if (par2 == 'kmeans') {
cl <- kmeans(x[,par1], par3)
print(cl)
clm <- matrix(cbind(cl$centers,1:par3),ncol=2)
clm <- clm[sort.list(clm[,1]),]
for (i in 1:par3) {
cl$cluster[cl$cluster==clm[i,2]] <- paste('C',i,sep='')
}
cl$cluster <- as.factor(cl$cluster)
print(cl$cluster)
x[,par1] <- cl$cluster
}
# par2 = 'quantiles': discretize the response into par3 quantile classes (the method used in this run)
if (par2 == 'quantiles') {
x[,par1] <- cut2(x[,par1],g=par3)
}
# par2 = 'hclust': discretize the response by hierarchical (centroid) clustering into par3 classes
if (par2 == 'hclust') {
hc <- hclust(dist(x[,par1])^2, 'cen')
print(hc)
memb <- cutree(hc, k = par3)
dum <- c(mean(x[memb==1,par1]))
for (i in 2:par3) {
dum <- c(dum, mean(x[memb==i,par1]))
}
hcm <- matrix(cbind(dum,1:par3),ncol=2)
hcm <- hcm[sort.list(hcm[,1]),]
for (i in 1:par3) {
memb[memb==hcm[i,2]] <- paste('C',i,sep='')
}
memb <- as.factor(memb)
print(memb)
x[,par1] <- memb
}
# par2 = 'equal': discretize the response into par3 equal-width intervals
if (par2=='equal') {
ed <- cut(as.numeric(x[,par1]),par3,labels=paste('C',1:par3,sep=''))
x[,par1] <- as.factor(ed)
}
table(x[,par1])
colnames(x)
colnames(x)[par1]
x[,par1]
# par2 = 'none': keep the response numeric and grow a regression tree
if (par2 == 'none') {
m <- ctree(as.formula(paste(colnames(x)[par1],' ~ .',sep='')),data = x)
}
load(file='createtable')
# otherwise: grow a classification tree on the discretized response
if (par2 != 'none') {
m <- ctree(as.formula(paste('as.factor(',colnames(x)[par1],') ~ .',sep='')),data = x)
# par4 = 'yes': estimate predictive performance with 10 repeated random 90/10 train/test splits
if (par4=='yes') {
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'10-Fold Cross Validation',3+2*par3,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'',1,TRUE)
a<-table.element(a,'Prediction (training)',par3+1,TRUE)
a<-table.element(a,'Prediction (testing)',par3+1,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'Actual',1,TRUE)
for (jjj in 1:par3) a<-table.element(a,paste('C',jjj,sep=''),1,TRUE)
a<-table.element(a,'CV',1,TRUE)
for (jjj in 1:par3) a<-table.element(a,paste('C',jjj,sep=''),1,TRUE)
a<-table.element(a,'CV',1,TRUE)
a<-table.row.end(a)
for (i in 1:10) {
ind <- sample(2, nrow(x), replace=T, prob=c(0.9,0.1))   # random split: ~90% training, ~10% testing
m.ct <- ctree(as.formula(paste('as.factor(',colnames(x)[par1],') ~ .',sep='')),data =x[ind==1,])
if (i==1) {
m.ct.i.pred <- predict(m.ct, newdata=x[ind==1,])
m.ct.i.actu <- x[ind==1,par1]
m.ct.x.pred <- predict(m.ct, newdata=x[ind==2,])
m.ct.x.actu <- x[ind==2,par1]
} else {
m.ct.i.pred <- c(m.ct.i.pred,predict(m.ct, newdata=x[ind==1,]))
m.ct.i.actu <- c(m.ct.i.actu,x[ind==1,par1])
m.ct.x.pred <- c(m.ct.x.pred,predict(m.ct, newdata=x[ind==2,]))
m.ct.x.actu <- c(m.ct.x.actu,x[ind==2,par1])
}
}
print(m.ct.i.tab <- table(m.ct.i.actu,m.ct.i.pred))   # pooled training confusion counts over the 10 replications
numer <- 0
for (i in 1:par3) {
print(m.ct.i.tab[i,i] / sum(m.ct.i.tab[i,]))
numer <- numer + m.ct.i.tab[i,i]
}
print(m.ct.i.cp <- numer / sum(m.ct.i.tab))
print(m.ct.x.tab <- table(m.ct.x.actu,m.ct.x.pred))   # pooled testing confusion counts over the 10 replications
numer <- 0
for (i in 1:par3) {
print(m.ct.x.tab[i,i] / sum(m.ct.x.tab[i,]))
numer <- numer + m.ct.x.tab[i,i]
}
print(m.ct.x.cp <- numer / sum(m.ct.x.tab))
for (i in 1:par3) {
a<-table.row.start(a)
a<-table.element(a,paste('C',i,sep=''),1,TRUE)
for (jjj in 1:par3) a<-table.element(a,m.ct.i.tab[i,jjj])
a<-table.element(a,round(m.ct.i.tab[i,i]/sum(m.ct.i.tab[i,]),4))
for (jjj in 1:par3) a<-table.element(a,m.ct.x.tab[i,jjj])
a<-table.element(a,round(m.ct.x.tab[i,i]/sum(m.ct.x.tab[i,]),4))
a<-table.row.end(a)
}
a<-table.row.start(a)
a<-table.element(a,'Overall',1,TRUE)
for (jjj in 1:par3) a<-table.element(a,'-')
a<-table.element(a,round(m.ct.i.cp,4))
for (jjj in 1:par3) a<-table.element(a,'-')
a<-table.element(a,round(m.ct.x.cp,4))
a<-table.row.end(a)
a<-table.end(a)
table.save(a,file='mytable3.tab')
}
}
m
bitmap(file='test1.png')
plot(m)
dev.off()
bitmap(file='test1a.png')
plot(x[,par1] ~ as.factor(where(m)),main='Response by Terminal Node',xlab='Terminal Node',ylab='Response')
dev.off()
if (par2 == 'none') {
forec <- predict(m)
result <- as.data.frame(cbind(x[,par1],forec,x[,par1]-forec))
colnames(result) <- c('Actuals','Forecasts','Residuals')
print(result)
}
if (par2 != 'none') {
print(cbind(as.factor(x[,par1]),predict(m)))
myt <- table(as.factor(x[,par1]),predict(m))   # in-sample confusion matrix (actuals in rows, predictions in columns)
print(myt)
}
bitmap(file='test2.png')
if(par2=='none') {
op <- par(mfrow=c(2,2))
plot(density(result$Actuals),main='Kernel Density Plot of Actuals')
plot(density(result$Residuals),main='Kernel Density Plot of Residuals')
plot(result$Forecasts,result$Actuals,main='Actuals versus Predictions',xlab='Predictions',ylab='Actuals')
plot(density(result$Forecasts),main='Kernel Density Plot of Predictions')
par(op)
}
if(par2!='none') {
plot(myt,main='Confusion Matrix',xlab='Actual',ylab='Predicted')
}
dev.off()
# par2 = 'none': goodness-of-fit table (correlation, R-squared, RMSE) plus actuals/predictions/residuals
if (par2 == 'none') {
detcoef <- cor(result$Forecasts,result$Actuals)
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Goodness of Fit',2,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'Correlation',1,TRUE)
a<-table.element(a,round(detcoef,4))
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'R-squared',1,TRUE)
a<-table.element(a,round(detcoef*detcoef,4))
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'RMSE',1,TRUE)
a<-table.element(a,round(sqrt(mean((result$Residuals)^2)),4))
a<-table.row.end(a)
a<-table.end(a)
table.save(a,file='mytable1.tab')
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Actuals, Predictions, and Residuals',4,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'#',header=TRUE)
a<-table.element(a,'Actuals',header=TRUE)
a<-table.element(a,'Forecasts',header=TRUE)
a<-table.element(a,'Residuals',header=TRUE)
a<-table.row.end(a)
for (i in 1:length(result$Actuals)) {
a<-table.row.start(a)
a<-table.element(a,i,header=TRUE)
a<-table.element(a,result$Actuals[i])
a<-table.element(a,result$Forecasts[i])
a<-table.element(a,result$Residuals[i])
a<-table.row.end(a)
}
a<-table.end(a)
table.save(a,file='mytable.tab')
}
# par2 != 'none': write the confusion matrix table shown above
if (par2 != 'none') {
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Confusion Matrix (predicted in columns / actuals in rows)',par3+1,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'',1,TRUE)
for (i in 1:par3) {
a<-table.element(a,paste('C',i,sep=''),1,TRUE)
}
a<-table.row.end(a)
for (i in 1:par3) {
a<-table.row.start(a)
a<-table.element(a,paste('C',i,sep=''),1,TRUE)
for (j in 1:par3) {
a<-table.element(a,myt[i,j])
}
a<-table.row.end(a)
}
a<-table.end(a)
table.save(a,file='mytable2.tab')
}
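
The listing above depends on helper functions (table.start, table.element, table.row.start, table.save) that are loaded from the server-side file 'createtable', so it does not run verbatim outside the FreeStatistics.org / Wessa.net environment. Below is a minimal standalone sketch of the core analysis for this run (par2 = 'quantiles', par3 = 2), assuming the data series above has been saved to a whitespace-separated file; the file name dataseries.txt is only a placeholder:

library(party)   # ctree(): conditional inference trees
library(Hmisc)   # cut2(): quantile binning

par1 <- 1   # response is the first column
par3 <- 2   # two classes

x <- read.table('dataseries.txt', header = FALSE)    # the seven-column data series listed above
x <- x[!is.na(x[, par1]), ]                           # drop rows with a missing response
x[, par1] <- cut2(x[, par1], g = par3)                # quantile (median) split of the response

# classification tree on the binned response, all remaining columns as predictors
m <- ctree(as.formula(paste('as.factor(', colnames(x)[par1], ') ~ .', sep = '')), data = x)
print(m)
plot(m)

# in-sample confusion matrix (actuals in rows, predictions in columns)
print(table(Actual = x[, par1], Predicted = predict(m)))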