Free Statistics


Author: (the author of this computation has been verified)
R Software Module: rwasp_regression_trees1.wasp
Title produced by software: Recursive Partitioning (Regression Trees)
Date of computation: Wed, 21 Dec 2011 13:52:52 -0500
Cite this page as follows: Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?v=date/2011/Dec/21/t1324493597o9kx0udp816qz5f.htm/, Retrieved Tue, 07 May 2024 11:56:40 +0000
Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?pk=158947, Retrieved Tue, 07 May 2024 11:56:40 +0000

Original text written by user:
IsPrivate? No (this computation is public)
User-defined keywords:
Estimated Impact: 79
Family? (F = Feedback message, R = changed R code, M = changed R Module, P = changed Parameters, D = changed Data)
-     [Multiple Regression] [multiple regression] [2011-12-18 15:37:09] [9631d8669dd1906475401d4d7f07aac5]
- RMPD  [Recursive Partitioning (Regression Trees)] [regression tree -...] [2011-12-21 17:16:58] [cd6e7488bdf368f344c8d209e8917833]
-   P       [Recursive Partitioning (Regression Trees)] [regression trees ...] [2011-12-21 18:52:52] [c38c32477296496b546025b407c5c736] [Current]
Dataseries X:
1418	210907	56	396	81	3
869	120982	56	297	55	4
1530	176508	54	559	50	12
2172	179321	89	967	125	2
901	123185	40	270	40	1
463	52746	25	143	37	3
3201	385534	92	1562	63	0
371	33170	18	109	44	0
1192	101645	63	371	88	0
1583	149061	44	656	66	5
1439	165446	33	511	57	0
1764	237213	84	655	74	0
1495	173326	88	465	49	7
1373	133131	55	525	52	7
2187	258873	60	885	88	3
1491	180083	66	497	36	9
4041	324799	154	1436	108	0
1706	230964	53	612	43	4
2152	236785	119	865	75	3
1036	135473	41	385	32	0
1882	202925	61	567	44	7
1929	215147	58	639	85	0
2242	344297	75	963	86	1
1220	153935	33	398	56	5
1289	132943	40	410	50	7
2515	174724	92	966	135	0
2147	174415	100	801	63	0
2352	225548	112	892	81	5
1638	223632	73	513	52	0
1222	124817	40	469	44	0
1812	221698	45	683	113	0
1677	210767	60	643	39	3
1579	170266	62	535	73	4
1731	260561	75	625	48	1
807	84853	31	264	33	4
2452	294424	77	992	59	2
829	101011	34	238	41	0
1940	215641	46	818	69	0
2662	325107	99	937	64	0
186	7176	17	70	1	0
1499	167542	66	507	59	2
865	106408	30	260	32	1
1793	96560	76	503	129	0
2527	265769	146	927	37	2
2747	269651	67	1269	31	10
1324	149112	56	537	65	6
2702	175824	107	910	107	0
1383	152871	58	532	74	5
1179	111665	34	345	54	4
2099	116408	61	918	76	1
4308	362301	119	1635	715	2
918	78800	42	330	57	2
1831	183167	66	557	66	0
3373	277965	89	1178	106	8
1713	150629	44	740	54	3
1438	168809	66	452	32	0
496	24188	24	218	20	0
2253	329267	259	764	71	8
744	65029	17	255	21	5
1161	101097	64	454	70	3
2352	218946	41	866	112	1
2144	244052	68	574	66	5
4691	341570	168	1276	190	1
1112	103597	43	379	66	1
2694	233328	132	825	165	5
1973	256462	105	798	56	0
1769	206161	71	663	61	12
3148	311473	112	1069	53	8
2474	235800	94	921	127	8
2084	177939	82	858	63	8
1954	207176	70	711	38	8
1226	196553	57	503	50	2
1389	174184	53	382	52	0
1496	143246	103	464	42	5
2269	187559	121	717	76	8
1833	187681	62	690	67	2
1268	119016	52	462	50	5
1943	182192	52	657	53	12
893	73566	32	385	39	6
1762	194979	62	577	50	7
1403	167488	45	619	77	2
1425	143756	46	479	57	0
1857	275541	63	817	73	4
1840	243199	75	752	34	3
1502	182999	88	430	39	6
1441	135649	46	451	46	2
1420	152299	53	537	63	0
1416	120221	37	519	35	1
2970	346485	90	1000	106	0
1317	145790	63	637	43	5
1644	193339	78	465	47	2
870	80953	25	437	31	0
1654	122774	45	711	162	0
1054	130585	46	299	57	5
937	112611	41	248	36	0
3004	286468	144	1162	263	1
2008	241066	82	714	78	0
2547	148446	91	905	63	1
1885	204713	71	649	54	1
1626	182079	63	512	63	2
1468	140344	53	472	77	6
2445	220516	62	905	79	1
1964	243060	63	786	110	4
1381	162765	32	489	56	2
1369	182613	39	479	56	3
1659	232138	62	617	43	0
2888	265318	117	925	111	10
1290	85574	34	351	71	0
2845	310839	92	1144	62	9
1982	225060	93	669	56	7
1904	232317	54	707	74	0
1391	144966	144	458	60	0
602	43287	14	214	43	4
1743	155754	61	599	68	4
1559	164709	109	572	53	0
2014	201940	38	897	87	0
2143	235454	73	819	46	0
2146	220801	75	720	105	1
874	99466	50	273	32	0
1590	92661	61	508	133	1
1590	133328	55	506	79	0
1210	61361	77	451	51	0
2072	125930	75	699	207	4
1281	100750	72	407	67	0
1401	224549	50	465	47	4
834	82316	32	245	34	4
1105	102010	53	370	66	3
1272	101523	42	316	76	0
1944	243511	71	603	65	0
391	22938	10	154	9	0
761	41566	35	229	42	5
1605	152474	65	577	45	0
530	61857	25	192	25	4
1988	99923	66	617	115	0
1386	132487	41	411	97	0
2395	317394	86	975	53	1
387	21054	16	146	2	0
1742	209641	42	705	52	5
620	22648	19	184	44	0
449	31414	19	200	22	0
800	46698	45	274	35	0
1684	131698	65	502	74	0
1050	91735	35	382	103	0
2699	244749	95	964	144	2
1606	184510	49	537	60	7
1502	79863	37	438	134	1
1204	128423	64	369	89	8
1138	97839	38	417	42	2
568	38214	34	276	52	0
1459	151101	32	514	98	2
2158	272458	65	822	99	0
1111	172494	52	389	52	0
1421	108043	62	466	29	1
2833	328107	65	1255	125	3
1955	250579	83	694	106	0
2922	351067	95	1024	95	3
1002	158015	29	400	40	0
1060	98866	18	397	140	0
956	85439	33	350	43	0
2186	229242	247	719	128	4
3604	351619	139	1277	142	4
1035	84207	29	356	73	11
1417	120445	118	457	72	0
3261	324598	110	1402	128	0
1587	131069	67	600	61	4
1424	204271	42	480	73	0
1701	165543	65	595	148	1
1249	141722	94	436	64	0
946	116048	64	230	45	0
1926	250047	81	651	58	0
3352	299775	95	1367	97	9
1641	195838	67	564	50	1
2035	173260	63	716	37	3
2312	254488	83	747	50	10
1369	104389	45	467	105	5
1577	136084	30	671	69	0
2201	199476	70	861	46	2
961	92499	32	319	57	0
1900	224330	83	612	52	1
1254	135781	31	433	98	2
1335	74408	67	434	61	4
1597	81240	66	503	89	0
207	14688	10	85	0	0
1645	181633	70	564	48	2
2429	271856	103	824	91	1
151	7199	5	74	0	0
474	46660	20	259	7	0
141	17547	5	69	3	0
1639	133368	36	535	54	1
872	95227	34	239	70	0
1318	152601	48	438	36	2
1018	98146	40	459	37	0
1383	79619	43	426	123	3
1314	59194	31	288	247	6
1335	139942	42	498	46	0
1403	118612	46	454	72	2
910	72880	33	376	41	0
616	65475	18	225	24	2
1407	99643	55	555	45	1
771	71965	35	252	33	1
766	77272	59	208	27	2
473	49289	19	130	36	1
1376	135131	66	481	87	0
1232	108446	60	389	90	1
1521	89746	36	565	114	3
572	44296	25	173	31	0
1059	77648	47	278	45	0
1544	181528	54	609	69	0
1230	134019	53	422	51	0
1206	124064	40	445	34	1
1205	92630	40	387	60	4
1255	121848	39	339	45	0
613	52915	14	181	54	0
721	81872	45	245	25	0
1109	58981	36	384	38	7
740	53515	28	212	52	2
1126	60812	44	399	67	0
728	56375	30	229	74	7
689	65490	22	224	38	3
592	80949	17	203	30	0
995	76302	31	333	26	0
1613	104011	55	384	67	6
2048	98104	54	636	132	2
705	67989	21	185	42	0
301	30989	14	93	35	0
1803	135458	81	581	118	3
799	73504	35	248	68	0
861	63123	43	304	43	1
1186	61254	46	344	76	1
1451	74914	30	407	64	0
628	31774	23	170	48	1
1161	81437	38	312	64	0
1463	87186	54	507	56	0
742	50090	20	224	71	0
979	65745	53	340	75	0
675	56653	45	168	39	0
1241	158399	39	443	42	0
676	46455	20	204	39	0
1049	73624	24	367	93	0
620	38395	31	210	38	0
1081	91899	35	335	60	0
1688	139526	151	364	71	0
736	52164	52	178	52	0
617	51567	30	206	27	2
812	70551	31	279	59	0
1051	84856	29	387	40	1
1656	102538	57	490	79	1
705	86678	40	238	44	0
945	85709	44	343	65	0
554	34662	25	232	10	0
1597	150580	77	530	124	0
982	99611	35	291	81	0
222	19349	11	67	15	0
1212	99373	63	397	92	1
1143	86230	44	467	42	0
435	30837	19	178	10	0
532	31706	13	175	24	0
882	89806	42	299	64	0
608	62088	38	154	45	1
459	40151	29	106	22	0
578	27634	20	189	56	0
826	76990	27	194	94	0
509	37460	20	135	19	0
717	54157	19	201	35	0
637	49862	37	207	32	0
857	84337	26	280	35	0
830	64175	42	260	48	0
652	59382	49	227	49	0
707	119308	30	239	48	0
954	76702	49	333	62	0
1461	103425	67	428	96	1
672	70344	28	230	45	0
778	43410	19	292	63	0
1141	104838	49	350	71	1
680	62215	27	186	26	0
1090	69304	30	326	48	6
616	53117	22	155	29	3
285	19764	12	75	19	1
1145	86680	31	361	45	2
733	84105	20	261	45	0
888	77945	20	299	67	0
849	89113	39	300	30	0
1182	91005	29	450	36	3
528	40248	16	183	34	1
642	64187	27	238	36	0
947	50857	21	165	34	0
819	56613	19	234	37	1
757	62792	35	176	46	0
894	72535	14	329	44	0
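The block above is passed to the module as the matrix y and transposed into the data frame x (see x <- data.frame(t(y)) in the R code below). To rerun the analysis outside the web service, the whitespace-separated block can be read directly; a minimal sketch, assuming the 289 rows are saved to a local file dataseries.txt and using placeholder column names V1..V6, since the page does not label the variables:

# Read the pasted data series (289 rows, 6 unlabeled columns) into a data frame.
# The file name 'dataseries.txt' and the V1..V6 names are assumptions of this sketch.
x <- read.table('dataseries.txt', header = FALSE, col.names = paste('V', 1:6, sep = ''))
str(x)
head(x)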




Summary of computational transaction
Raw Input: view raw input (R code)
Raw Output: view raw output of R engine
Computing time: 5 seconds
R Server: 'Gertrude Mary Cox' @ cox.wessa.net

\begin{tabular}{ll}
\hline
Summary of computational transaction \tabularnewline
Raw Input & view raw input (R code)  \tabularnewline
Raw Output & view raw output of R engine  \tabularnewline
Computing time & 5 seconds \tabularnewline
R Server & 'Gertrude Mary Cox' @ cox.wessa.net \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=158947&T=0


10-Fold Cross Validation
           Prediction (training)      Prediction (testing)
Actual     C1      C2      CV         C1      C2      CV
C1         1153    159     0.8788     116     22      0.8406
C2         118     1181    0.9092     17      124     0.8794
Overall    -       -       0.8939     -       -       0.8602

\begin{tabular}{lllllll}
\hline
10-Fold Cross Validation \tabularnewline
 & Prediction (training) & Prediction (testing) \tabularnewline
Actual & C1 & C2 & CV & C1 & C2 & CV \tabularnewline
C1 & 1153 & 159 & 0.8788 & 116 & 22 & 0.8406 \tabularnewline
C2 & 118 & 1181 & 0.9092 & 17 & 124 & 0.8794 \tabularnewline
Overall & - & - & 0.8939 & - & - & 0.8602 \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=158947&T=1
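The CV columns in this table are row-wise accuracies of the pooled confusion counts (the diagonal entry divided by its row total), and the Overall row is the trace divided by the grand total. A minimal sketch that recomputes the training-side figures from the counts reported above:

# Pooled training-side confusion counts from the table above (actuals in rows)
tab <- matrix(c(1153, 159,
                 118, 1181), nrow = 2, byrow = TRUE,
              dimnames = list(actual = c('C1','C2'), predicted = c('C1','C2')))
print(diag(tab) / rowSums(tab))   # per-class accuracy: 0.8788 (C1), 0.9092 (C2)
print(sum(diag(tab)) / sum(tab))  # overall accuracy: 0.8939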


Confusion Matrix (predicted in columns / actuals in rows)
        C1      C2
C1      114     31
C2      5       139

\begin{tabular}{lll}
\hline
Confusion Matrix (predicted in columns / actuals in rows) \tabularnewline
 & C1 & C2 \tabularnewline
C1 & 114 & 31 \tabularnewline
C2 & 5 & 139 \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=158947&T=2
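This matrix is computed on the full sample (myt <- table(as.factor(x[,par1]), predict(m)) in the R code below), so it measures in-sample fit rather than out-of-sample performance. A short sketch that derives the implied accuracy from the counts above:

# In-sample confusion matrix reported above (actuals in rows, predictions in columns)
cm <- matrix(c(114, 31,
                 5, 139), nrow = 2, byrow = TRUE,
             dimnames = list(actual = c('C1','C2'), predicted = c('C1','C2')))
print(sum(diag(cm)) / sum(cm))      # in-sample accuracy: 253/289, about 0.875
print(1 - sum(diag(cm)) / sum(cm))  # in-sample misclassification rate: about 0.125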





Parameters (Session):
par1 = 2 ; par2 = quantiles ; par3 = 2 ; par4 = yes ;
Parameters (R input):
par1 = 2 ; par2 = quantiles ; par3 = 2 ; par4 = yes ;
R code (references can be found in the software module):
library(party)
library(Hmisc)
# par1 = column index of the endogenous (response) variable; par2 = binning method
# ('none', 'kmeans', 'quantiles', 'hclust', 'equal'); par3 = number of classes;
# par4 = 'yes' runs the 10-fold cross validation
par1 <- as.numeric(par1)
par3 <- as.numeric(par3)
x <- data.frame(t(y))
is.data.frame(x)
x <- x[!is.na(x[,par1]),]
k <- length(x[1,])
n <- length(x[,1])
colnames(x)[par1]
x[,par1]
if (par2 == 'kmeans') {
cl <- kmeans(x[,par1], par3)
print(cl)
clm <- matrix(cbind(cl$centers,1:par3),ncol=2)
clm <- clm[sort.list(clm[,1]),]
for (i in 1:par3) {
cl$cluster[cl$cluster==clm[i,2]] <- paste('C',i,sep='')
}
cl$cluster <- as.factor(cl$cluster)
print(cl$cluster)
x[,par1] <- cl$cluster
}
if (par2 == 'quantiles') {
x[,par1] <- cut2(x[,par1],g=par3)
}
if (par2 == 'hclust') {
hc <- hclust(dist(x[,par1])^2, 'cen')
print(hc)
memb <- cutree(hc, k = par3)
dum <- c(mean(x[memb==1,par1]))
for (i in 2:par3) {
dum <- c(dum, mean(x[memb==i,par1]))
}
hcm <- matrix(cbind(dum,1:par3),ncol=2)
hcm <- hcm[sort.list(hcm[,1]),]
for (i in 1:par3) {
memb[memb==hcm[i,2]] <- paste('C',i,sep='')
}
memb <- as.factor(memb)
print(memb)
x[,par1] <- memb
}
if (par2=='equal') {
ed <- cut(as.numeric(x[,par1]),par3,labels=paste('C',1:par3,sep=''))
x[,par1] <- as.factor(ed)
}
table(x[,par1])
colnames(x)
colnames(x)[par1]
x[,par1]
if (par2 == 'none') {
m <- ctree(as.formula(paste(colnames(x)[par1],' ~ .',sep='')),data = x)
}
load(file='createtable')  # loads the site's table.start/table.row.*/table.element helpers
if (par2 != 'none') {
m <- ctree(as.formula(paste('as.factor(',colnames(x)[par1],') ~ .',sep='')),data = x)
if (par4=='yes') {
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'10-Fold Cross Validation',3+2*par3,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'',1,TRUE)
a<-table.element(a,'Prediction (training)',par3+1,TRUE)
a<-table.element(a,'Prediction (testing)',par3+1,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'Actual',1,TRUE)
for (jjj in 1:par3) a<-table.element(a,paste('C',jjj,sep=''),1,TRUE)
a<-table.element(a,'CV',1,TRUE)
for (jjj in 1:par3) a<-table.element(a,paste('C',jjj,sep=''),1,TRUE)
a<-table.element(a,'CV',1,TRUE)
a<-table.row.end(a)
for (i in 1:10) {
# each iteration uses a fresh random 90/10 split into training and testing sets
ind <- sample(2, nrow(x), replace=T, prob=c(0.9,0.1))
m.ct <- ctree(as.formula(paste('as.factor(',colnames(x)[par1],') ~ .',sep='')),data =x[ind==1,])
if (i==1) {
m.ct.i.pred <- predict(m.ct, newdata=x[ind==1,])
m.ct.i.actu <- x[ind==1,par1]
m.ct.x.pred <- predict(m.ct, newdata=x[ind==2,])
m.ct.x.actu <- x[ind==2,par1]
} else {
m.ct.i.pred <- c(m.ct.i.pred,predict(m.ct, newdata=x[ind==1,]))
m.ct.i.actu <- c(m.ct.i.actu,x[ind==1,par1])
m.ct.x.pred <- c(m.ct.x.pred,predict(m.ct, newdata=x[ind==2,]))
m.ct.x.actu <- c(m.ct.x.actu,x[ind==2,par1])
}
}
print(m.ct.i.tab <- table(m.ct.i.actu,m.ct.i.pred))
numer <- 0
for (i in 1:par3) {
print(m.ct.i.tab[i,i] / sum(m.ct.i.tab[i,]))
numer <- numer + m.ct.i.tab[i,i]
}
print(m.ct.i.cp <- numer / sum(m.ct.i.tab))
print(m.ct.x.tab <- table(m.ct.x.actu,m.ct.x.pred))
numer <- 0
for (i in 1:par3) {
print(m.ct.x.tab[i,i] / sum(m.ct.x.tab[i,]))
numer <- numer + m.ct.x.tab[i,i]
}
print(m.ct.x.cp <- numer / sum(m.ct.x.tab))
for (i in 1:par3) {
a<-table.row.start(a)
a<-table.element(a,paste('C',i,sep=''),1,TRUE)
for (jjj in 1:par3) a<-table.element(a,m.ct.i.tab[i,jjj])
a<-table.element(a,round(m.ct.i.tab[i,i]/sum(m.ct.i.tab[i,]),4))
for (jjj in 1:par3) a<-table.element(a,m.ct.x.tab[i,jjj])
a<-table.element(a,round(m.ct.x.tab[i,i]/sum(m.ct.x.tab[i,]),4))
a<-table.row.end(a)
}
a<-table.row.start(a)
a<-table.element(a,'Overall',1,TRUE)
for (jjj in 1:par3) a<-table.element(a,'-')
a<-table.element(a,round(m.ct.i.cp,4))
for (jjj in 1:par3) a<-table.element(a,'-')
a<-table.element(a,round(m.ct.x.cp,4))
a<-table.row.end(a)
a<-table.end(a)
table.save(a,file='mytable3.tab')
}
}
m
bitmap(file='test1.png')
plot(m)
dev.off()
bitmap(file='test1a.png')
plot(x[,par1] ~ as.factor(where(m)),main='Response by Terminal Node',xlab='Terminal Node',ylab='Response')
dev.off()
if (par2 == 'none') {
forec <- predict(m)
result <- as.data.frame(cbind(x[,par1],forec,x[,par1]-forec))
colnames(result) <- c('Actuals','Forecasts','Residuals')
print(result)
}
if (par2 != 'none') {
print(cbind(as.factor(x[,par1]),predict(m)))
myt <- table(as.factor(x[,par1]),predict(m))
print(myt)
}
bitmap(file='test2.png')
if(par2=='none') {
op <- par(mfrow=c(2,2))
plot(density(result$Actuals),main='Kernel Density Plot of Actuals')
plot(density(result$Residuals),main='Kernel Density Plot of Residuals')
plot(result$Forecasts,result$Actuals,main='Actuals versus Predictions',xlab='Predictions',ylab='Actuals')
plot(density(result$Forecasts),main='Kernel Density Plot of Predictions')
par(op)
}
if(par2!='none') {
plot(myt,main='Confusion Matrix',xlab='Actual',ylab='Predicted')
}
dev.off()
if (par2 == 'none') {
detcoef <- cor(result$Forecasts,result$Actuals)
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Goodness of Fit',2,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'Correlation',1,TRUE)
a<-table.element(a,round(detcoef,4))
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'R-squared',1,TRUE)
a<-table.element(a,round(detcoef*detcoef,4))
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'RMSE',1,TRUE)
a<-table.element(a,round(sqrt(mean((result$Residuals)^2)),4))
a<-table.row.end(a)
a<-table.end(a)
table.save(a,file='mytable1.tab')
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Actuals, Predictions, and Residuals',4,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'#',header=TRUE)
a<-table.element(a,'Actuals',header=TRUE)
a<-table.element(a,'Forecasts',header=TRUE)
a<-table.element(a,'Residuals',header=TRUE)
a<-table.row.end(a)
for (i in 1:length(result$Actuals)) {
a<-table.row.start(a)
a<-table.element(a,i,header=TRUE)
a<-table.element(a,result$Actuals[i])
a<-table.element(a,result$Forecasts[i])
a<-table.element(a,result$Residuals[i])
a<-table.row.end(a)
}
a<-table.end(a)
table.save(a,file='mytable.tab')
}
if (par2 != 'none') {
a<-table.start()
a<-table.row.start(a)
a<-table.element(a,'Confusion Matrix (predicted in columns / actuals in rows)',par3+1,TRUE)
a<-table.row.end(a)
a<-table.row.start(a)
a<-table.element(a,'',1,TRUE)
for (i in 1:par3) {
a<-table.element(a,paste('C',i,sep=''),1,TRUE)
}
a<-table.row.end(a)
for (i in 1:par3) {
a<-table.row.start(a)
a<-table.element(a,paste('C',i,sep=''),1,TRUE)
for (j in 1:par3) {
a<-table.element(a,myt[i,j])
}
a<-table.row.end(a)
}
a<-table.end(a)
table.save(a,file='mytable2.tab')
}
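The table.start/table.row.*/table.element helpers used above come from the site-specific 'createtable' file and are not available in a plain R session. A minimal sketch of the core analysis under the session parameters (par1 = 2, par2 = 'quantiles', par3 = 2), assuming the data frame x from the earlier read.table sketch; it reproduces the fitted tree and the in-sample confusion matrix, but not the HTML tables or the cross-validation loop:

library(party)
library(Hmisc)
# Bin the response column (par1 = 2) into two quantile groups (par2 = 'quantiles', par3 = 2)
x[,2] <- cut2(x[,2], g = 2)
# Grow a conditional inference tree for the binned response against all remaining columns
m <- ctree(as.formula(paste('as.factor(', colnames(x)[2], ') ~ .', sep = '')), data = x)
plot(m)                                     # tree diagram (test1.png in the module)
print(table(as.factor(x[,2]), predict(m)))  # in-sample confusion matrix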