Free Statistics

of Irreproducible Research!

Author's title

Author: *The author of this computation has been verified*
R Software Module: rwasp_regression_trees1.wasp
Title produced by software: Recursive Partitioning (Regression Trees)
Date of computation: Tue, 21 Dec 2010 12:30:57 +0000
Cite this page as follows: Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?v=date/2010/Dec/21/t1292934545m9996usztxywapu.htm/, Retrieved Wed, 08 May 2024 08:08:54 +0000
Statistical Computations at FreeStatistics.org, Office for Research Development and Education, URL https://freestatistics.org/blog/index.php?pk=113393, Retrieved Wed, 08 May 2024 08:08:54 +0000
QR Codes:

Original text written by user:
IsPrivate? No (this computation is public)
User-defined keywords:
Estimated Impact: 174
Family? (F = Feedback message, R = changed R code, M = changed R Module, P = changed Parameters, D = changed Data)
-     [Recursive Partitioning (Regression Trees)] [] [2010-12-05 20:06:20] [b98453cac15ba1066b407e146608df68]
-   PD    [Recursive Partitioning (Regression Trees)] [group CV ws10] [2010-12-21 12:30:57] [4c854bb223ec27caaa7bcfc5e77b0dbd] [Current]
Feedback Forum

Post a new message
Dataseries X:
4321023	29790	444	81767	1	1
4111912	87550	412	153198	1	1
223193	84738	428	-26007	0	1
1491348	54660	315	126942	1	1
1629616	42634	168	157214	1	0
1398893	40949	263	129352	0	1
1926517	45187	267	234817	1	0
983660	37704	228	60448	1	1
1443586	16275	129	47818	1	1
1073089	25830	104	245546	0	1
984885	12679	122	48020	0	1
1405225	18014	393	-1710	1	1
227132	43556	190	32648	0	1
929118	24811	280	95350	1	1
1071292	6575	63	151352	0	0
638830	7123	102	288170	0	0
856956	21950	265	114337	1	1
992426	37597	234	37884	1	1
444477	17821	277	122844	0	1
857217	12988	73	82340	1	1
711969	22330	67	79801	1	0
702380	13326	103	165548	0	0
358589	16189	290	116384	0	1
297978	7146	83	134028	0	1
585715	15824	56	63838	0	1
657954	27664	236	74996	1	1
209458	11920	73	31080	0	1
786690	8568	34	32168	0	1
439798	14416	139	49857	0	1
688779	3369	26	87161	1	1
574339	11819	70	106113	1	1
741409	6984	40	80570	1	0
597793	4519	42	102129	1	1
644190	2220	12	301670	0	1
377934	18562	211	102313	0	1
640273	10327	74	88577	0	1
697458	5336	80	112477	1	1
550608	2365	83	191778	1	1
207393	4069	131	79804	0	1
301607	8636	203	128294	0	0
345783	13718	56	96448	0	1
501749	4525	89	93811	0	0
379983	6869	88	117520	0	0
387475	4628	39	69159	0	0
377305	3689	25	101792	1	1
370837	4891	49	210568	1	1
430866	7489	149	136996	1	1
469107	4901	58	121920	0	1
194493	2284	41	76403	0	1
530670	3160	90	108094	1	1
518365	4150	136	134759	1	1
491303	7285	97	188873	1	1
527021	1134	63	146216	1	0
233773	4658	114	156608	1	1
405972	2384	77	61348	0	0
652925	3748	6	50350	0	1
446211	5371	47	87720	0	0
341340	1285	51	99489	0	1
387699	9327	85	87419	1	1
493408	5565	43	94355	1	0
146494	1528	32	60326	0	0
414462	3122	25	94670	1	1
364304	7561	77	82425	1	0
355178	2675	54	59017	0	0
357760	13253	251	90829	0	1
261216	880	15	80791	0	1
397144	2053	44	100423	1	1
374943	1424	73	131116	0	1
424898	4036	85	100269	1	1
202055	3045	49	27330	1	1
378525	5119	38	39039	0	0
310768	1431	35	106885	0	0
325738	554	9	79285	0	1
394510	1975	34	118881	0	1
247060	1765	20	77623	1	1
368078	1012	29	114768	0	0
236761	810	11	74015	0	0
312378	1280	52	69465	0	1
339836	666	13	117869	1	1
347385	1380	29	60982	0	1
426280	4677	66	90131	1	0
352850	876	33	138971	0	1
301881	814	15	39625	0	0
377516	514	15	102725	0	1
357312	5692	68	64239	1	1
458343	3642	100	90262	0	1
354228	540	13	103960	0	0
308636	2099	45	106611	0	0
386212	567	14	103345	0	0
393343	2001	36	95551	0	0
378509	2949	40	82903	1	1
452469	2253	68	63593	0	1
364839	6533	29	126910	1	1
358649	1889	43	37527	0	1
376641	3055	30	60247	1	0
429112	272	9	112995	0	1
330546	1414	22	70184	1	1
403560	2564	19	130140	0	0
317892	1383	9	73221	1	1
307528	1261	31	76114	1	1
235133	975	19	90534	0	1
299243	3366	55	108479	0	1
314073	576	8	113761	0	1
368186	1686	28	68696	0	1
269661	746	29	71561	0	1
125390	3192	48	59831	1	1
510834	2045	16	97890	1	1
321896	5702	47	101481	0	0
249898	1932	20	72954	1	1
408881	936	22	67939	0	1
158492	3437	33	48022	1	1
292154	5131	44	86111	0	0
289513	2397	13	74020	1	1
378049	1389	6	57530	1	1
343466	1503	35	56364	0	0
332743	402	8	84990	0	0
442882	2239	17	88590	0	0
214215	2234	11	77200	1	1
315688	837	21	61262	0	1
375195	10579	92	110309	0	1
334280	875	12	67000	0	0
355864	1585	112	93099	0	1
480382	1659	25	107577	1	1
353058	2647	17	62920	1	1
217193	3294	23	75832	1	0
315380	0	0	60720	0	0
314533	94	10	60793	0	0
318056	422	23	57935	0	0
315380	0	0	60720	0	1
314353	34	7	60630	0	0
369448	1558	25	55637	0	1
315380	0	1	60720	0	1
312846	43	20	60887	0	0
312075	645	4	60720	0	0
315009	316	4	60505	0	0
318903	115	10	60945	0	1
314887	5	1	60720	0	0
314913	897	4	60720	0	0
315380	0	0	60720	0	0
325506	389	8	58990	0	0
315380	0	0	60720	0	1
298568	1002	11	56750	0	0
315834	36	4	60894	0	0
329784	460	15	63346	0	1
312878	309	9	56535	0	0
315380	0	0	60720	0	1
314987	9	7	60835	0	0
325249	271	2	60720	0	1
315877	14	0	61016	0	1
291650	520	7	58650	0	0
305959	1766	46	60438	0	0
315380	0	5	60720	0	0
297765	458	7	58625	0	1
315245	20	2	60938	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	0
315236	98	2	61490	0	0
336425	405	5	60845	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	1
315380	0	0	60720	0	0
306268	483	7	60830	0	0
302187	454	24	63261	0	1
314882	47	1	60720	0	0
315380	0	0	60720	0	1
382712	757	18	45689	0	1
341570	4655	55	60720	0	1
315380	0	0	60720	0	0
315380	0	0	60720	0	0
312412	36	3	61564	0	1
315380	0	0	60720	0	1
309596	203	9	61938	0	1
315380	0	0	60720	0	1
315547	126	8	60951	0	0
313267	400	113	60720	0	0
316176	71	0	60745	1	1
315380	0	0	60720	0	0
315380	0	0	60720	0	0
359335	972	19	71642	0	1
330068	531	11	71641	1	0
314289	2461	25	55792	0	0
297413	378	16	71873	1	0
314806	23	5	62555	1	1
333210	638	11	60370	1	1
352108	2300	23	64873	1	0
313332	149	6	62041	0	0
291787	226	5	65745	0	1
315380	0	0	60720	0	0
318745	275	7	59500	0	0
315380	0	0	60720	0	0
315366	141	7	61630	0	0
315380	0	0	60720	0	1
315688	28	3	60890	0	0
315380	0	0	60720	1	1
409642	4980	89	113521	1	1
315380	0	0	60720	1	1
315380	0	0	60720	1	1
269587	472	19	80045	1	1
315380	0	0	60720	1	1
315380	0	0	60720	1	1
315380	0	0	60720	1	1
300962	203	12	50804	1	0
325479	496	12	87390	1	1
316155	10	5	61656	1	1
318574	63	2	65688	1	1
315380	0	0	60720	1	1
343613	1136	26	48522	1	0
306948	265	3	60720	1	0
315380	0	0	60720	0	0
315380	0	0	60720	0	1
330059	267	11	57640	0	1
288985	474	10	61977	0	1
304485	534	5	62620	0	1
315380	0	2	60720	0	1
315688	15	6	60831	0	0
317736	397	7	60646	0	0
315380	0	2	60720	0	0
322331	1866	28	56225	0	0
296656	288	3	60510	0	1
315380	0	0	60720	0	1
315354	3	1	60698	0	1
312161	468	20	60720	0	0
315576	20	1	60805	0	0
314922	278	22	61404	0	0
314551	61	9	60720	0	0
315380	0	0	60720	0	1
312339	192	2	65276	0	1
315380	0	0	60720	0	0
298700	317	7	63915	0	1
321376	738	9	60720	0	1
315380	0	0	60720	0	1
303230	368	13	61686	0	0
315380	0	0	60720	0	0
315487	2	0	60743	0	0
315380	0	0	60720	0	0
315793	53	6	60349	0	0
315380	0	0	60720	0	1
315380	0	0	60720	0	1
315380	0	0	60720	0	0
312887	94	3	61360	0	1
315380	0	0	60720	0	0
315637	24	7	59818	0	1
324385	2332	2	72680	0	1
315380	0	0	60720	1	1
315380	0	0	60720	1	1
308989	131	15	61808	0	0
315380	0	0	60720	1	1
315380	0	0	60720	1	0
296702	206	9	53110	0	1
315380	0	0	60720	1	1
307322	167	1	64245	0	1
304376	622	38	73007	0	0
253588	2328	57	82732	0	0
315380	0	0	60720	0	0
309560	365	7	54820	0	0
298466	364	26	47705	0	0
315380	0	0	60720	1	1
315380	0	0	60720	0	1
315380	0	0	60720	1	1
315380	0	0	60720	1	1
343929	226	13	72835	0	0
331955	307	10	58856	0	1
315380	0	0	60720	1	1
315380	0	0	60720	0	1
315380	0	0	60720	1	0
381180	188	9	77655	0	1
315380	0	0	60720	1	1
331420	138	26	69817	0	1
315380	0	0	60720	1	1
315380	0	0	60720	1	1
315380	0	0	60720	1	1
310201	125	19	60798	0	0
315380	0	0	60720	0	0
320016	282	12	62452	0	1
320398	335	23	64175	0	0
315380	0	0	60720	0	1
291841	1324	29	67440	1	1
310670	176	8	68136	0	1
315380	0	0	60720	1	1
315380	0	0	60720	0	0
313491	249	26	56726	0	1
315380	0	0	60720	0	0
331323	333	9	70811	0	1
315380	0	0	60720	0	1
319210	601	5	60720	1	1
318098	30	3	62045	0	0
315380	0	0	60720	1	0
292754	249	13	54323	0	0
315380	0	0	60720	1	1
325176	165	12	62841	0	0
365959	453	19	81125	0	1
315380	0	0	60720	0	0
302409	53	10	59506	0	1
340968	382	9	59365	1	1
315380	0	0	60720	0	0
315380	0	0	60720	1	0
315380	0	0	60720	0	1
315380	0	9	60720	0	0
313164	30	4	60798	1	1
301164	290	1	58790	0	1
315380	0	1	60720	0	1
315380	0	0	60720	1	1
344425	366	14	61808	0	0
315394	2	12	60735	0	0
315380	0	0	60720	1	1
316647	209	19	64016	1	1
309836	384	17	54683	0	0
315380	0	0	60720	1	1
315380	0	0	60720	1	1
346611	365	32	87192	0	1
315380	0	0	60720	1	1
322031	49	14	64107	1	0
315656	3	8	60761	0	0
339445	133	4	65990	0	0
314964	32	0	59988	0	0
297141	368	20	61167	0	0
315372	1	5	60719	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	1
315380	0	0	60720	0	1
315380	0	0	60720	0	1
315380	0	0	60720	1	0
315380	0	0	60720	0	1
312502	22	1	60722	0	1
315380	0	0	60720	1	0
315380	0	0	60720	1	1
315380	0	0	60720	0	1
315380	0	0	60720	0	1
315380	0	0	60720	0	1
315380	0	0	60720	0	0
315380	0	0	60720	0	1
313729	96	4	60379	0	1
315388	1	1	60727	0	0
315371	314	4	60720	0	0
296139	844	20	60925	0	1
315380	0	0	60720	0	0
313880	26	1	60896	0	0
317698	125	10	59734	0	1
295580	304	12	62969	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	1
308256	621	13	60720	0	0
315380	0	0	60720	0	0
303677	119	3	59118	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	1
319369	1595	10	60720	0	0
318690	312	3	58598	0	1
314049	60	7	61124	0	0
325699	587	10	59595	0	1
314210	135	1	62065	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	0
322378	514	15	78780	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	0
315380	0	0	60720	0	1
315398	1	4	60722	0	0
315380	0	0	60720	0	1
315380	0	0	60720	0	0
308336	1763	28	61600	1	1
316386	180	9	59635	0	1
315380	0	0	60720	0	1
315380	0	0	60720	0	1
315380	0	0	60720	0	0
315380	0	0	60720	0	1
315553	218	7	60720	0	0
315380	0	0	60720	0	1
323361	448	7	59781	0	1
336639	227	7	76644	0	0
307424	174	3	64820	0	1
315380	0	0	60720	0	1
315380	0	0	60720	0	1
295370	121	11	56178	0	1
322340	607	7	60436	0	0
319864	2212	10	60720	0	0
315380	0	0	60720	0	1
315380	0	0	60720	0	1
317291	530	18	73433	0	0
280398	571	14	41477	0	0
315380	0	0	60720	0	1
317330	78	12	62700	0	0
238125	2489	29	67804	0	1
327071	131	3	59661	0	0
309038	923	6	58620	0	1
314210	72	3	60398	0	0
307930	572	8	58580	0	1
322327	397	10	62710	0	1
292136	450	6	59325	0	0
263276	622	8	60950	0	1
367655	694	6	68060	0	0
283910	3425	9	83620	1	0
283587	562	8	58456	0	1
243650	4917	26	52811	0	1
438493	1442	239	121173	1	1
296261	529	7	63870	0	1
230621	2126	41	21001	1	1
304252	1061	3	70415	0	0
333505	776	8	64230	0	0
296919	611	6	59190	0	0
278990	1526	21	69351	1	0
276898	592	7	64270	0	0
327007	1182	11	70694	0	0
317046	621	11	68005	0	1
304555	989	12	58930	0	1
298096	438	9	58320	0	0
231861	726	3	69980	0	1
309422	1303	57	69863	0	1
286963	7419	21	63255	1	1
269753	1164	15	57320	1	1
448243	3310	32	75230	1	1
165404	1920	11	79420	0	1
204325	965	2	73490	0	0
407159	3256	23	35250	0	1
290476	1135	20	62285	1	0
275311	1270	24	69206	0	0
246541	661	1	65920	0	0
253468	1013	1	69770	0	0
240897	2844	74	72683	0	1
-83265	11528	68	-14545	1	1
-42143	6526	20	55830	0	1
272713	2264	20	55174	0	1
215362	5109	82	67038	1	1
42754	3999	21	51252	0	1
306275	35624	244	157278	0	1
253537	9252	32	79510	0	1
372631	15236	86	77440	0	1
-7170	18073	69	27284	0	1




Summary of computational transaction
Raw Input: view raw input (R code)
Raw Output: view raw output of R engine
Computing time: 10 seconds
R Server: 'Gwilym Jenkins' @ 72.249.127.135
R Framework error message
The field 'Names of X columns' contains a hard return which cannot be interpreted.
Please, resubmit your request without hard returns in the 'Names of X columns'.

\begin{tabular}{lllllllll}
\hline
Summary of computational transaction \tabularnewline
Raw Input & view raw input (R code)  \tabularnewline
Raw Output & view raw output of R engine  \tabularnewline
Computing time & 10 seconds \tabularnewline
R Server & 'Gwilym Jenkins' @ 72.249.127.135 \tabularnewline
R Framework error message & 
The field 'Names of X columns' contains a hard return which cannot be interpreted.
Please, resubmit your request without hard returns in the 'Names of X columns'.
\tabularnewline \hline \end{tabular} %Source: https://freestatistics.org/blog/index.php?pk=113393&T=0

[TABLE]
[ROW][C]Summary of computational transaction[/C][/ROW]
[ROW][C]Raw Input[/C][C]view raw input (R code) [/C][/ROW]
[ROW][C]Raw Output[/C][C]view raw output of R engine [/C][/ROW]
[ROW][C]Computing time[/C][C]10 seconds[/C][/ROW]
[ROW][C]R Server[/C][C]'Gwilym Jenkins' @ 72.249.127.135[/C][/ROW]
[ROW][C]R Framework error message[/C][C]
The field 'Names of X columns' contains a hard return which cannot be interpreted.
Please, resubmit your request without hard returns in the 'Names of X columns'.
[/C][/ROW] [/TABLE] Source: https://freestatistics.org/blog/index.php?pk=113393&T=0

Globally Unique Identifier (entire table): ba.freestatistics.org/blog/index.php?pk=113393&T=0

As an alternative you can also use a QR Code:  

The GUIDs for individual cells are displayed in the table below:

Summary of computational transaction
Raw Input: view raw input (R code)
Raw Output: view raw output of R engine
Computing time: 10 seconds
R Server: 'Gwilym Jenkins' @ 72.249.127.135
R Framework error message
The field 'Names of X columns' contains a hard return which cannot be interpreted.
Please, resubmit your request without hard returns in the 'Names of X columns'.







10-Fold Cross Validation
Prediction (training)Prediction (testing)
ActualC1C2CVC1C2CV
C126981010.9639300110.9646
C28762090.192687180.1714
Overall--0.7485--0.7644

\begin{tabular}{lllllllll}
\hline
10-Fold Cross Validation \tabularnewline
 & Prediction (training) & Prediction (testing) \tabularnewline
Actual & C1 & C2 & CV & C1 & C2 & CV \tabularnewline
C1 & 2698 & 101 & 0.9639 & 300 & 11 & 0.9646 \tabularnewline
C2 & 876 & 209 & 0.1926 & 87 & 18 & 0.1714 \tabularnewline
Overall & - & - & 0.7485 & - & - & 0.7644 \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=113393&T=1

[TABLE]
[ROW][C]10-Fold Cross Validation[/C][/ROW]
[ROW][C][/C][C]Prediction (training)[/C][C]Prediction (testing)[/C][/ROW]
[ROW][C]Actual[/C][C]C1[/C][C]C2[/C][C]CV[/C][C]C1[/C][C]C2[/C][C]CV[/C][/ROW]
[ROW][C]C1[/C][C]2698[/C][C]101[/C][C]0.9639[/C][C]300[/C][C]11[/C][C]0.9646[/C][/ROW]
[ROW][C]C2[/C][C]876[/C][C]209[/C][C]0.1926[/C][C]87[/C][C]18[/C][C]0.1714[/C][/ROW]
[ROW][C]Overall[/C][C]-[/C][C]-[/C][C]0.7485[/C][C]-[/C][C]-[/C][C]0.7644[/C][/ROW]
[/TABLE]
Source: https://freestatistics.org/blog/index.php?pk=113393&T=1

Globally Unique Identifier (entire table): ba.freestatistics.org/blog/index.php?pk=113393&T=1

As an alternative you can also use a QR Code:  

The GUIDs for individual cells are displayed in the table below:

10-Fold Cross Validation
Prediction (training)Prediction (testing)
ActualC1C2CVC1C2CV
C126981010.9639300110.9646
C28762090.192687180.1714
Overall--0.7485--0.7644







Confusion Matrix (predicted in columns / actuals in rows)
C1C2
C13038
C29821

\begin{tabular}{lllllllll}
\hline
Confusion Matrix (predicted in columns / actuals in rows) \tabularnewline
 & C1 & C2 \tabularnewline
C1 & 303 & 8 \tabularnewline
C2 & 98 & 21 \tabularnewline
\hline
\end{tabular}
%Source: https://freestatistics.org/blog/index.php?pk=113393&T=2

[TABLE]
[ROW][C]Confusion Matrix (predicted in columns / actuals in rows)[/C][/ROW]
[ROW][C][/C][C]C1[/C][C]C2[/C][/ROW]
[ROW][C]C1[/C][C]303[/C][C]8[/C][/ROW]
[ROW][C]C2[/C][C]98[/C][C]21[/C][/ROW]
[/TABLE]
Source: https://freestatistics.org/blog/index.php?pk=113393&T=2

Globally Unique Identifier (entire table): ba.freestatistics.org/blog/index.php?pk=113393&T=2

As an alternative you can also use a QR Code:  

The GUIDs for individual cells are displayed in the table below:

Confusion Matrix (predicted in columns / actuals in rows)
C1C2
C13038
C29821



Parameters (Session):
par1 = 1 ; par2 = quantiles ; par3 = 2 ; par4 = no ;
Parameters (R input):
par1 = 5 ; par2 = quantiles ; par3 = 2 ; par4 = yes ;
R code (references can be found in the software module):
# Load dependencies: party provides ctree(); Hmisc provides cut2().
library(party)
library(Hmisc)
# par1 = index of the response column; par3 = number of classes/clusters.
# Both arrive as strings from the web form, so coerce to numeric.
par1 <- as.numeric(par1)
par3 <- as.numeric(par3)
# y is supplied by the calling framework with series in rows; transpose so
# that observations are rows and variables are columns.
x <- data.frame(t(y))
is.data.frame(x)
# Drop observations whose response value is missing.
x <- x[!is.na(x[, par1]), ]
# Dimensions of the cleaned data (ncol/nrow instead of length(x[1,]) /
# length(x[,1]) — same values, clearer intent). Kept for compatibility.
k <- ncol(x)
n <- nrow(x)
# Auto-printed diagnostics: response column name and its values.
colnames(x)[par1]
x[, par1]
# k-means discretization: cluster the response into par3 groups, then
# relabel the clusters C1..Cpar3 in ascending order of cluster center so
# that class labels are comparable across runs.
if (par2 == 'kmeans') {
cl <- kmeans(x[,par1], par3)
print(cl)
# Pair each cluster center with its original cluster id, sort by center.
clm <- matrix(cbind(cl$centers,1:par3),ncol=2)
clm <- clm[sort.list(clm[,1]),]
for (i in 1:par3) {
# Replace numeric ids with 'Ci' labels; the string labels cannot collide
# with the remaining numeric ids, so in-place replacement is safe here.
cl$cluster[cl$cluster==clm[i,2]] <- paste('C',i,sep='')
}
cl$cluster <- as.factor(cl$cluster)
print(cl$cluster)
# Overwrite the numeric response with its class label.
x[,par1] <- cl$cluster
}
# Quantile discretization: Hmisc::cut2 with g=par3 splits the response
# into par3 groups of (approximately) equal size.
if (par2 == 'quantiles') {
x[,par1] <- cut2(x[,par1],g=par3)
}
# Hierarchical-clustering discretization: centroid-linkage clustering on
# squared distances, cut into par3 groups, then relabeled C1..Cpar3 in
# ascending order of group mean (mirrors the kmeans branch above).
if (par2 == 'hclust') {
hc <- hclust(dist(x[,par1])^2, 'cen')
print(hc)
memb <- cutree(hc, k = par3)
# Mean response per cluster. NOTE(review): the 2:par3 loop assumes
# par3 >= 2 — with par3 == 1 it would run backwards (2:1); confirm the
# module never submits par3 < 2.
dum <- c(mean(x[memb==1,par1]))
for (i in 2:par3) {
dum <- c(dum, mean(x[memb==i,par1]))
}
# Pair each group mean with its group id, sort ascending by mean.
hcm <- matrix(cbind(dum,1:par3),ncol=2)
hcm <- hcm[sort.list(hcm[,1]),]
for (i in 1:par3) {
# First replacement coerces memb to character; later comparisons coerce
# hcm[i,2] to character too, so matching still works as intended.
memb[memb==hcm[i,2]] <- paste('C',i,sep='')
}
memb <- as.factor(memb)
print(memb)
x[,par1] <- memb
}
# Equal-width discretization: cut the response range into par3 intervals
# of equal length, labelled C1..Cpar3, and store it back as a factor.
if (par2 == 'equal') {
  bin_labels <- paste('C', 1:par3, sep = '')
  binned <- cut(as.numeric(x[, par1]), par3, labels = bin_labels)
  x[, par1] <- as.factor(binned)
}
# Auto-printed diagnostics: class distribution of the (possibly
# discretized) response plus column names and response values.
table(x[,par1])
colnames(x)
colnames(x)[par1]
x[,par1]
# Regression mode: fit a conditional-inference tree on the raw numeric
# response when no discretization was requested.
if (par2 == 'none') {
m <- ctree(as.formula(paste(colnames(x)[par1],' ~ .',sep='')),data = x)
}
# Load the table.start/table.element/... HTML-table helpers supplied by
# the framework (serialized to the file 'createtable' in an earlier step).
load(file='createtable')
# Classification mode: model the discretized response as a factor of all
# remaining columns. The formula is loop-invariant, so build it once and
# reuse it for the full fit and for every cross-validation fold.
if (par2 != 'none') {
fml <- as.formula(paste('as.factor(', colnames(x)[par1], ') ~ .', sep = ''))
m <- ctree(fml, data = x)
if (par4 == 'yes') {
# ---- Repeated (10x) 90/10 split cross validation ----
a <- table.start()
a <- table.row.start(a)
a <- table.element(a, '10-Fold Cross Validation', 3 + 2 * par3, TRUE)
a <- table.row.end(a)
a <- table.row.start(a)
a <- table.element(a, '', 1, TRUE)
a <- table.element(a, 'Prediction (training)', par3 + 1, TRUE)
a <- table.element(a, 'Prediction (testing)', par3 + 1, TRUE)
a <- table.row.end(a)
a <- table.row.start(a)
a <- table.element(a, 'Actual', 1, TRUE)
for (jjj in seq_len(par3)) a <- table.element(a, paste('C', jjj, sep = ''), 1, TRUE)
a <- table.element(a, 'CV', 1, TRUE)
for (jjj in seq_len(par3)) a <- table.element(a, paste('C', jjj, sep = ''), 1, TRUE)
a <- table.element(a, 'CV', 1, TRUE)
a <- table.row.end(a)
for (i in 1:10) {
# Random 90%/10% train/test assignment for this repetition.
# (Was replace=T; TRUE is the non-maskable spelling.)
ind <- sample(2, nrow(x), replace = TRUE, prob = c(0.9, 0.1))
m.ct <- ctree(fml, data = x[ind == 1, ])
if (i == 1) {
m.ct.i.pred <- predict(m.ct, newdata = x[ind == 1, ])
m.ct.i.actu <- x[ind == 1, par1]
m.ct.x.pred <- predict(m.ct, newdata = x[ind == 2, ])
m.ct.x.actu <- x[ind == 2, par1]
} else {
# NOTE(review): c() on factors concatenates level codes; predictions and
# actuals are coerced identically, so the contingency tables below stay
# consistent — confirm on the deployed R version.
m.ct.i.pred <- c(m.ct.i.pred, predict(m.ct, newdata = x[ind == 1, ]))
m.ct.i.actu <- c(m.ct.i.actu, x[ind == 1, par1])
m.ct.x.pred <- c(m.ct.x.pred, predict(m.ct, newdata = x[ind == 2, ]))
m.ct.x.actu <- c(m.ct.x.actu, x[ind == 2, par1])
}
}
# Training confusion table, per-class hit rates, and overall accuracy.
print(m.ct.i.tab <- table(m.ct.i.actu, m.ct.i.pred))
numer <- 0
for (i in seq_len(par3)) {
print(m.ct.i.tab[i, i] / sum(m.ct.i.tab[i, ]))
numer <- numer + m.ct.i.tab[i, i]
}
print(m.ct.i.cp <- numer / sum(m.ct.i.tab))
# Testing confusion table, per-class hit rates, and overall accuracy.
print(m.ct.x.tab <- table(m.ct.x.actu, m.ct.x.pred))
numer <- 0
for (i in seq_len(par3)) {
print(m.ct.x.tab[i, i] / sum(m.ct.x.tab[i, ]))
numer <- numer + m.ct.x.tab[i, i]
}
print(m.ct.x.cp <- numer / sum(m.ct.x.tab))
# One table row per class: training counts + CV rate, testing counts + CV rate.
for (i in seq_len(par3)) {
a <- table.row.start(a)
a <- table.element(a, paste('C', i, sep = ''), 1, TRUE)
for (jjj in seq_len(par3)) a <- table.element(a, m.ct.i.tab[i, jjj])
a <- table.element(a, round(m.ct.i.tab[i, i] / sum(m.ct.i.tab[i, ]), 4))
for (jjj in seq_len(par3)) a <- table.element(a, m.ct.x.tab[i, jjj])
a <- table.element(a, round(m.ct.x.tab[i, i] / sum(m.ct.x.tab[i, ]), 4))
a <- table.row.end(a)
}
# Overall accuracy row (per-class cells are not applicable, hence '-').
a <- table.row.start(a)
a <- table.element(a, 'Overall', 1, TRUE)
for (jjj in seq_len(par3)) a <- table.element(a, '-')
a <- table.element(a, round(m.ct.i.cp, 4))
for (jjj in seq_len(par3)) a <- table.element(a, '-')
a <- table.element(a, round(m.ct.x.cp, 4))
a <- table.row.end(a)
a <- table.end(a)
table.save(a, file = 'mytable3.tab')
}
}
# Auto-print the fitted tree, then render it to 'test1.png'.
m
bitmap(file='test1.png')
plot(m)
dev.off()
# Response distribution per terminal node of the fitted tree.
bitmap(file='test1a.png')
plot(x[,par1] ~ as.factor(where(m)),main='Response by Terminal Node',xlab='Terminal Node',ylab='Response')
dev.off()
# Regression mode: collect actuals, in-sample forecasts, and residuals
# into one data frame ('result' is reused by later plotting/table blocks).
if (par2 == 'none') {
forec <- predict(m)
result <- as.data.frame(cbind(x[,par1],forec,x[,par1]-forec))
colnames(result) <- c('Actuals','Forecasts','Residuals')
print(result)
}
# Classification mode: print predictions next to actuals and build the
# confusion matrix (actuals in rows, predictions in columns).
if (par2 != 'none') {
print(cbind(as.factor(x[,par1]),predict(m)))
myt <- table(as.factor(x[,par1]),predict(m))
print(myt)
}
# Diagnostic plots, rendered to 'test2.png'.
bitmap(file='test2.png')
if(par2=='none') {
# Regression mode: 2x2 panel — density plots plus actuals vs predictions.
op <- par(mfrow=c(2,2))
plot(density(result$Actuals),main='Kernel Density Plot of Actuals')
plot(density(result$Residuals),main='Kernel Density Plot of Residuals')
plot(result$Forecasts,result$Actuals,main='Actuals versus Predictions',xlab='Predictions',ylab='Actuals')
plot(density(result$Forecasts),main='Kernel Density Plot of Predictions')
par(op)
}
if(par2!='none') {
# Classification mode: mosaic plot of the confusion matrix built above.
plot(myt,main='Confusion Matrix',xlab='Actual',ylab='Predicted')
}
dev.off()
# Regression mode: goodness-of-fit summary table plus a per-observation
# listing of actuals, forecasts, and residuals (table.* helpers come from
# the 'createtable' file loaded earlier).
if (par2 == 'none') {
# Pearson correlation between forecasts and actuals; its square is the
# reported R-squared.
detcoef <- cor(result$Forecasts, result$Actuals)
a <- table.start()
a <- table.row.start(a)
a <- table.element(a, 'Goodness of Fit', 2, TRUE)
a <- table.row.end(a)
a <- table.row.start(a)
a <- table.element(a, 'Correlation', 1, TRUE)
a <- table.element(a, round(detcoef, 4))
a <- table.row.end(a)
a <- table.row.start(a)
a <- table.element(a, 'R-squared', 1, TRUE)
a <- table.element(a, round(detcoef * detcoef, 4))
a <- table.row.end(a)
a <- table.row.start(a)
a <- table.element(a, 'RMSE', 1, TRUE)
a <- table.element(a, round(sqrt(mean((result$Residuals)^2)), 4))
a <- table.row.end(a)
a <- table.end(a)
table.save(a, file = 'mytable1.tab')
# Per-observation table: row number, actual, forecast, residual.
a <- table.start()
a <- table.row.start(a)
a <- table.element(a, 'Actuals, Predictions, and Residuals', 4, TRUE)
a <- table.row.end(a)
a <- table.row.start(a)
a <- table.element(a, '#', header = TRUE)
a <- table.element(a, 'Actuals', header = TRUE)
a <- table.element(a, 'Forecasts', header = TRUE)
a <- table.element(a, 'Residuals', header = TRUE)
a <- table.row.end(a)
# seq_along() instead of 1:length() so an empty result yields zero
# iterations rather than indexing with c(1, 0).
for (i in seq_along(result$Actuals)) {
a <- table.row.start(a)
a <- table.element(a, i, header = TRUE)
a <- table.element(a, result$Actuals[i])
a <- table.element(a, result$Forecasts[i])
a <- table.element(a, result$Residuals[i])
a <- table.row.end(a)
}
a <- table.end(a)
table.save(a, file = 'mytable.tab')
}
# Classification mode: publish the confusion matrix built earlier
# (predicted classes in columns, actual classes in rows).
if (par2 != 'none') {
a <- table.start()
a <- table.row.start(a)
a <- table.element(a, 'Confusion Matrix (predicted in columns / actuals in rows)', par3 + 1, TRUE)
a <- table.row.end(a)
# Header row: empty corner cell plus one label per predicted class.
a <- table.row.start(a)
a <- table.element(a, '', 1, TRUE)
for (i in seq_len(par3)) {
a <- table.element(a, paste('C', i, sep = ''), 1, TRUE)
}
a <- table.row.end(a)
# Body: one row per actual class; seq_len() (not 1:par3) guards against
# a non-positive par3 producing a backwards sequence.
for (i in seq_len(par3)) {
a <- table.row.start(a)
a <- table.element(a, paste('C', i, sep = ''), 1, TRUE)
for (j in seq_len(par3)) {
a <- table.element(a, myt[i, j])
}
a <- table.row.end(a)
}
a <- table.end(a)
table.save(a, file = 'mytable2.tab')
}