timereg/
timereg/NAMESPACE
useDynLib(timereg)
import(survival)
import(numDeriv)
import(lava)
export("aalen", ## {{{
       "aalen.des", "aalen.des2", "aalenBase", "aalenBaseC",
       ### "additive.pls", "additive.compSs", ### "additive.plsR",
       "check.missing", "rm.missing", "cluster.index.timereg", "coefBase",
       "cox.ipw", "coefcox", "cox.marg", "comp.risk", "comprisk.ipw",
       "const", "cox", "cox.aalen", "cox.aalenBase", "cum.residuals",
       "Cpred", "Csmooth2B", "CsmoothB", "cum.residuals", ### "cv.folds",
       "dynreg", "dynregBase", "Event", "event.split", "as.matrix.Event",
       "as.character.Event", "format.Event", "print.Event", "summary.Event",
       "rbind.Event", "[.Event", "Gprop.odds", ### "Gprop.odds.subdist",
       "is.diag", "invsubdist", "subdist", "krylow.pls", "kmplot",
       "localTimeReg", "nameestimate", "namematrix", "names2formula",
       "pava.pred", "pe.sasieni", "percen", "plotConfregion", "pred.cum",
       "pred.des", "predictpropodds", ### "summary.pls",
       "prep.comp.risk", "pre.cifs", "prep.glm.comprisk", "pred.stratKM",
       "prop", "prop.excess", "prop.excessBase", "prop.odds", ### "prop.odds.gam",
       "prop.odds.subdist", "prop.odds.subdist.ipw", "pval", "pava", "qcut",
       "res.mean", "rchaz", "rcrisk", "recurrent.marginal.mean",
       "recurrent.marginal.coxmean", "read.design", "read.surv", "read.fit",
       "residualsTimereg", "restricted.residual.mean", "semiaalen", "semicox",
       "semiregBase", "sindex.prodlim", "simsubdist", ### "pls.surv.cv",
       "pc.hazard", ### "pchazard.sim",
       "simrchaz", "sim.cif", "sim.cifs", "sim.cox", "sim.cause.cox",
       "timecox", "timecoxBase", "timetest", "timereg.formula",
       "two.stage", "wald.test"
) ## }}}
S3method(coef,comprisk.ipw)
S3method(print,comprisk.ipw)
S3method(summary,comprisk.ipw)
export("coef.comprisk.ipw","print.comprisk.ipw","summary.comprisk.ipw")
S3method(coef,cox.ipw)
S3method(print,cox.ipw)
S3method(summary,cox.ipw)
export("coef.cox.ipw","print.cox.ipw","summary.cox.ipw")
S3method(coef,cox.marg)
S3method(print,cox.marg)
S3method(summary,cox.marg)
export("coef.cox.marg","print.cox.marg","summary.cox.marg")
S3method(coef,resmean)
S3method(plot,resmean)
S3method(print,resmean)
S3method(summary,resmean)
export("coef.resmean","plot.resmean","print.resmean","summary.resmean")
S3method(plot,restricted.residual.mean)
S3method(summary,restricted.residual.mean)
export("plot.restricted.residual.mean","summary.restricted.residual.mean")
###S3method(print,restricted.residual.resmean)
###export("plot.restricted.residual.mean","print.restricted.residual.mean","summary.restricted.residual.mean")
S3method(plot,aalen)
S3method(plot,comprisk)
S3method(plot,cum.residuals)
S3method(plot,cox.aalen)
S3method(plot,dynreg)
S3method(plot,predict.timereg)
S3method(plot,prop.excess)
S3method(plot,timecox)
S3method(plot,two.stage)
S3method(plot,predict.timereg)
S3method(print,aalen)
S3method(print,cox.aalen)
S3method(print,comprisk)
S3method(print,cum.residuals)
S3method(print,dynreg)
S3method(print,pe.sasieni)
S3method(print,predict.timereg)
S3method(print,prop.excess)
S3method(print,timecox)
S3method(print,two.stage)
###S3method(print,pls)
S3method(summary,aalen)
S3method(summary,cox.aalen)
S3method(summary,comprisk)
S3method(summary,cum.residuals)
S3method(summary,dynreg)
S3method(summary,pe.sasieni)
S3method(summary,predict.timereg)
S3method(summary,prop.excess)
S3method(summary,timecox)
S3method(summary,two.stage)
###S3method(summary,pls)
S3method(predict,aalen)
S3method(predict,comprisk)
S3method(predict,cox.aalen)
S3method(predict,two.stage)
###S3method(predict,pls)
S3method(coef,aalen)
S3method(coef,cox.aalen)
S3method(coef,dynreg)
S3method(coef,timecox)
S3method(coef,two.stage)
S3method(print,Event)
S3method(summary,Event)
S3method(rbind,Event)
S3method(as.matrix,Event)
S3method(as.character,Event)
S3method(format,Event)
S3method("[",Event)
S3method(coef,comprisk)
S3method(predict,timereg)
S3method(plot,cums)
S3method(vcov,aalen)
S3method(vcov,cox.aalen)
###S3method(coef,dynreg)
###S3method(coef,timecox)
S3method(vcov,two.stage)
S3method(vcov,comp.risk)
export("plot.aalen", "plot.comprisk", "plot.cum.residuals", "plot.cox.aalen",
       "plot.cums", "plotScore", "plot.dynreg", "plot.predict.timereg",
       "plot.prop.excess", "plot.timecox", "plot.two.stage",
       "print.aalen", "print.cox.aalen", "print.comprisk", "print.cum.residuals",
       "print.dynreg", "print.pe.sasieni", "print.predict.timereg",
       "print.prop.excess", "print.timecox", "print.two.stage", ### "print.pls",
       "summary.aalen", "summary.cox.aalen", "summary.comprisk",
       "summary.cum.residuals", "summary.dynreg", "summary.pe.sasieni", ### "summary.pls",
       "summary.predict.timereg", "summary.prop.excess", "summary.timecox",
       "summary.two.stage",
       "predict.aalen", ### "predict.pls",
       "predict.comprisk", "predict.timereg", "predict.cox.aalen",
       "predict.two.stage", "predict.timereg",
       "coef.aalen", "coef.cox.aalen", "coef.comprisk", "coef.dynreg",
       "coef.timecox", "coef.two.stage",
       "vcov.two.stage", "vcov.comp.risk", "vcov.cox.aalen", "vcov.aalen"
)
importFrom("grDevices", "col2rgb", "rgb")
importFrom("graphics", "abline", "legend", "lines", "matplot", "plot",
           "points", "polygon", "title")
importFrom("stats", "approx", "as.formula", "coef", "delete.response",
           "formula", "glm", "model.extract", "model.frame", "model.matrix",
           "na.omit", "pchisq", "pnorm", "predict", "qnorm", "quantile",
           "runif", "terms", "time", "update")
importFrom("utils", "head", "tail")
importFrom("stats", "rexp")
importFrom("stats", "rbinom")
importFrom("stats", "get_all_vars")
importFrom("stats", "vcov")
importFrom("methods", "hasArg")
timereg/data/
timereg/data/TRACE.txt.gz  [binary gzip-compressed data file; contents not shown]
timereg/data/csl.txt.gz  [binary gzip-compressed data file; contents not shown]
timereg/data/sTRACE.txt.gz  [binary gzip-compressed data file; contents not shown]
timereg/data/diabetes.txt.gz  [binary gzip-compressed data file; contents not shown]
timereg/data/cd4.txt.gz  [binary gzip-compressed data file; contents not shown]
timereg/data/melanoma.txt.gz  [binary gzip-compressed data file; contents not shown]
timereg/data/bmt.txt.gz  [binary gzip-compressed data file; contents not shown]
timereg/data/mela.pop.txt.gz  [binary gzip-compressed data file; contents not shown]
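The .txt.gz files above are the package's bundled example datasets; once the package is installed they are loaded by name with data(). A minimal sketch, assuming a standard R session with timereg installed (dataset descriptions are taken from the man pages further below):

# load a few of the shipped datasets and inspect them
library(timereg)
data(TRACE)      # myocardial infarction cohort used in many of the examples
data(bmt)        # competing-risks data used in the sim.cif example
data(mela.pop)   # melanoma data with Danish population mortality rates
str(TRACE)
head(bmt)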
ZDٓTRXD6VnPQc,,鐪}w?{ 0~#ALoF٣ g1 ) ڴɄ +:^qzRP# "(tȖ6:wr; aHv]\jmԌ;Ds5w]I* ?o@W@ղlil_?W6L ?,qt+ʞo[Q+ͩ WS#437 ˩ʟJʟ ʟpRlO%uȉ{YK#-w}I?H?c(ܭ>:C"Xvn]!֝dםf17'4߲ӈ9= #Tr{T4S4*c4s4ۃ4ۓ4ۣ4D?)@+``9Җ{Бؓu$-5# gV=Ht)hמeB0#龐=H<#HҁFp Pq@TO 5$?kH-9mhy֑ _z{KOOOޟ?A_?Q_&@('F2mC;%*Qd[vD%m4WKci(j%*[w4lNJTۓ-f]QQm( )R+)lKKE z_64HfР/-h}yi-i}}iZs\j(m{_c_c_c_5W17~/cO;C.km%ʥM&M&%M&eM&M&&"U͏ b7DoKϻ5QeWG#n$n0E7X'*=aqun=uDy ([GOY'Oj.4$*VzSm]) Mpt(ZdjBkq9:케w:\yb-ozb*-v.Q3v_0$pl^ӎ9e}Srמ3侗ӎC{exUq=eiNԙ(*!''(J l.7}NCj|F5>WrZ4$QV!2 IT#yoH 3U":?-="M"4L堪2㋆)ư91T}/5zdzV}AƽV}Aƽ@߼7Om {P' !d i'3lxtY-ƟC TաnUkV."=ׅ)- "фV{Ąd %lrCuǵB(7^hU\jj%hax@SPmU=*tgU5E!tH|xh"{3y' #7LPY/Y[x]PA.SѨ?K5{P󱇑 5{PIpQE!ʐWn\$W.y(zt{T{VAy){_t2/ ;<7oIO6Daj.$t"ܩU;Ew!uN]HyS"b;En."SN>]&dYdoW"2Y;E)OAxvLNѼT} q~~L2+cMz<:g3*)Zd۫S4ȶWm{u۶Wm{u۶Wjw:WYM~}[Fߕ|w%q]g\?:yDHp[9"ðݱaX0vwlVe4~|timereg/man/0000755000176200001440000000000013517763134012473 5ustar liggesuserstimereg/man/recurrent.marginal.coxmean.Rd0000644000176200001440000000425313377441614020221 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/recurrent.r \name{recurrent.marginal.coxmean} \alias{recurrent.marginal.coxmean} \title{Estimates marginal mean of recurrent events based on two cox models} \usage{ recurrent.marginal.coxmean(recurrent, death) } \arguments{ \item{recurrent}{aalen model for recurrent events} \item{death}{cox.aalen (cox) model for death events} } \description{ Fitting two Cox models for death and recurent events these are combined to prducte the estimator \deqn{ \int_0^t S(u|x=0) dR(u|x=0) } the mean number of recurrent events, here \deqn{ S(u|x=0) } is the probability of survival, and \deqn{ dR(u|x=0) } is the probability of an event among survivors. For now the estimator is based on the two-baselines so \deqn{x=0}, but covariates can be rescaled to look at different x's and extensions possible. } \details{ IID versions along the lines of Ghosh & Lin (2000) variance. See also mets package for quick version of this for large data. IID versions used for Ghosh & Lin (2000) variance. See also mets package for quick version of this for large data mets:::recurrent.marginal, these two version should give the same when there are now ties. } \examples{ \donttest{ ### do not test because iid slow and uses data from mets library(mets) data(base1cumhaz) data(base4cumhaz) data(drcumhaz) dr <- drcumhaz base1 <- base1cumhaz base4 <- base4cumhaz rr <- simRecurrent(100,base1,death.cumhaz=dr) rr$x <- rnorm(nrow(rr)) rr$strata <- floor((rr$id-0.01)/50) drename(rr) <- start+stop~entry+time ar <- cox.aalen(Surv(start,stop,status)~+1+prop(x)+cluster(id),data=rr, resample.iid=1,,max.clust=NULL,max.timepoint.sim=NULL) ad <- cox.aalen(Surv(start,stop,death)~+1+prop(x)+cluster(id),data=rr, resample.iid=1,,max.clust=NULL,max.timepoint.sim=NULL) mm <- recurrent.marginal.coxmean(ar,ad) with(mm,plot(times,mu,type="s")) with(mm,lines(times,mu+1.96*se.mu,type="s",lty=2)) with(mm,lines(times,mu-1.96*se.mu,type="s",lty=2)) } } \references{ Ghosh and Lin (2002) Nonparametric Analysis of Recurrent events and death, Biometrics, 554--562. 
}
\author{
Thomas Scheike
}
\keyword{survival}
timereg/man/sim.cif.Rd
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/sim-pc-hazard.r
\name{sim.cif}
\alias{sim.cif}
\alias{sim.cifs}
\alias{subdist}
\alias{pre.cifs}
\title{Simulation of output from Cumulative incidence regression model}
\usage{
\method{sim}{cif}(cif, n, data = NULL, Z = NULL, drawZ = TRUE, cens = NULL,
  rrc = NULL, cumstart = c(0, 0), ...)
}
\arguments{
\item{cif}{output from prop.odds.subdist or crr (cmprsk); can also call
invsubdist with cumulative and linear predictor}

\item{n}{number of simulations.}

\item{data}{to extract covariates for simulations (draws from observed
covariates).}

\item{Z}{to use these covariates for simulation rather than drawing new ones.}

\item{drawZ}{to random sample from Z or not}

\item{cens}{specifies censoring model: if a matrix then uses the given
cumulative hazard, if a scalar then uses it as the rate of an exponential,
and if not given then takes the average rate in data simulated from the cox
model.}

\item{rrc}{possible vector of relative risk for cox-type censoring.}

\item{cumstart}{to start cumulatives at time 0 in 0.}

\item{...}{arguments for invsubdist}
}
\description{
Simulates data that looks like a fit from a fitted cumulative incidence model
}
\examples{
data(TRACE)
## Logit link for proportional odds model, using comp.risk to save time
#' cif <- prop.odds.subdist(Event(time,status)~vf+chf+wmi,data=TRACE,cause=9)
cif <- comp.risk(Event(time,status)~const(vf)+const(chf)+const(wmi),
       data=TRACE,cause=9,model="logistic2")
sim1 <- sim.cif(cif,500,data=TRACE)
#' cc <- prop.odds.subdist(Event(time,status)~vf+chf+wmi,data=sim1,cause=1)
cc <- comp.risk(Event(time,status)~const(vf)+const(chf)+const(wmi),
      data=sim1,cause=1,model="logistic2")
cbind(cif$gamma,cc$gamma)
plot(cif)
lines(cc$cum)

#################################################################
## Fine-Gray model, using comp.risk to avoid dependencies
#################################################################
cif <- comp.risk(Event(time,status)~const(vf)+const(chf)+const(wmi),
       data=TRACE,cause=9)
sim1 <- sim.cif(cif,500,data=TRACE)
#' cc <- crr
cc <- comp.risk(Event(time,status)~const(vf)+const(chf)+const(wmi),
      data=sim1,cause=1)
cbind(cif$gamma,cc$gamma)
plot(cif)
lines(cc$cum)

# data(TRACE)
# mm <- model.matrix(~vf+chf+wmi,data=TRACE)[,-1]
# library(cmprsk)
# cif <- crr(TRACE$time,TRACE$status,mm,failcode=9)
# sim1 <- sim.cif(cif,10000,data=TRACE,Z=mm)
# mms <- model.matrix(~vf+chf+wmi,data=sim1)[,-1]
# #' cc <- prop.odds.subdist(Event(time,status)~vf+chf+wmi,data=sim1,cause=1)
# cif1 <- crr(sim1$time,sim1$status,mms,failcode=1)
# cbind(cif$coef,cif1$coef)
#
################################################################
# simulating several causes with specific cumulatives
################################################################
data(bmt)
cif1 <- comp.risk(Event(time,cause)~const(tcell)+const(age),
        data=bmt,cause=1,model="logistic2")
cif2 <- comp.risk(Event(time,cause)~const(tcell)+const(age),
        data=bmt,cause=2,model="logistic2")

## must look at same time-scale
cifs <- pre.cifs(list(cif1,cif2))
plot(cifs[[1]]$cum,type="l")
lines(cifs[[2]]$cum,col=2)
legend("topleft",c("cause1","cause2"),lty=1,col=1:2)

n <- 500
sim1 <- sim.cif(cifs[[1]],n,data=bmt)
Z <- sim1[,c("tcell","age")]
sim2 <- sim.cif(cifs[[2]],n,data=bmt,Z=Z,drawZ=FALSE)
###
rt <- rbinom(n,1,(sim1$F1tau+sim2$F1tau))
rb <- rbinom(n,1,sim1$F1tau/(sim1$F1tau+sim2$F1tau))
cause=ifelse(rb==1,1,2)
time=ifelse(cause==1,sim1$timecause,sim2$timecause)
cause <- rt*cause
time[cause==0] <- tail(cifs[[1]]$cum[,1],1)

bt <- data.frame(time=time,cause=cause,tcell=sim1$tcell,age=sim1$age)
scif1 <- comp.risk(Event(time,cause)~const(tcell)+const(age),
         data=bt,cause=1,model="logistic2")
scif2 <- comp.risk(Event(time,cause)~const(tcell)+const(age),
         data=bt,cause=2,model="logistic2")

plot(scif1$cum,type="l")
lines(scif2$cum,col=1,lty=2)
legend("topleft",c("cause1","cause2"),lty=1:2,col=1:1)
lines(cifs[[1]]$cum,col=2)
lines(cifs[[2]]$cum,col=2,lty=2)

# Everything wrapped in a call, assuming covariates work in the same way for the two models
dd <- sim.cifs(list(cif1,cif2),2000,data=bmt)
scif1 <- comp.risk(Event(time,cause)~const(tcell)+const(age),
         data=dd,cause=1,model="logistic2")
scif2 <- comp.risk(Event(time,cause)~const(tcell)+const(age),
         data=dd,cause=2,model="logistic2")

plot(scif1$cum,type="l")
lines(scif2$cum,col=1,lty=2)
legend("topleft",c("cause1","cause2"),lty=1:2,col=1:1)
lines(cifs[[1]]$cum,col=2)
lines(cifs[[2]]$cum,col=2,lty=2)
}
\author{
Thomas Scheike
}
\keyword{survival}
timereg/man/prop.excess.Rd
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/new.prop-excess.r
\name{prop.excess}
\alias{prop.excess}
\title{Fits Proportional excess hazards model}
\usage{
prop.excess(formula = formula(data), data = sys.parent(), excess = 1,
  tol = 1e-04, max.time = NULL, n.sim = 1000, alpha = 1, frac = 1)
}
\arguments{
\item{formula}{a formula object, with the response on the left of a `~'
operator, and the terms on the right. The response must be a survival
object as returned by the `Surv' function.}

\item{data}{a data.frame with the variables.}

\item{excess}{specifies for which of the subjects the excess term is
present. Default is that the term is present for all subjects.}

\item{tol}{tolerance for numerical procedure.}

\item{max.time}{stopping considered time-period if different from 0.
Estimates are thus computed on [0,max.time] if max.time>0. Default is max
of data.}

\item{n.sim}{number of simulations in re-sampling.}

\item{alpha}{tuning parameter in Newton-Raphson procedure. Value smaller
than one may give more stable convergence.}

\item{frac}{number between 0 and 1. Is used in the supremum test where the
observed jump times t1, ..., tk are replaced by t1, ..., tl with
l=round(frac*k).}
}
\value{
Returns an object of type "prop.excess". With the following arguments:
\item{cum}{estimated cumulative regression functions. First column contains
the jump times, then follow the estimated components of the additive part of
the model and finally the excess cumulative baseline.}
\item{var.cum}{robust pointwise variance estimates for estimated
cumulatives.}
\item{gamma}{estimate of parametric components of model.}
\item{var.gamma}{robust variance estimate for gamma.}
\item{pval}{p-value of Kolmogorov-Smirnov test (variance weighted) for
excess baseline and Aalen terms, H: B(t)=0.}
\item{pval.HW}{p-value of supremum test (corresponding to Hall-Wellner band)
for excess baseline and Aalen terms, H: B(t)=0. Reported in summary.}
\item{pval.CM}{p-value of Cramer von Mises test for excess baseline and
Aalen terms, H: B(t)=0.}
\item{quant}{95 percent quantile in distribution of resampled
Kolmogorov-Smirnov test statistics for excess baseline and Aalen terms.
Used to construct 95 percent simulation band.
} \item{quant95HW}{95 percent quantile in distribution of resampled supremum test statistics corresponding to Hall-Wellner band for excess baseline and Aalen terms. Used to construct 95 percent Hall-Wellner band. } \item{simScoreProp}{observed scoreprocess and 50 resampled scoreprocesses (under model). List with 51 elements. } } \description{ Fits proportional excess hazards model. } \details{ The models are written using the survival modelling given in the survival package. The program assumes that there are no ties, and if such are present random noise is added to break the ties. } \examples{ ###working on memory leak issue, 3/3-2015 ###data(melanoma) ###lt<-log(melanoma$thick) # log-thickness ###excess<-(melanoma$thick>=210) # excess risk for thick tumors ### #### Fits Proportional Excess hazards model ###fit<-prop.excess(Surv(days/365,status==1)~sex+ulc+cox(sex)+ ### cox(ulc)+cox(lt),melanoma,excess=excess,n.sim=100) ###summary(fit) ###par(mfrow=c(2,3)) ###plot(fit) } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer Verlag (2006). } \author{ Torben Martinussen } \keyword{survival} timereg/man/summary.aalen.Rd0000644000176200001440000000152113075643764015542 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.aalen.r \name{summary.aalen} \alias{summary.aalen} \alias{summary.cox.aalen} \alias{summary.prop.excess} \alias{summary.timecox} \alias{summary.dynreg} \title{Prints summary statistics} \usage{ \method{summary}{aalen}(object, digits = 3, ...) } \arguments{ \item{object}{an aalen object.} \item{digits}{number of digits in printouts.} \item{...}{unused arguments - for S3 compatibility} } \description{ Computes p-values for test of significance for nonparametric terms of model, p-values for test of constant effects based on both supremum and integrated squared difference. } \details{ Returns parameter estimates and their standard errors. } \examples{ ### see help(aalen) } \references{ Martinussen and Scheike, } \author{ Thomas Scheike } \keyword{survival} timereg/man/mela.pop.Rd0000644000176200001440000000244513075643763014506 0ustar liggesusers\name{mela.pop} \alias{mela.pop} \non_function{} \title{Melanoma data and Danish population mortality by age and sex } \description{Melanoma data with background mortality of Danish population. } \format{ This data frame contains the following columns: \describe{ \item{id}{ a numeric vector. Gives patient id. } \item{sex}{ a numeric vector. Gives sex of patient. } \item{start}{ a numeric vector. Gives the starting time for the time-interval for which the covariate rate is representative. } \item{stop}{ a numeric vector. Gives the stopping time for the time-interval for which the covariate rate is representative. } \item{status}{ a numeric vector code. Survival status. 1: dead from melanoma, 0: alive or dead from other cause. } \item{age}{ a numeric vector. Gives the age of the patient at removal of tumor. } \item{rate}{ a numeric vector. Gives the population mortality for the given sex and age. Based on Table A.2 in Andersen et al. (1993). } } } \source{ Andersen, P.K., Borgan O, Gill R.D., Keiding N. (1993), \emph{Statistical Models Based on Counting Processes}, Springer-Verlag. 
} \examples{ data(mela.pop) names(mela.pop) } \keyword{datasets} timereg/man/TRACE.Rd0000644000176200001440000000312313075643764013624 0ustar liggesusers\name{TRACE} \alias{TRACE} \alias{sTRACE} \alias{tTRACE} \non_function{} \title{The TRACE study group of myocardial infarction} \description{ The TRACE data frame contains 1877 patients and is a subset of a data set consisting of approximately 6000 patients. It contains data relating survival of patients after myocardial infarction to various risk factors. sTRACE is a subsample consisting of 300 patients. tTRACE is a subsample consisting of 1000 patients. } \format{ This data frame contains the following columns: \describe{ \item{id}{a numeric vector. Patient code. } \item{status}{ a numeric vector code. Survival status. 9: dead from myocardial infarction, 0: alive, 7: dead from other causes. } \item{time}{ a numeric vector. Survival time in years. } \item{chf}{ a numeric vector code. Clinical heart pump failure, 1: present, 0: absent. } \item{diabetes}{ a numeric vector code. Diabetes, 1: present, 0: absent. } \item{vf}{ a numeric vector code. Ventricular fibrillation, 1: present, 0: absent. } \item{wmi}{ a numeric vector. Measure of heart pumping effect based on ultrasound measurements where 2 is normal and 0 is worst. } \item{sex}{ a numeric vector code. 1: female, 0: male. } \item{age}{ a numeric vector code. Age of patient. } } } \source{ The TRACE study group. Jensen, G.V., Torp-Pedersen, C., Hildebrandt, P., Kober, L., F. E. Nielsen, Melchior, T., Joen, T. and P. K. Andersen (1997), Does in-hospital ventricular fibrillation affect prognosis after myocardial infarction?, European Heart Journal 18, 919--924. } \examples{ data(TRACE) names(TRACE) } \keyword{datasets} timereg/man/timecox.Rd0000644000176200001440000001253313413657513014434 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.timecox.r \name{timecox} \alias{timecox} \title{Fit Cox model with partly timevarying effects.} \usage{ timecox(formula = formula(data), data = sys.parent(), start.time = 0, max.time = NULL, id = NULL, clusters = NULL, n.sim = 1000, residuals = 0, robust = 1, Nit = 20, bandwidth = 0.5, method = "basic", weighted.test = 0, degree = 1, covariance = 0) } \arguments{ \item{formula}{a formula object with the response on the left of a '~' operator, and the independent terms on the right as regressors. The response must be a survival object as returned by the `Surv' function. Time-invariant regressors are specified by the wrapper const(), and cluster variables (for computing robust variances) by the wrapper cluster().} \item{data}{a data.frame with the variables.} \item{start.time}{start of observation period where estimates are computed.} \item{max.time}{end of observation period where estimates are computed. Estimates thus computed from [start.time, max.time]. Default is max of data.} \item{id}{For timevarying covariates the variable must associate each record with the id of a subject.} \item{clusters}{cluster variable for computation of robust variances.} \item{n.sim}{number of simulations in resampling.} \item{residuals}{to returns residuals that can be used for model validation in the function cum.residuals} \item{robust}{to compute robust variances and construct processes for resampling. May be set to 0 to save memory.} \item{Nit}{number of iterations for score equations.} \item{bandwidth}{bandwidth for local iterations. 
Default is 50 \% of the range of the considered observation period.} \item{method}{Method for estimation. This refers to different parametrisations of the baseline of the model. Options are "basic" where the baseline is written as \eqn{\lambda_0(t) = \exp(\alpha_0(t))} or the "breslow" version where the baseline is parametrised as \eqn{\lambda_0(t)}.} \item{weighted.test}{to compute a variance weighted version of the test-processes used for testing time-varying effects.} \item{degree}{gives the degree of the local linear smoothing, that is local smoothing. Possible values are 1 or 2.} \item{covariance}{to compute covariance estimates for nonparametric terms rather than just the variances.} } \value{ Returns an object of type "timecox". With the following arguments: \item{cum}{cumulative timevarying regression coefficient estimates are computed within the estimation interval.} \item{var.cum}{the martingale based pointwise variance estimates. } \item{robvar.cum}{robust pointwise variances estimates. } \item{gamma}{estimate of parametric components of model. } \item{var.gamma}{variance for gamma. } \item{robvar.gamma}{robust variance for gamma. } \item{residuals}{list with residuals. Estimated martingale increments (dM) and corresponding time vector (time).} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value for covariate effects based on supremum test.} \item{sim.testBeq0}{resampled supremum values.} \item{obs.testBeqC}{observed absolute value of supremum of difference between observed cumulative process and estimate under null of constant effect.} \item{pval.testBeqC}{p-value based on resampling.} \item{sim.testBeqC}{resampled supremum values.} \item{obs.testBeqC.is}{observed integrated squared differences between observed cumulative and estimate under null of constant effect.} \item{pval.testBeqC.is}{p-value based on resampling.} \item{sim.testBeqC.is}{resampled supremum values.} \item{conf.band}{resampling based constant to construct robust 95\% uniform confidence bands. } \item{test.procBeqC}{observed test-process of difference between observed cumulative process and estimate under null of constant effect over time. } \item{sim.test.procBeqC}{list of 50 random realizations of test-processes under null based on resampling.} \item{schoenfeld.residuals}{Schoenfeld residuals are returned for "breslow" parametrisation.} } \description{ Fits proportional hazards model with some effects time-varying and some effects constant. Time dependent variables and counting process data (multiple events per subject) are possible. } \details{ Resampling is used for computing p-values for tests of timevarying effects. The modelling formula uses the standard survival modelling given in the \bold{survival} package. The data for a subject is presented as multiple rows or 'observations', each of which applies to an interval of observation (start, stop]. When counting process data with the )start,stop] notation is used, the 'id' variable is needed to identify the records for each subject. The program assumes that there are no ties, and if such are present random noise is added to break the ties. 
} \examples{ data(sTRACE) # Fits time-varying Cox model out<-timecox(Surv(time/365,status==9)~age+sex+diabetes+chf+vf, data=sTRACE,max.time=7,n.sim=100) summary(out) par(mfrow=c(2,3)) plot(out) par(mfrow=c(2,3)) plot(out,score=TRUE) # Fits semi-parametric time-varying Cox model out<-timecox(Surv(time/365,status==9)~const(age)+const(sex)+ const(diabetes)+chf+vf,data=sTRACE,max.time=7,n.sim=100) summary(out) par(mfrow=c(2,3)) plot(out) } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/man/plot.dynreg.Rd0000644000176200001440000000515113377441614015231 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.dynreg.r \name{plot.dynreg} \alias{plot.dynreg} \title{Plots estimates and test-processes} \usage{ \method{plot}{dynreg}(x, type = "eff.smooth", pointwise.ci = 1, hw.ci = 0, sim.ci = 0, robust = 0, specific.comps = FALSE, level = 0.05, start.time = 0, stop.time = 0, add.to.plot = FALSE, mains = TRUE, xlab = "Time", ylab = "Cumulative coefficients", score = FALSE, ...) } \arguments{ \item{x}{the output from the "dynreg" function.} \item{type}{the estimator plotted. Choices "eff.smooth", "ms.mpp", "0.mpp" and "ly.mpp". See the dynreg function for more on this.} \item{pointwise.ci}{if >1 pointwise confidence intervals are plotted with lty=pointwise.ci} \item{hw.ci}{if >1 Hall-Wellner confidence bands are plotted with lty=hw.ci. Only 0.95 \% bands can be constructed.} \item{sim.ci}{if >1 simulation based confidence bands are plotted with lty=sim.ci. These confidence bands are robust to non-martingale behaviour.} \item{robust}{robust standard errors are used to estimate standard error of estimate, otherwise martingale based estimate are used.} \item{specific.comps}{all components of the model is plotted by default, but a list of components may be specified, for example first and third "c(1,3)".} \item{level}{gives the significance level.} \item{start.time}{start of observation period where estimates are plotted.} \item{stop.time}{end of period where estimates are plotted. Estimates thus plotted from [start.time, max.time].} \item{add.to.plot}{to add to an already existing plot.} \item{mains}{add names of covariates as titles to plots.} \item{xlab}{label for x-axis.} \item{ylab}{label for y-axis.} \item{score}{to plot test processes for test of time-varying effects along with 50 random realization under the null-hypothesis.} \item{...}{unused arguments - for S3 compatibility} } \description{ This function plots the non-parametric cumulative estimates for the additive risk model or the test-processes for the hypothesis of constant effects with re-sampled processes under the null. } \examples{ \donttest{ ### runs slowly and therefore donttest data(csl) indi.m<-rep(1,length(csl$lt)) # Fits time-varying regression model out<-dynreg(prot~treat+prot.prev+sex+age,csl, Surv(lt,rt,indi.m)~+1,start.time=0,max.time=3,id=csl$id, n.sim=100,bandwidth=0.7,meansub=0) par(mfrow=c(2,3)) # plots estimates plot(out) # plots tests-processes for time-varying effects plot(out,score=TRUE) } } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). 
} \author{ Thomas Scheike } \keyword{survival} timereg/man/print.aalen.Rd0000644000176200001440000000106113075643764015200 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.aalen.r \name{print.aalen} \alias{print.aalen} \alias{print.cox.aalen} \alias{print.comprisk} \alias{print.prop.excess} \alias{print.dynreg} \alias{print.timecox} \alias{print.cum.residuals} \title{Prints call} \usage{ \method{print}{aalen}(x, ...) } \arguments{ \item{x}{an aalen object} \item{...}{unused arguments - for S3 compatibility} } \description{ Prints call for object. Lists nonparametric and parametric terms of model } \author{ Thomas Scheike } \keyword{survival} timereg/man/prep.comp.risk.Rd0000644000176200001440000000736513377441614015647 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/comprisk.r \name{prep.comp.risk} \alias{prep.comp.risk} \title{Set up weights for delayed-entry competing risks data for comp.risk function} \usage{ prep.comp.risk(data, times = NULL, entrytime = NULL, time = "time", cause = "cause", cname = "cweight", tname = "tweight", strata = NULL, nocens.out = TRUE, cens.formula = NULL, cens.code = 0, prec.factor = 100, trunc.mintau = FALSE) } \arguments{ \item{data}{data frame for comp.risk.} \item{times}{times for estimating equations.} \item{entrytime}{name of delayed entry variable, if not given computes right-censoring case.} \item{time}{name of survival time variable.} \item{cause}{name of cause indicator} \item{cname}{name of censoring weight.} \item{tname}{name of truncation weight.} \item{strata}{strata variable to obtain stratified weights.} \item{nocens.out}{returns only uncensored part of data-frame} \item{cens.formula}{censoring model formula for Cox models for the truncation and censoring model.} \item{cens.code}{code for censoring among causes.} \item{prec.factor}{precision factor, for ties between censoring/even times, truncation times/event times} \item{trunc.mintau}{specicies wether the truncation distribution is evaluated in death times or death times minimum max(times), FALSE makes the estimator equivalent to Kaplan-Meier (in the no covariate case).} } \value{ Returns an object. With the following arguments: \item{dataw}{a data.frame with weights.} The function wants to make two new variables "weights" and "cw" so if these already are in the data frame it tries to add an "_" in the names. } \description{ Computes the weights of Geskus (2011) modified to the setting of the comp.risk function. The returned weights are \eqn{1/(H(T_i)*G_c(min(T_i,tau)))} and tau is the max of the times argument, here \eqn{H} is the estimator of the truncation distribution and \eqn{G_c} is the right censoring distribution. 
} \examples{ data(bmt) nn <- nrow(bmt) entrytime <- rbinom(nn,1,0.5)*(bmt$time*runif(nn)) bmt$entrytime <- entrytime times <- seq(5,70,by=1) ### adds weights to uncensored observations bmtw <- prep.comp.risk(bmt,times=times,time="time", entrytime="entrytime",cause="cause") ######################################### ### nonparametric estimates ######################################### ## {{{ ### nonparametric estimates, right-censoring only out <- comp.risk(Event(time,cause)~+1,data=bmt, cause=1,model="rcif2", times=c(5,30,70),n.sim=0) out$cum ### same as ###out <- prodlim(Hist(time,cause)~+1,data=bmt) ###summary(out,cause="1",times=c(5,30,70)) ### with truncation out <- comp.risk(Event(time,cause)~+1,data=bmtw,cause=1, model="rcif2", cens.weight=bmtw$cw,weights=bmtw$weights,times=c(5,30,70), n.sim=0) out$cum ### same as ###out <- prodlim(Hist(entry=entrytime,time,cause)~+1,data=bmt) ###summary(out,cause="1",times=c(5,30,70)) ## }}} ######################################### ### Regression ######################################### ## {{{ ### with truncation correction out <- comp.risk(Event(time,cause)~const(tcell)+const(platelet),data=bmtw, cause=1,cens.weight=bmtw$cw, weights=bmtw$weights,times=times,n.sim=0) summary(out) ### with only righ-censoring, standard call outn <- comp.risk(Event(time,cause)~const(tcell)+const(platelet),data=bmt, cause=1,times=times,n.sim=0) summary(outn) ## }}} } \references{ Geskus (2011), Cause-Specific Cumulative Incidence Estimation and the Fine and Gray Model Under Both Left Truncation and Right Censoring, Biometrics (2011), pp 39-49. Shen (2011), Proportional subdistribution hazards regression for left-truncated competing risks data, Journal of Nonparametric Statistics (2011), 23, 885-895 } \author{ Thomas Scheike } \keyword{survival} timereg/man/cox.Rd0000644000176200001440000000054613075643763013564 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.prop-excess.r \name{cox} \alias{cox} \title{Identifies proportional excess terms of model} \usage{ cox(x) } \arguments{ \item{x}{variable} } \description{ Specifies which of the regressors that lead to proportional excess hazard } \author{ Thomas Scheike } \keyword{survival} timereg/man/cox.ipw.Rd0000644000176200001440000000374113075643763014362 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/cox.ipw.r \name{cox.ipw} \alias{cox.ipw} \alias{summary.cox.ipw} \alias{print.cox.ipw} \alias{coef.cox.ipw} \title{Missing data IPW Cox} \usage{ cox.ipw(survformula, glmformula, d = sys.parent(), max.clust = NULL, ipw.se = FALSE, tie.seed = 100) } \arguments{ \item{survformula}{a formula object with the response on the left of a '~' operator, and the independent terms on the right as regressors. The response must be a survival object as returned by the `Surv' function. Adds the prop() wrapper internally for using cox.aalen function for fitting Cox model.} \item{glmformula}{formula for "being" observed, that is not missing.} \item{d}{data frame.} \item{max.clust}{number of clusters in iid approximation. Default is all.} \item{ipw.se}{if TRUE computes standard errors based on iid decompositon of cox and glm model, thus should be asymptotically correct.} \item{tie.seed}{if there are ties these are broken, and to get same break the seed must be the same. Recommend to break them prior to entering the program.} } \value{ returns an object of type "cox.aalen". 
With the following arguments: \item{iid}{iid decomposition.} \item{coef}{missing data estiamtes for weighted cox. } \item{var}{robust pointwise variances estimates. } \item{se}{robust pointwise variances estimates. } \item{se.naive}{estimate of parametric components of model. } \item{ties}{list of ties and times with random noise to break ties.} \item{cox}{output from weighted cox model.} } \description{ Fits an Cox-Aalen survival model with missing data, with glm specification of probability of missingness. } \details{ Taylor expansion of Cox's partial likelihood in direction of glm parameters using num-deriv and iid expansion of Cox and glm paramters (lava). } \examples{ ### fit <- cox.ipw(Surv(time,status)~X+Z,obs~Z+X+time+status,data=d,ipw.se=TRUE) ### summary(fit) } \references{ Paik et al. } \author{ Thomas Scheike } \keyword{survival} timereg/man/two.stage.Rd0000644000176200001440000001510613176074107014674 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/two-stage-reg.r \name{two.stage} \alias{two.stage} \title{Fit Clayton-Oakes-Glidden Two-Stage model} \usage{ two.stage(margsurv, data = sys.parent(), Nit = 60, detail = 0, start.time = 0, max.time = NULL, id = NULL, clusters = NULL, robust = 1, theta = NULL, theta.des = NULL, var.link = 0, step = 0.5, notaylor = 0, se.clusters = NULL) } \arguments{ \item{margsurv}{fit of marginal survival cox.aalen model with residuals=2, and resample.iid=1 to get fully correct standard errors. See notaylor below.} \item{data}{a data.frame with the variables.} \item{Nit}{number of iterations for Newton-Raphson algorithm.} \item{detail}{if 0 no details is printed during iterations, if 1 details are given.} \item{start.time}{start of observation period where estimates are computed.} \item{max.time}{end of observation period where estimates are computed. Estimates thus computed from [start.time, max.time]. Default is max of data.} \item{id}{For timevarying covariates the variable must associate each record with the id of a subject.} \item{clusters}{cluster variable for computation of robust variances.} \item{robust}{if 0 then totally omits computation of standard errors.} \item{theta}{starting values for the frailty variance (default=0.1).} \item{theta.des}{design for regression for variances. The defauls is NULL that is equivalent to just one theta and the design with only a baseline.} \item{var.link}{default "0" is that the regression design on the variances is without a link, and "1" uses the link function exp.} \item{step}{step size for Newton-Raphson.} \item{notaylor}{if 1 then ignores variation due to survival model, this is quicker and then resample.iid=0 and residuals=0 is ok for marginal survival model that then is much quicker.} \item{se.clusters}{cluster variable for sandwich estimator of variance.} } \value{ returns an object of type "two.stage". 
With the following arguments: \item{cum}{cumulative timevarying regression coefficient estimates are computed within the estimation interval.} \item{var.cum}{the martingale based pointwise variance estimates.} \item{robvar.cum}{robust pointwise variance estimates.} \item{gamma}{estimate of parametric components of model.} \item{var.gamma}{variance for gamma.} \item{robvar.gamma}{robust variance for gamma.} \item{D2linv}{inverse of the derivative of the score function from marginal model.} \item{score}{value of score for final estimates.} \item{theta}{estimate of Gamma variance for frailty.} \item{var.theta}{estimate of variance of theta.} \item{SthetaInv}{inverse of derivative of score of theta.} \item{theta.score}{score for theta parameters.} } \description{ Fit Clayton-Oakes-Glidden Two-Stage model with Cox-Aalen marginals and regression on the variance parameters. } \details{ The model specification allows a regression structure on the variance of the random effects, so that it may depend on covariates fixed within clusters \deqn{ \theta_{k} = Q_{k}^T \nu }. This is particularly useful for modelling different groups jointly and comparing their variances. Fits a Cox-Aalen survival model. Time dependent variables and counting process data (multiple events per subject) are not possible! The marginal baselines are on the Cox-Aalen form \deqn{ \lambda_{ki}(t) = Y_{ki}(t) ( X_{ki}^T(t) \alpha(t) ) \exp(Z_{ki}^T \beta ) } The model thus contains Cox's regression model and the additive hazards model as special cases (see the cox.aalen function for more on this). The modelling formula uses the standard survival modelling given in the \bold{survival} package. Only right censored survival data can be used. The data for a subject is presented as multiple rows or 'observations', each of which applies to an interval of observation (start, stop]. For counting process data where the (start, stop] notation is used, the 'id' variable is needed to identify the records for each subject. Only one record per subject is allowed in the current implementation for the estimation of theta. The program assumes that there are no ties, and if such are present random noise is added to break the ties. Left truncation is dealt with. Here the key assumption is that the marginals are correctly estimated and that there is a common truncation time within each cluster.
} \examples{ library(timereg) data(diabetes) # Marginal Cox model with treat as covariate marg <- cox.aalen(Surv(time,status)~prop(treat)+prop(adult)+ cluster(id),data=diabetes,resample.iid=1) fit<-two.stage(marg,data=diabetes,theta=1.0,Nit=40) summary(fit) # using coxph and giving clusters, but SE wittout cox uncetainty margph <- coxph(Surv(time,status)~treat,data=diabetes) fit<-two.stage(margph,data=diabetes,theta=1.0,Nit=40,clusters=diabetes$id) # Stratification after adult theta.des<-model.matrix(~-1+factor(adult),diabetes); des.t<-model.matrix(~-1+factor(treat),diabetes); design.treat<-cbind(des.t[,-1]*(diabetes$adult==1), des.t[,-1]*(diabetes$adult==2)) # test for common baselines included here marg1<-cox.aalen(Surv(time,status)~-1+factor(adult)+prop(design.treat)+cluster(id), data=diabetes,resample.iid=1,Nit=50) fit.s<-two.stage(marg1,data=diabetes,Nit=40,theta=1,theta.des=theta.des) summary(fit.s) # with common baselines and common treatment effect (although test reject this) fit.s2<-two.stage(marg,data=diabetes,Nit=40,theta=1,theta.des=theta.des) summary(fit.s2) # test for same variance among the two strata theta.des<-model.matrix(~factor(adult),diabetes); fit.s3<-two.stage(marg,data=diabetes,Nit=40,theta=1,theta.des=theta.des) summary(fit.s3) # to fit model without covariates, use beta.fixed=1 and prop or aalen function marg <- aalen(Surv(time,status)~+1+cluster(id), data=diabetes,resample.iid=1,n.sim=0) fita<-two.stage(marg,data=diabetes,theta=0.95,detail=0) summary(fita) # same model but se's without variation from marginal model to speed up computations marg <- aalen(Surv(time,status) ~+1+cluster(id),data=diabetes, resample.iid=0,n.sim=0) fit<-two.stage(marg,data=diabetes,theta=0.95,detail=0) summary(fit) # same model but se's now with fewer time-points for approx of iid decomp of marginal # model to speed up computations marg <- cox.aalen(Surv(time,status) ~+prop(treat)+cluster(id),data=diabetes, resample.iid=1,n.sim=0,max.timepoint.sim=5,beta.fixed=1,beta=0) fit<-two.stage(marg,data=diabetes,theta=0.95,detail=0) summary(fit) } \references{ Glidden (2000), A Two-Stage estimator of the dependence parameter for the Clayton Oakes model. Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/man/csl.Rd0000644000176200001440000000310013075643763013541 0ustar liggesusers\name{csl} \alias{csl} \non_function{} \title{CSL liver chirrosis data} \description{Survival status for the liver chirrosis patients of Schlichting et al. } \format{ This data frame contains the following columns: \describe{ \item{id}{ a numeric vector. Id of subject. } \item{time}{ a numeric vector. Time of measurement. } \item{prot}{ a numeric vector. Prothrombin level at measurement time. } \item{dc}{ a numeric vector code. 0: censored observation, 1: died at eventT. } \item{eventT}{ a numeric vector. Time of event (death). } \item{treat}{ a numeric vector code. 0: active treatment of prednisone, 1: placebo treatment. } \item{sex}{ a numeric vector code. 0: female, 1: male. } \item{age}{ a numeric vector. Age of subject at inclusion time subtracted 60. } \item{prot.base}{ a numeric vector. Prothrombin base level before entering the study. } \item{prot.prev}{ a numeric vector. Level of prothrombin at previous measurement time. } \item{lt}{ a numeric vector. Gives the starting time for the time-intervals. } \item{rt}{ a numeric vector. Gives the stopping time for the time-intervals. } } } \source{P.K. 
Andersen} \references{ Schlichting, P., Christensen, E., Andersen, P., Fauerholds, L., Juhl, E., Poulsen, H. and Tygstrup, N. (1983), The Copenhagen Study Group for Liver Diseases, Hepatology 3, 889--895 } \examples{ data(csl) names(csl) } \keyword{datasets} timereg/man/Gprop.odds.Rd0000644000176200001440000001234513377441614015006 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/Gprop-odds.r \name{Gprop.odds} \alias{Gprop.odds} \title{Fit Generalized Semiparametric Proportional 0dds Model} \usage{ Gprop.odds(formula = formula(data), data = sys.parent(), beta = 0, Nit = 50, detail = 0, start.time = 0, max.time = NULL, id = NULL, n.sim = 500, weighted.test = 0, sym = 0, mle.start = 0) } \arguments{ \item{formula}{a formula object, with the response on the left of a '~' operator, and the terms on the right. The response must be a survival object as returned by the `Surv' function.} \item{data}{a data.frame with the variables.} \item{beta}{starting value for relative risk estimates} \item{Nit}{number of iterations for Newton-Raphson algorithm.} \item{detail}{if 0 no details is printed during iterations, if 1 details are given.} \item{start.time}{start of observation period where estimates are computed.} \item{max.time}{end of observation period where estimates are computed. Estimates thus computed from [start.time, max.time]. This is very useful to obtain stable estimates, especially for the baseline. Default is max of data.} \item{id}{For timevarying covariates the variable must associate each record with the id of a subject.} \item{n.sim}{number of simulations in resampling.} \item{weighted.test}{to compute a variance weighted version of the test-processes used for testing time-varying effects.} \item{sym}{to use symmetrized second derivative in the case of the estimating equation approach (profile=0). This may improve the numerical performance.} \item{mle.start}{starting values for relative risk parameters.} } \value{ returns an object of type 'cox.aalen'. With the following arguments: \item{cum}{cumulative timevarying regression coefficient estimates are computed within the estimation interval.} \item{var.cum}{the martingale based pointwise variance estimates. } \item{robvar.cum}{robust pointwise variances estimates. } \item{gamma}{estimate of proportional odds parameters of model.} \item{var.gamma}{variance for gamma. } \item{robvar.gamma}{robust variance for gamma. } \item{residuals}{list with residuals. Estimated martingale increments (dM) and corresponding time vector (time).} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value for covariate effects based on supremum test.} \item{sim.testBeq0}{resampled supremum values.} \item{obs.testBeqC}{observed absolute value of supremum of difference between observed cumulative process and estimate under null of constant effect.} \item{pval.testBeqC}{p-value based on resampling.} \item{sim.testBeqC}{resampled supremum values.} \item{obs.testBeqC.is}{observed integrated squared differences between observed cumulative and estimate under null of constant effect.} \item{pval.testBeqC.is}{p-value based on resampling.} \item{sim.testBeqC.is}{resampled supremum values.} \item{conf.band}{resampling based constant to construct robust 95\% uniform confidence bands. 
} \item{test.procBeqC}{observed test-process of difference between observed cumulative process and estimate under null of constant effect over time.} \item{loglike}{modified partial likelihood, pseudo profile likelihood for regression parameters.} \item{D2linv}{inverse of the derivative of the score function.} \item{score}{value of score for final estimates.} \item{test.procProp}{observed score process for proportional odds regression effects.} \item{pval.Prop}{p-value based on resampling.} \item{sim.supProp}{re-sampled supremum values.} \item{sim.test.procProp}{list of 50 random realizations of test-processes for constant proportional odds under the model based on resampling.} } \description{ Fits a semiparametric proportional odds model: \deqn{ logit(1-S_{X,Z}(t)) = log(X^T A(t)) + \beta^T Z } where A(t) is increasing but otherwise unspecified. Model is fitted by maximising the modified partial likelihood. A goodness-of-fit test by considering the score functions is also computed by resampling methods. } \details{ An alternative way of writing the model : \deqn{ S_{X,Z}(t)) = \frac{ \exp( - \beta^T Z )}{ (X^T A(t)) + \exp( - \beta^T Z) } } such that \eqn{\beta} is the log-odds-ratio of dying before time t, and \eqn{A(t)} is the odds-ratio. The modelling formula uses the standard survival modelling given in the \bold{survival} package. The data for a subject is presented as multiple rows or "observations", each of which applies to an interval of observation (start, stop]. The program essentially assumes no ties, and if such are present a little random noise is added to break the ties. } \examples{ data(sTRACE) \donttest{ ### runs slowly and is therefore donttest data(sTRACE) # Fits Proportional odds model with stratified baseline age.c<-scale(sTRACE$age,scale=FALSE); out<-Gprop.odds(Surv(time,status==9)~-1+factor(diabetes)+prop(age.c)+prop(chf)+ prop(sex)+prop(vf),data=sTRACE,max.time=7,n.sim=50) summary(out) par(mfrow=c(2,3)) plot(out,sim.ci=2); plot(out,score=1) } } \references{ Scheike, A flexible semiparametric transformation model for survival data, Lifetime Data Anal. (to appear). Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/man/predict.timereg.Rd0000644000176200001440000001204613377441614016052 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/predict-timereg.r \name{predict.timereg} \alias{predict.timereg} \alias{predict.aalen} \alias{predict.comprisk} \alias{predict.cox.aalen} \title{Predictions for Survival and Competings Risks Regression for timereg} \usage{ \method{predict}{timereg}(object, newdata = NULL, X = NULL, times = NULL, Z = NULL, n.sim = 500, uniform = TRUE, se = TRUE, alpha = 0.05, resample.iid = 0, ...) 
} \arguments{ \item{object}{an object belonging to one of the following classes: comprisk, aalen or cox.aalen} \item{newdata}{specifies the data at which the predictions are wanted.} \item{X}{alternative to newdata, specifies the nonparametric components for predictions.} \item{times}{times at which predictions are computed; default is all time-points of the baseline} \item{Z}{alternative to newdata, specifies the parametric components of the model for predictions.} \item{n.sim}{number of simulations in resampling.} \item{uniform}{computes resampling based uniform confidence bands.} \item{se}{computes pointwise standard errors.} \item{alpha}{specifies the significance level.} \item{resample.iid}{set to 1 to return iid decomposition of estimates, 3-dim matrix (predictions x times x subjects)} \item{...}{unused arguments - for S3 compatibility} } \value{ \item{time}{vector of time points where the predictions are computed.} \item{unif.band}{resampling based constant to construct 95\% uniform confidence bands.} \item{model}{specifies which model was fitted.} \item{alpha}{specifies the significance level for the confidence intervals. This relates directly to the constant given in unif.band.} \item{newdata}{specifies the newdata given in the call.} \item{RR}{gives relative risk terms for Cox-type models.} \item{call}{gives call for predict function.} \item{initial.call}{gives call for underlying object used for predictions.} \item{P1}{gives cumulative incidence predictions for competing risks models. Predictions given in matrix form with different subjects in different rows.} \item{S0}{gives survival predictions for survival models. Predictions given in matrix form with different subjects in different rows.} \item{se.P1}{pointwise standard errors for predictions of P1.} \item{se.S0}{pointwise standard errors for predictions of S0.} } \description{ Make predictions based on the survival models (Aalen and Cox-Aalen) and the competing risks models for the cumulative incidence function (comp.risk). Computes confidence intervals and confidence bands based on resampling. } \examples{ data(bmt)
## competing risks
add<-comp.risk(Event(time,cause)~platelet+age+tcell,data=bmt,cause=1)
ndata<-data.frame(platelet=c(1,0,0),age=c(0,1,0),tcell=c(0,0,1))
out<-predict(add,newdata=ndata,uniform=1,n.sim=1000)
par(mfrow=c(2,2))
plot(out,multiple=0,uniform=1,col=1:3,lty=1,se=1)
# see comp.risk for further examples.
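## Added illustration (a minimal sketch, reusing `out` and `ndata` from above):
## out$P1 holds the predicted cumulative incidence curves (one row per subject
## in ndata) evaluated at the time points in out$time, and out$se.P1 the
## corresponding pointwise standard errors.
matplot(out$time,t(out$P1),type="s",lty=1,col=1:3,
        xlab="Time",ylab="Cumulative incidence")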
add<-comp.risk(Event(time,cause)~factor(tcell),data=bmt,cause=1) summary(add) out<-predict(add,newdata=ndata,uniform=1,n.sim=1000) plot(out,multiple=1,uniform=1,col=1:3,lty=1,se=1) add<-prop.odds.subdist(Event(time,cause)~factor(tcell), data=bmt,cause=1) out <- predict(add,X=1,Z=1) plot(out,multiple=1,uniform=1,col=1:3,lty=1,se=1) ## SURVIVAL predictions aalen function data(sTRACE) out<-aalen(Surv(time,status==9)~sex+ diabetes+chf+vf, data=sTRACE,max.time=7,n.sim=0,resample.iid=1) pout<-predict(out,X=rbind(c(1,0,0,0,0),rep(1,5))) head(pout$S0[,1:5]); head(pout$se.S0[,1:5]) par(mfrow=c(2,2)) plot(pout,multiple=1,se=0,uniform=0,col=1:2,lty=1:2) plot(pout,multiple=0,se=1,uniform=1,col=1:2) out<-aalen(Surv(time,status==9)~const(age)+const(sex)+ const(diabetes)+chf+vf, data=sTRACE,max.time=7,n.sim=0,resample.iid=1) pout<-predict(out,X=rbind(c(1,0,0),c(1,1,0)), Z=rbind(c(55,0,1),c(60,1,1))) head(pout$S0[,1:5]); head(pout$se.S0[,1:5]) par(mfrow=c(2,2)) plot(pout,multiple=1,se=0,uniform=0,col=1:2,lty=1:2) plot(pout,multiple=0,se=1,uniform=1,col=1:2) pout<-predict(out,uniform=0,se=0,newdata=sTRACE[1:10,]) plot(pout,multiple=1,se=0,uniform=0) #### cox.aalen out<-cox.aalen(Surv(time,status==9)~prop(age)+prop(sex)+ prop(diabetes)+chf+vf, data=sTRACE,max.time=7,n.sim=0,resample.iid=1) pout<-predict(out,X=rbind(c(1,0,0),c(1,1,0)),Z=rbind(c(55,0,1),c(60,1,1))) head(pout$S0[,1:5]); head(pout$se.S0[,1:5]) par(mfrow=c(2,2)) plot(pout,multiple=1,se=0,uniform=0,col=1:2,lty=1:2) plot(pout,multiple=0,se=1,uniform=1,col=1:2) pout<-predict(out,uniform=0,se=0,newdata=sTRACE[1:10,]) plot(pout,multiple=1,se=0,uniform=0) #### prop.odds model add<-prop.odds(Event(time,cause!=0)~factor(tcell),data=bmt) out <- predict(add,X=1,Z=0) plot(out,multiple=1,uniform=1,col=1:3,lty=1,se=1) } \references{ Scheike, Zhang and Gerds (2008), Predicting cumulative incidence probability by direct binomial regression, Biometrika, 95, 205-220. Scheike and Zhang (2007), Flexible competing risks regression modelling and goodness of fit, LIDA, 14, 464-483 . Martinussen and Scheike (2006), Dynamic regression models for survival data, Springer. 
} \author{ Thomas Scheike, Jeremy Silver } \keyword{survival} timereg/man/internal-addreg.Rd0000644000176200001440000000405213377440751016024 0ustar liggesusers\name{pval} \alias{summary.restricted.residual.mean} \alias{plot.restricted.residual.mean} \alias{summary.resmean} \alias{plot.resmean} \alias{print.resmean} \alias{coef.resmean} \alias{coefcox} \alias{kmplot} \alias{pval} \alias{additive.compSs} \alias{CsmoothB} \alias{Csmooth2B} \alias{pval} \alias{kernel} \alias{percen} \alias{localTimeReg} \alias{nameestimate} \alias{namematrix} \alias{aalenBase} \alias{aalenBaseC} \alias{aalen.des} \alias{aalen.des2} \alias{cox.aalenBase} \alias{read-design} \alias{is.diag} \alias{semiaalen} \alias{semiregBase} \alias{check.missing} \alias{sindex.prodlim} \alias{read.design} \alias{read.surv} \alias{risk.index} \alias{faster.reshape} \alias{rm.missing} \alias{plot.two.stage} \alias{summary.two.stage} \alias{predict.two.stage} \alias{predictpropodds} \alias{plot.cums} \alias{plot.comprisk} \alias{plot.predict.comprisk} \alias{plot.predict.timereg} \alias{pava} \alias{plotScore} \alias{summary.comprisk} \alias{pred.cum} \alias{slaaop} \alias{pred.des} \alias{Cpred} \alias{plot.cox.aalen2} \alias{plot.predict} \alias{coef.aalen} \alias{cox.marg} \alias{summary.cox.marg} \alias{coef.cox.marg} \alias{print.cox.marg} \alias{comprisk.ipw} \alias{coef.comprisk.ipw} \alias{print.comprisk.ipw} \alias{prop.odds.subdist.ipw} \alias{summary.comprisk.ipw} \alias{coef.cox.aalen} \alias{coef.comprisk} \alias{coef.two.stage} \alias{cluster.index.timereg} \alias{coefBase} \alias{des.aalen} \alias{timetest} \alias{print.pe.sasieni} \alias{pred.stratKM} \alias{prep.glm.comprisk} \alias{print.predict.timereg} \alias{print.two.stage} \alias{residualsTimereg} \alias{summary.pe.sasieni} \alias{summary.predict.timereg} \alias{coef.dynreg} \alias{coef.timecox} \alias{dynregBase} \alias{prop.excessBase} \alias{prop.odds.gam} \alias{plotConfregion} \alias{semicox} \alias{timecoxBase} \alias{timereg.formula} \alias{names2formula} \alias{twin.clustertrunc} \alias{vcov.aalen} \alias{vcov.cox.aalen} \alias{vcov.two.stage} \alias{vcov.comp.risk} \title{For internal use} \description{for internal use} \author{Thomas Scheike} \keyword{survival} timereg/man/mypbc.Rd0000644000176200001440000000033113075643764014076 0ustar liggesusers\name{mypbc} \alias{mypbc} \non_function{} \title{my version of the PBC data of the survival package} \description{ my version of the PBC data of the survival package } \source{ survival package } \keyword{datasets} timereg/man/cd4.Rd0000644000176200001440000000231313075643763013437 0ustar liggesusers\name{cd4} \alias{cd4} \non_function{} \title{The multicenter AIDS cohort study } \description{CD4 counts collected over time.} \format{ This data frame contains the following columns: \describe{ \item{obs}{a numeric vector. Number of observations.} \item{id}{a numeric vector. Id of subject.} \item{visit}{ a numeric vector. Timings of the visits in years.} \item{smoke}{a numeric vector code. 0: non-smoker, 1: smoker.} \item{age}{a numeric vector. Age of the patient at the start of the trial.} \item{cd4}{a numeric vector. CD4 percentage at the current visit.} \item{cd4.prev}{a numeric vector. CD4 level at the preceding visit.} \item{precd4}{a numeric vector. Post-infection CD4 percentage.} \item{lt}{a numeric vector. Gives the starting time for the time-intervals.} \item{rt}{a numeric vector. Gives the stopping time for the time-interval.} } } \source{ MACS Public Use Data Set Release PO4 (1984-1991). 
See reference. } \references{ Kaslow et al. (1987), The multicenter AIDS cohort study: rational, organisation and selected characteristics of the participants. Am. J. Epidemiology 126, 310--318. } \examples{ data(cd4) names(cd4) } \keyword{datasets} timereg/man/restricted.residual.mean.Rd0000644000176200001440000000510313075643764017664 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/restricted.residual.mean.r \name{restricted.residual.mean} \alias{restricted.residual.mean} \title{Estimates restricted residual mean for Cox or Aalen model} \usage{ restricted.residual.mean(out, x = 0, tau = 10, iid = 0) } \arguments{ \item{out}{an "cox.aalen" with a Cox model or an "aalen" model.} \item{x}{matrix with covariates for Cox model or additive hazards model (aalen).} \item{tau}{restricted residual mean.} \item{iid}{if iid=1 then uses iid decomposition for estimation of standard errors.} } \value{ Returns an object. With the following arguments: \item{mean}{restricted mean for different covariates.} \item{var.mean}{variance matrix.} \item{se}{standard errors.} \item{S0tau}{estimated survival functions on time-range [0,tau].} \item{timetau}{vector of time arguments for S0tau.} } \description{ The restricted means are the \deqn{ \int_0^\tau S(t) dt } the standard errors are computed using the i.i.d. decompositions from the cox.aalen (that must be called with the argument "max.timpoint.sim=NULL") or aalen function. } \details{ must have computed iid decomposition of survival models for standard errors to be computed. Note that competing risks models can be fitted but then the interpretation is not clear. } \examples{ \donttest{ ### this example runs slowly and is therefore donttest data(sTRACE) sTRACE$cage <- scale(sTRACE$age) # Fits Cox model and aalen model out<-cox.aalen(Surv(time,status>=1)~prop(sex)+prop(diabetes)+prop(chf)+ prop(vf),data=sTRACE,max.timepoint.sim=NULL,resample.iid=1) outa<-aalen(Surv(time,status>=1)~sex+diabetes+chf+vf, data=sTRACE,resample.iid=1) coxrm <- restricted.residual.mean(out,tau=7, x=rbind(c(0,0,0,0),c(0,0,1,0),c(0,0,1,1),c(0,0,0,1)),iid=1) plot(coxrm) summary(coxrm) ### aalen model not optimal here aalenrm <- restricted.residual.mean(outa,tau=7, x=rbind(c(1,0,0,0,0),c(1,0,0,1,0),c(1,0,0,1,1),c(1,0,0,0,1)),iid=1) with(aalenrm,matlines(timetau,S0tau,type="s",ylim=c(0,1))) legend("bottomleft",c("baseline","+chf","+chf+vf","+vf"),col=1:4,lty=1) summary(aalenrm) mm <-cbind(coxrm$mean,coxrm$se,aalenrm$mean,aalenrm$se) colnames(mm)<-c("cox-res-mean","se","aalen-res-mean","se") rownames(mm)<-c("baseline","+chf","+chf+vf","+vf") mm } } \references{ D. M. Zucker, Restricted mean life with covariates: Modification and extension of a useful survival analysis method, J. Amer. Statist. Assoc. vol. 93 pp. 702-709, 1998. Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). 
} \author{ Thomas Scheike } \keyword{survival} timereg/man/invsubdist.Rd0000644000176200001440000001027513133341114015141 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sim-pc-hazard.r \name{invsubdist} \alias{invsubdist} \title{Finds inverse of piecwise linear sub-distribution} \usage{ invsubdist(F1, u, entry = NULL, cond = 1, ptrunc = NULL) } \arguments{ \item{F1}{matrix with x, and F1(x)} \item{u}{points for which to compute inverse} \item{entry}{possible delayed entry points} \item{cond}{1 indcates that we draw given that this subdistribution is used, so scales mass to 1 to get conditional distribution function} \item{ptrunc}{possible trunction weigth for delayed entry, if NULL then uses ptrunc=1-F1(entry)} } \description{ Finds inverse of piecwise linear sub-distribution to be used for simulation of subdistributions } \examples{ F1 <- cbind(c(0,5,8,10),c(0,0.1,0.3,0.9)) plot(F1,type="l") u <- runif(100) Fiu <- invsubdist(F1,u,cond=0) points(Fiu$time,u,pch="x") F1cond <- F1 F1cond[,2] <- F1cond[,2]/0.9 plot(F1cond,type="l") u <- runif(100) Ficond <- invsubdist(F1cond,u,cond=0) points(Ficond$time,u,pch="-") Fiu <- invsubdist(F1,u,cond=1) points(Fiu$time,u,pch="x") entry <- 4 ### F1entry <- subdist(F1,entry)[,2] ptrunc <- 1-F1entry ### F1entry5 <- F1 F1entry5[,1] <- F1entry5[,1]-entry F1entry5[,2] <- (F1entry5[,2]-F1entry)/ptrunc pos <- F1entry5[,1]>=0 F1entry5 <- rbind(c(0,0),F1entry5[pos,]) ### plot(F1entry5,ylim=c(0,1),type="l") u <- runif(100) Fiu <- invsubdist(F1entry5,u,cond=0) points(Fiu$time,u,pch="-") ### Fiu2 <- invsubdist(F1,u,cond=0,entry=entry) points(Fiu2$time-entry,u,pch="x") sum(Fiu2$time-entry-Fiu$time) F1ce <- F1entry5 F1ce[,2] <- F1ce[,2]/tail(F1entry5[,2],1) plot(F1ce,type="l") u <- runif(100) Fi1ce <- invsubdist(F1ce,u,cond=0) points(Fi1ce$time,u,pch="-") Fice <- invsubdist(F1,u,cond=1,entry=entry) points(Fice$time-entry,u,pch="x") sum(Fice$time-entry-Fi1ce$time) ## simulation of distribution with delayed entry starting at 3 par(mfrow=c(1,1)) F1 <- cbind(c(0,5,8,10),c(0,0.5,0.6,0.9)) F1 plot(F1,ylim=c(0,1),type="l") n <- 100000 entry <- c(rep(3,10000),runif(n)*7+3) ###entry <- rep(3,n) u <- runif(n+10000) ### Fiu <- invsubdist(F1,u,cond=0,entry=entry) ### # library(prodlim) # pp <- prodlim(Hist(time,status,entry=entry)~+1,data=Fiu) # plot(pp,xlim=c(3,10)) ### entry <- 3 ### F1entry <- subdist(F1,entry)[,2] ptrunc <- 1-F1entry ### F1entry5 <- F1 F1entry5[,1] <- F1entry5[,1]-entry F1entry5[,2] <- (F1entry5[,2]-F1entry)/ptrunc pos <- F1entry5[,1]>=0 F1entry5 <- rbind(c(0,0),F1entry5[pos,]) # # lines(entry+F1entry5[,1],1-F1entry5[,2],col=2) ############################################################## ## Simulations of two cumulative incidence functions with truncation ############################################################## par(mfrow=c(1,1)) F1 <- cbind(c(0,5,8,10),c(0,0.5,0.6,0.9)*0.3) F2 <- cbind(c(0,5,8,10),c(0,0.5,0.6,0.9)*0.5) plot(F1,ylim=c(0,1),type="l") lines(F2,col=2) entry1 <- 3 ### F1entry <- subdist(F1,entry1)[,2] F2entry <- subdist(F2,entry1)[,2] ptrunc <- 1-F1entry-F2entry ### F1e <- F1 F1e[,1] <- F1e[,1]-entry1 F1e[,2] <- (F1e[,2]-F1entry)/ptrunc pos <- F1e[,1]>=0 F1e <- rbind(c(0,0),F1e[pos,]) F2e <- F2 F2e[,1] <- F2e[,1]-entry1 F2e[,2] <- (F2e[,2]-F2entry)/ptrunc pos <- F2e[,1]>=0 F2e <- rbind(c(0,0),F2e[pos,]) # # truncated identifiable version lines(entry1+F1e[,1],F1e[,2],col=1) lines(entry1+F2e[,1],F2e[,2],col=2) n <- 10000 entry <- c(rep(entry1,10000),runif(n)*(10-entry1)+entry1) u <- 
runif(n+10000) ### F1entry <- subdist(F1,entry)[,2] F2entry <- subdist(F2,entry)[,2] ptrunc <- 1-( F1entry+F2entry) Fiu1 <- invsubdist(F1,u,cond=1,entry=entry,ptrunc=ptrunc) Fiu2 <- invsubdist(F1,u,cond=1,entry=entry,ptrunc=ptrunc) ### ptot <- (tail(F1[,2],1)+tail(F2[,2],1)-F1entry-F2entry)/(ptrunc) rt <- rbinom(n+10000,1,ptot) p1 <- ((tail(F1[,2],1)-F1entry)/ptrunc) p2 <- ((tail(F2[,2],1)-F2entry)/ptrunc) rb <- rbinom(n+10000,1,p1/ptot) cause=ifelse(rb==1,1,2) time=ifelse(cause==1,Fiu1$time,Fiu2$time) cause <- rt*cause time[cause==0] <- 10 ### simulated data, now checking that things are working # pp <- prodlim(Hist(time,cause,entry=entry)~+1) # plot(pp,xlim=c(entry1,10),cause=1) # plot(pp,xlim=c(entry1,10),cause=2,add=TRUE) ### # lines(entry1+F1e[,1],F1e[,2],col=2) # lines(entry1+F2e[,1],F2e[,2],col=2) } \author{ Thomas Scheike } \keyword{survival} timereg/man/Event.Rd0000644000176200001440000000142613075643763014052 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/event.r \name{Event} \alias{Event} \alias{as.character.Event} \alias{as.matrix.Event} \alias{[.Event} \alias{format.Event} \alias{print.Event} \alias{rbind.Event} \alias{summary.Event} \title{Event history object} \usage{ Event(time, time2 = TRUE, cause = NULL, cens.code = 0, ...) } \arguments{ \item{time}{Time} \item{time2}{Time 2} \item{cause}{Cause} \item{cens.code}{Censoring code (default 0)} \item{...}{Additional arguments} } \value{ Object of class Event (a matrix) } \description{ Constructur for Event History objects } \details{ ... content for details } \examples{ t1 <- 1:10 t2 <- t1+runif(10) ca <- rbinom(10,2,0.4) (x <- Event(t1,t2,ca)) } \author{ Klaus K. Holst and Thomas Scheike } timereg/man/summary.cum.residuals.Rd0000644000176200001440000000126013075643764017240 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mgresid.r \name{summary.cum.residuals} \alias{summary.cum.residuals} \title{Prints summary statistics for goodness-of-fit tests based on cumulative residuals} \usage{ \method{summary}{cum.residuals}(object, digits = 3, ...) } \arguments{ \item{object}{output from the cum.residuals() function.} \item{digits}{number of digits in printouts.} \item{...}{unused arguments - for S3 compatibility} } \description{ Computes p-values for extreme behaviour relative to the model of various cumulative residual processes. } \examples{ # see cum.residuals for examples } \author{ Thomas Scheike } \keyword{survival} timereg/man/prop.odds.Rd0000644000176200001440000001305713377441614014700 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/prop-odds.r \name{prop.odds} \alias{prop.odds} \title{Fit Semiparametric Proportional 0dds Model} \usage{ prop.odds(formula, data = sys.parent(), beta = NULL, Nit = 20, detail = 0, start.time = 0, max.time = NULL, id = NULL, n.sim = 500, weighted.test = 0, profile = 1, sym = 0, baselinevar = 1, clusters = NULL, max.clust = 1000, weights = NULL) } \arguments{ \item{formula}{a formula object, with the response on the left of a '~' operator, and the terms on the right. 
The response must be a Event object as returned by the `Event' function.} \item{data}{a data.frame with the variables.} \item{beta}{starting value for relative risk estimates} \item{Nit}{number of iterations for Newton-Raphson algorithm.} \item{detail}{if 0 no details is printed during iterations, if 1 details are given.} \item{start.time}{start of observation period where estimates are computed.} \item{max.time}{end of observation period where estimates are computed. Estimates thus computed from [start.time, max.time]. This is very useful to obtain stable estimates, especially for the baseline. Default is max of data.} \item{id}{For timevarying covariates the variable must associate each record with the id of a subject.} \item{n.sim}{number of simulations in resampling.} \item{weighted.test}{to compute a variance weighted version of the test-processes used for testing time-varying effects.} \item{profile}{if profile is 1 then modified partial likelihood is used, profile=0 fits by simple estimating equation. The modified partial likelihood is recommended.} \item{sym}{to use symmetrized second derivative in the case of the estimating equation approach (profile=0). This may improve the numerical performance.} \item{baselinevar}{set to 0 to omit calculations of baseline variance.} \item{clusters}{to compute cluster based standard errors.} \item{max.clust}{number of maximum clusters to be used, to save time in iid decomposition.} \item{weights}{weights for score equations.} } \value{ returns an object of type 'cox.aalen'. With the following arguments: \item{cum}{cumulative timevarying regression coefficient estimates are computed within the estimation interval.} \item{var.cum}{the martingale based pointwise variance estimates. } \item{robvar.cum}{robust pointwise variances estimates. } \item{gamma}{estimate of proportional odds parameters of model.} \item{var.gamma}{variance for gamma. } \item{robvar.gamma}{robust variance for gamma. } \item{residuals}{list with residuals. Estimated martingale increments (dM) and corresponding time vector (time).} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value for covariate effects based on supremum test.} \item{sim.testBeq0}{resampled supremum values.} \item{obs.testBeqC}{observed absolute value of supremum of difference between observed cumulative process and estimate under null of constant effect.} \item{pval.testBeqC}{p-value based on resampling.} \item{sim.testBeqC}{resampled supremum values.} \item{obs.testBeqC.is}{observed integrated squared differences between observed cumulative and estimate under null of constant effect.} \item{pval.testBeqC.is}{p-value based on resampling.} \item{sim.testBeqC.is}{resampled supremum values.} \item{conf.band}{resampling based constant to construct robust 95\% uniform confidence bands. 
} \item{test.procBeqC}{observed test-process of difference between observed cumulative process and estimate under null of constant effect over time.} \item{loglike}{modified partial likelihood, pseudo profile likelihood for regression parameters.} \item{D2linv}{inverse of the derivative of the score function.} \item{score}{value of score for final estimates.} \item{test.procProp}{observed score process for proportional odds regression effects.} \item{pval.Prop}{p-value based on resampling.} \item{sim.supProp}{re-sampled supremum values.} \item{sim.test.procProp}{list of 50 random realizations of test-processes for constant proportional odds under the model based on resampling.} } \description{ Fits a semiparametric proportional odds model: \deqn{ logit(1-S_Z(t)) = log(G(t)) + \beta^T Z } where G(t) is increasing but otherwise unspecified. Model is fitted by maximising the modified partial likelihood. A goodness-of-fit test by considering the score functions is also computed by resampling methods. } \details{ The modelling formula uses the standard survival modelling given in the \bold{survival} package. For large data sets use the divide.conquer.timereg of the mets package to run the model on splits of the data, or the alternative estimator by the cox.aalen function. The data for a subject is presented as multiple rows or "observations", each of which applies to an interval of observation (start, stop]. The program essentially assumes no ties, and if such are present a little random noise is added to break the ties. } \examples{ data(sTRACE) # Fits Proportional odds model out<-prop.odds(Event(time,status==9)~age+diabetes+chf+vf+sex, sTRACE,max.time=7,n.sim=100) summary(out) par(mfrow=c(2,3)) plot(out,sim.ci=2) plot(out,score=1) pout <- predict(out,Z=c(70,0,0,0,0)) plot(pout) ### alternative estimator for large data sets form <- Surv(time,status==9)~age+diabetes+chf+vf+sex pform <- timereg.formula(form) out2<-cox.aalen(pform,data=sTRACE,max.time=7, propodds=1,n.sim=0,robust=0,detail=0,Nit=40) summary(out2) } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/man/aalen.Rd0000644000176200001440000001454613413657513014052 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.aalen.r \name{aalen} \alias{aalen} \title{Fit additive hazards model} \usage{ aalen(formula = formula(data), data = sys.parent(), start.time = 0, max.time = NULL, robust = 1, id = NULL, clusters = NULL, residuals = 0, n.sim = 1000, weighted.test = 0, covariance = 0, resample.iid = 0, deltaweight = 1, silent = 1, weights = NULL, max.clust = 1000, gamma = NULL, offsets = 0, caseweight = NULL) } \arguments{ \item{formula}{a formula object with the response on the left of a '~' operator, and the independent terms on the right as regressors.The response must be a survival object as returned by the `Surv' function. Time- invariant regressors are specified by the wrapper const(), and cluster variables (for computing robust variances) by the wrapper cluster().} \item{data}{a data.frame with the variables.} \item{start.time}{start of observation period where estimates are computed.} \item{max.time}{end of observation period where estimates are computed. Estimates thus computed from [start.time, max.time]. Default is max of data.} \item{robust}{to compute robust variances and construct processes for resampling. 
May be set to 0 to save memory.} \item{id}{For timevarying covariates the variable must associate each record with the id of a subject.} \item{clusters}{cluster variable for computation of robust variances.} \item{residuals}{to return residuals that can be used for model validation in the function cum.residuals.} \item{n.sim}{number of simulations in resampling.} \item{weighted.test}{to compute a variance weighted version of the test-processes used for testing time-varying effects.} \item{covariance}{to compute covariance estimates for nonparametric terms rather than just the variances.} \item{resample.iid}{to return i.i.d. representation for nonparametric and parametric terms.} \item{deltaweight}{uses weights to estimate the semiparametric model, under construction; default=1 is standard least squares estimates.} \item{silent}{set to 0 to print warnings for non-invertible design matrices for different timepoints, default is 1.} \item{weights}{weights for estimating equations.} \item{max.clust}{sets the total number of i.i.d. terms in the i.i.d. decomposition. This can limit the amount of memory used by coarsening the clusters. When NULL then all clusters are used. Default is 1000 to save memory and time.} \item{gamma}{fixes gamma at this value for estimation.} \item{offsets}{offsets for the additive model, for excess risk modelling.} \item{caseweight}{caseweight: multiplied onto dN for score equations.} } \value{ returns an object of type "aalen". With the following arguments: \item{cum}{cumulative timevarying regression coefficient estimates are computed within the estimation interval. } \item{var.cum}{the martingale based pointwise variance estimates for cumulatives.} \item{robvar.cum}{robust pointwise variance estimates for cumulatives.} \item{gamma}{estimate of parametric components of model. } \item{var.gamma}{variance for gamma. } \item{robvar.gamma}{robust variance for gamma. } \item{residuals}{list with residuals. Estimated martingale increments (dM) and corresponding time vector (time).} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value for covariate effects based on supremum test.} \item{sim.testBeq0}{resampled supremum values.} \item{obs.testBeqC}{observed absolute value of supremum of difference between observed cumulative process and estimate under null of constant effect.} \item{pval.testBeqC}{p-value based on resampling.} \item{sim.testBeqC}{resampled supremum values.} \item{obs.testBeqC.is}{observed integrated squared differences between observed cumulative and estimate under null of constant effect.} \item{pval.testBeqC.is}{p-value based on resampling.} \item{sim.testBeqC.is}{resampled supremum values.} \item{conf.band}{resampling based constant to construct robust 95\% uniform confidence bands. } \item{test.procBeqC}{observed test-process of difference between observed cumulative process and estimate under null of constant effect over time. } \item{sim.test.procBeqC}{list of 50 random realizations of test-processes under null based on resampling.} \item{covariance}{covariances for nonparametric terms of model.} \item{B.iid}{Resample processes for nonparametric terms of model.} \item{gamma.iid}{Resample processes for parametric terms of model.} \item{deviance}{Least squares of increments.} } \description{ Fits both the additive hazards model of Aalen and the semi-parametric additive hazards model of McKeague and Sasieni. Estimates are un-weighted.
Time-dependent variables and counting process data (multiple events per subject) are possible. } \details{ Resampling is used for computing p-values for tests of time-varying effects. The modelling formula uses the standard survival modelling given in the \bold{survival} package. The data for a subject is presented as multiple rows or 'observations', each of which applies to an interval of observation (start, stop]. For counting process data, where the (start, stop] notation is used, the 'id' variable is needed to identify the records for each subject. The program assumes that there are no ties, and if such are present random noise is added to break the ties. } \examples{ data(sTRACE) # Fits Aalen model out<-aalen(Surv(time,status==9)~age+sex+diabetes+chf+vf, sTRACE,max.time=7,n.sim=100) summary(out) par(mfrow=c(2,3)) plot(out) # Fits semi-parametric additive hazards model out<-aalen(Surv(time,status==9)~const(age)+const(sex)+const(diabetes)+chf+vf, sTRACE,max.time=7,n.sim=100) summary(out) par(mfrow=c(2,3)) plot(out) ## Excess risk additive modelling data(mela.pop) dummy<-rnorm(nrow(mela.pop)); # Fits Aalen model with offsets out<-aalen(Surv(start,stop,status==1)~age+sex+const(dummy), mela.pop,max.time=7,n.sim=100,offsets=mela.pop$rate,id=mela.pop$id, gamma=0) summary(out) par(mfrow=c(2,3)) plot(out,main="Additive excess risk model") # Fits semi-parametric additive hazards model with offsets out<-aalen(Surv(start,stop,status==1)~age+const(sex), mela.pop,max.time=7,n.sim=100,offsets=mela.pop$rate,id=mela.pop$id) summary(out) plot(out,main="Additive excess risk model") } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/man/event.split.Rd0000644000176200001440000000270213377441614015236 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/event.split.r \name{event.split} \alias{event.split} \title{EventSplit (SurvSplit).} \usage{ event.split(data, time = "time", status = "status", cuts = "cuts", name.id = "id", name.start = "start", cens.code = 0, order.id = TRUE, time.group = TRUE) } \arguments{ \item{data}{data to be split} \item{time}{time variable.} \item{status}{status variable.} \item{cuts}{cuts variable or numeric cut (only one value)} \item{name.id}{name of id variable.} \item{name.start}{name of start variable in data, start can also be numeric "0"} \item{cens.code}{code for the censoring.} \item{order.id}{order data after id and start.} \item{time.group}{make a variable "before"."cut" that keeps track of whether (start, stop] is before (1) or after the cut (0).} } \description{ Constructs a start-stop formulation of event time data by splitting after a variable in the data set. Similar to SurvSplit of the survival package, but can also split after a random time given in the data frame.
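As a schematic sketch of what the split does (hypothetical record and cut value, default variable names; the exact column layout of the output may differ slightly), splitting a row observed on (0, 5] with an event at time 5 at the cut point 3 gives two rows on start-stop form:
\preformatted{
## original row:                    id start time status
##                                   1     0    5      1
## after event.split(d, cuts = 3):   1     0    3      0   # censored at the cut (cens.code)
##                                   1     3    5      1   # event kept in the last interval
}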
} \examples{ set.seed(1) d <- data.frame(event=round(5*runif(5),2),start=1:5,time=2*1:5, status=rbinom(5,1,0.5),x=1:5) d d0 <- event.split(d,cuts="event",name.start=0) d0 dd <- event.split(d,cuts="event") dd ddd <- event.split(dd,cuts=3.5) ddd event.split(ddd,cuts=5.5) ### successive cutting for many values dd <- d for (cuts in seq(2,3,by=0.3)) dd <- event.split(dd,cuts=cuts) dd } \author{ Thomas Scheike } \keyword{survival} timereg/man/simsubdist.Rd0000644000176200001440000000517713377441614015156 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sim-pc-hazard.r \name{simsubdist} \alias{simsubdist} \title{Simulation from subdistribution function assuming piecewise linearity} \usage{ simsubdist(cumhazard, rr, entry = NULL, type = "cloglog", startcum = c(0, 0), ...) } \arguments{ \item{cumhazard}{matrix that specifies times and values of some cumulative hazard.} \item{rr}{"relative risk" terms} \item{entry}{not implemented yet} \item{type}{either cloglog or logistic} \item{startcum}{c(0,0) to make the cumulative start in 0 with 0 cumhazard.} \item{...}{further arguments} } \description{ Simulation from a subdistribution function assuming piecewise linearity for the Fine-Gray or logistic link. } \examples{ data(sTRACE) cif <- comp.risk(Event(time,status)~const(vf),data=sTRACE,cause=9,model="logistic2") cumhaz <- cif$cum ## 1000 logistic without covariates with baseline from model fit sim1 <- simsubdist(cumhaz,1000,type="logistic") ### cifs <- comp.risk(Event(time,status)~+1,data=sim1,cause=1,model="logistic2") ### plot(cifs) lines(cifs$cum,col=2) ## 1000 logistic with covariates with baseline from model fit x <- rbinom(1000,1,0.5) rr <- exp(x*0.3) sim1 <- simsubdist(cumhaz,rr,type="logistic") sim1$x <- x cifs <- comp.risk(Event(time,status)~+const(x),data=sim1,cause=1,model="logistic2") ### cifs$gamma plot(cifs) lines(cumhaz,col=2) ################################################################## ### simulation of cumulative incidence with specified functions ################################################################## F1logit<-function(t,lam0=0.2,beta=0.3,x=0) { pt <- t*lam0; rr <- exp(x*beta); return(pt*rr/(1+pt*rr)); } F1p<-function(t,lam0=0.4,beta=0.3,x=0) # proportional version { return( 1 - exp(-(t*lam0)*exp(x*beta))) } n=10000 tt=seq(0,3,by=.01) t1 <- invsubdist(cbind(tt,F1p(tt)),runif(n)) t2 <- invsubdist(cbind(tt,F1p(tt,lam0=0.1)),runif(n)) rt <- rbinom(n,1,(F1p(3)+F1p(3,lam0=0.1))) rb <- rbinom(n,1,F1p(3)/(F1p(3)+F1p(3,lam0=0.1))) cause=ifelse(rb==1,1,2) time=ifelse(cause==1,t1$time,t2$time) cause <- rt*cause time[cause==0] <- 3 datC=data.frame(time=time,cause=cause) p1=comp.risk(Event(time,cause)~+1,data=datC,cause=1) p2=comp.risk(Event(time,cause)~+1,data=datC,cause=2) pp1=predict(p1,X=1,se=0) pp2=predict(p2,X=1,se=0) par(mfrow=c(1,2)) plot(pp1) lines(tt,F1p(tt),col=2) plot(pp2) lines(tt,F1p(tt,lam0=0.1),col=2) #to avoid dependencies when checking #library(prodlim) #pp=prodlim(Hist(time,cause)~+1) #par(mfrow=c(1,2)) #plot(pp,cause="1") #lines(tt,F1p(tt),col=2) #plot(pp,cause="2") #lines(tt,F1p(tt,lam0=0.1),col=2) } \author{ Thomas Scheike } \keyword{survival} timereg/man/rcrisk.Rd0000644000176200001440000000366413517763134014260 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sim-pc-hazard.r \name{rcrisk} \alias{rcrisk} \alias{cause.pchazard.sim}
\title{Simulation of Piecewise constant hazard models with two causes (Cox).} \usage{ rcrisk(cumhaz1, cumhaz2, rr1, rr2, cens = NULL, rrc = NULL, ...) } \arguments{ \item{cumhaz1}{cumulative hazard of cause 1} \item{cumhaz2}{cumulative hazard of cause 2} \item{rr1}{number of simulations or vector of relative risks for the simulations.} \item{rr2}{number of simulations or vector of relative risks for the simulations.} \item{cens}{to censor further, rate or cumulative hazard.} \item{rrc}{relative risk for censoring.} \item{...}{arguments for rchaz} } \description{ Simulates data from a piecewise constant baseline hazard that can also be of Cox type. Censors data at the highest value of the break points. } \examples{ data(TRACE) cox1 <- cox.aalen(Surv(time,status==9)~prop(vf)+prop(chf)+prop(wmi), data=TRACE,robust=0) cox2 <- cox.aalen(Surv(time,status==0)~prop(vf)+prop(chf)+prop(wmi), data=TRACE,robust=0) X1 <- TRACE[,c("vf","chf","wmi")] n <- 1000 xid <- sample(1:nrow(X1),n,replace=TRUE) Z1 <- X1[xid,] Z2 <- X1[xid,] rr1 <- exp(as.matrix(Z1) \%*\% cox1$gamma) rr2 <- exp(as.matrix(Z2) \%*\% cox2$gamma) cumhaz1 <- cox1$cum cumhaz2 <- cox2$cum d <- rcrisk(cox1$cum,cox2$cum,rr1,rr2) dd <- cbind(d,Z1) sc1 <- cox.aalen(Surv(time,status==1)~prop(vf)+prop(chf)+prop(wmi), data=dd,robust=0) cbind(sc1$gamma, cox1$gamma) sc2 <- cox.aalen(Surv(time,status==2)~prop(vf)+prop(chf)+prop(wmi), data=dd,robust=0) cbind(sc2$gamma, cox2$gamma) par(mfrow=c(1,2)) plot(cox1); lines(sc1$cum,col=2) plot(cox2$cum,type="l"); lines(sc2$cum,col=2) } \author{ Thomas Scheike } \keyword{survival} timereg/man/recurrent.marginal.mean.Rd0000644000176200001440000000351213377441614017504 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/recurrent.r \name{recurrent.marginal.mean} \alias{recurrent.marginal.mean} \title{Estimates marginal mean of recurrent events} \usage{ recurrent.marginal.mean(recurrent, death) } \arguments{ \item{recurrent}{aalen model for recurrent events} \item{death}{aalen model for death} } \description{ Fitting two Aalen models, for death and for recurrent events, these are combined to produce the estimator \deqn{ \int_0^t S(u) dR(u) } the mean number of recurrent events, here \deqn{ S(u) } is the probability of survival, and \deqn{ dR(u) } is the probability of an event among survivors. } \details{ IID versions used for the Ghosh & Lin (2000) variance. See also the mets package for a quick version of this for large data, mets:::recurrent.marginal; these two versions should give the same result when there are no ties. } \examples{ \donttest{ ### get some data using mets simulations library(mets) data(base1cumhaz) data(base4cumhaz) data(drcumhaz) dr <- drcumhaz base1 <- base1cumhaz base4 <- base4cumhaz rr <- simRecurrent(100,base1,death.cumhaz=dr) rr$x <- rnorm(nrow(rr)) rr$strata <- floor((rr$id-0.01)/50) drename(rr) <- start+stop~entry+time ar <- aalen(Surv(start,stop,status)~+1+cluster(id),data=rr,resample.iid=1 ,max.clust=NULL) ad <- aalen(Surv(start,stop,death)~+1+cluster(id),data=rr,resample.iid=1, ,max.clust=NULL) mm <- recurrent.marginal.mean(ar,ad) with(mm,plot(times,mu,type="s")) with(mm,lines(times,mu+1.96*se.mu,type="s",lty=2)) with(mm,lines(times,mu-1.96*se.mu,type="s",lty=2)) } } \references{ Ghosh and Lin (2000) Nonparametric Analysis of Recurrent events and death, Biometrics, 554--562.
} \author{ Thomas Scheike } \keyword{survival} timereg/man/const.Rd0000644000176200001440000000051413075643763014114 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/aalen.r \name{const} \alias{const} \title{Identifies parametric terms of model} \usage{ const(x) } \arguments{ \item{x}{variable} } \description{ Specifies which of the regressors that have constant effect. } \author{ Thomas Scheike } \keyword{survival} timereg/man/sim.cause.cox.Rd0000644000176200001440000000647713517763134015457 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sim-pc-hazard.r \name{sim.cause.cox} \alias{sim.cause.cox} \title{Simulation of cause specific from Cox models.} \usage{ \method{sim}{cause.cox}(coxs, n, data = NULL, cens = NULL, rrc = NULL, ...) } \arguments{ \item{coxs}{list of cox models.} \item{n}{number of simulations.} \item{data}{to extract covariates for simulations (draws from observed covariates).} \item{cens}{specifies censoring model, if NULL then only censoring for each cause at end of last event of this type. if "is.matrix" then uses cumulative. hazard given, if "is.scalar" then uses rate for exponential, and if not given then takes average rate of in simulated data from cox model. But censoring can also be given as a cause.} \item{rrc}{possible vector of relative risk for cox-type censoring.} \item{...}{arguments for rchaz, for example entry-time} } \description{ Simulates data that looks like fit from cause specific Cox models. Censor data automatically. When censoring is given in the list of causes this will give censoring that looks like the data. Covariates are drawn from data-set with replacement. This gives covariates like the data. } \examples{ nsim <- 1000 data(bmt) cox1 <- cox.aalen(Surv(time,cause==1)~prop(tcell)+prop(platelet),data=bmt,robust=0) cox2 <- cox.aalen(Surv(time,cause==2)~prop(tcell)+prop(platelet),data=bmt,robust=0) coxs <- list(cox1,cox2) dd <- sim.cause.cox(coxs,nsim,data=bmt) scox1 <- cox.aalen(Surv(time,status==1)~prop(tcell)+prop(platelet),data=dd,robust=0) scox2 <- cox.aalen(Surv(time,status==2)~prop(tcell)+prop(platelet),data=dd,robust=0) ### cbind(cox1$gamma,scox1$gamma) cbind(cox2$gamma,scox2$gamma) par(mfrow=c(1,2)) plot(cox1); lines(scox1$cum,col=2) plot(cox2$cum,type="l"); lines(scox2$cum,col=2) \donttest{ ### do not test to avoid dependence on mets library(mets) data(bmt) cox1 <- phreg(Surv(time,cause==1)~tcell+platelet,data=bmt) cox2 <- phreg(Surv(time,cause==2)~tcell+platelet,data=bmt) coxs <- list(cox1,cox2) dd <- sim.cause.cox(coxs,nsim,data=bmt) scox1 <- phreg(Surv(time,status==1)~tcell+platelet,data=dd) scox2 <- phreg(Surv(time,status==2)~tcell+platelet,data=dd) cbind(cox1$coef,scox1$coef) cbind(cox2$coef,scox2$coef) par(mfrow=c(1,2)) basehazplot.phreg(cox1); basehazplot.phreg(scox1,add=TRUE); basehazplot.phreg(cox2); basehazplot.phreg(scox2,add=TRUE); cox1 <- phreg(Surv(time,cause==1)~strata(tcell)+platelet,data=bmt) cox2 <- phreg(Surv(time,cause==2)~strata(tcell)+platelet,data=bmt) coxs <- list(cox1,cox2) dd <- sim.cause.cox(coxs,nsim,data=bmt) scox1 <- phreg(Surv(time,status==1)~strata(tcell)+platelet,data=dd) scox2 <- phreg(Surv(time,status==2)~strata(tcell)+platelet,data=dd) cbind(cox1$coef,scox1$coef) cbind(cox2$coef,scox2$coef) par(mfrow=c(1,2)) basehazplot.phreg(cox1); basehazplot.phreg(scox1,add=TRUE); basehazplot.phreg(cox2); basehazplot.phreg(scox2,add=TRUE); # coxph cox1 <- coxph(Surv(time,cause==1)~tcell+platelet,data=bmt) cox2 <- 
coxph(Surv(time,cause==2)~tcell+platelet,data=bmt) coxs <- list(cox1,cox2) dd <- sim.cause.cox(coxs,nsim,data=bmt) scox1 <- coxph(Surv(time,status==1)~tcell+platelet,data=dd) scox2 <- coxph(Surv(time,status==2)~tcell+platelet,data=dd) cbind(cox1$coef,scox1$coef) cbind(cox2$coef,scox2$coef) } } \author{ Thomas Scheike } \keyword{survival} timereg/man/plot.aalen.Rd0000644000176200001440000000523513377441614015024 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.aalen.r \name{plot.aalen} \alias{plot.aalen} \alias{plot.cox.aalen} \alias{plot.timecox} \alias{plot.prop.excess} \title{Plots estimates and test-processes} \usage{ \method{plot}{aalen}(x, pointwise.ci = 1, hw.ci = 0, sim.ci = 0, robust.ci = 0, col = NULL, specific.comps = FALSE, level = 0.05, start.time = 0, stop.time = 0, add.to.plot = FALSE, mains = TRUE, xlab = "Time", ylab = "Cumulative coefficients", score = FALSE, ...) } \arguments{ \item{x}{the output from the "aalen" function.} \item{pointwise.ci}{if >1 pointwise confidence intervals are plotted with lty=pointwise.ci} \item{hw.ci}{if >1 Hall-Wellner confidence bands are plotted with lty=hw.ci. Only 0.95 \% bands can be constructed.} \item{sim.ci}{if >1 simulation based confidence bands are plotted with lty=sim.ci. These confidence bands are robust to non-martingale behaviour.} \item{robust.ci}{robust standard errors are used to estimate standard error of estimate, otherwise martingale based standard errors are used.} \item{col}{specifice colors of different components of plot, in order: c(estimate,pointwise.ci,robust.ci,hw.ci,sim.ci) so for example, when we ask to get pointwise.ci, hw.ci and sim.ci we would say c(1,2,3,4) to use colors as specified.} \item{specific.comps}{all components of the model is plotted by default, but a list of components may be specified, for example first and third "c(1,3)".} \item{level}{gives the significance level.} \item{start.time}{start of observation period where estimates are plotted.} \item{stop.time}{end of period where estimates are plotted. Estimates thus plotted from [start.time, max.time].} \item{add.to.plot}{to add to an already existing plot.} \item{mains}{add names of covariates as titles to plots.} \item{xlab}{label for x-axis.} \item{ylab}{label for y-axis.} \item{score}{to plot test processes for test of time-varying effects along with 50 random realization under the null-hypothesis.} \item{...}{unused arguments - for S3 compatibility} } \description{ This function plots the non-parametric cumulative estimates for the additive risk model or the test-processes for the hypothesis of time-varying effects with re-sampled processes under the null. } \examples{ # see help(aalen) data(sTRACE) out<-aalen(Surv(time,status==9)~chf+vf,sTRACE,max.time=7,n.sim=100) par(mfrow=c(2,2)) plot(out,pointwise.ci=1,hw.ci=1,sim.ci=1,col=c(1,2,3,4)) par(mfrow=c(2,2)) plot(out,pointwise.ci=0,robust.ci=1,hw.ci=1,sim.ci=1,col=c(1,2,3,4)) } \references{ Martinussen and Scheike, Dynamic Regression models for Survival Data, Springer (2006). 
} \author{ Thomas Scheike } \keyword{survival} timereg/man/cum.residuals.Rd0000644000176200001440000000616513377441614015550 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mgresid.r \name{cum.residuals} \alias{cum.residuals} \title{Model validation based on cumulative residuals} \usage{ cum.residuals(object, data = sys.parent(), modelmatrix = 0, cum.resid = 1, n.sim = 500, weighted.test = 0, max.point.func = 50, weights = NULL) } \arguments{ \item{object}{an object of class 'aalen', 'timecox', 'cox.aalen' where the residuals are returned ('residuals=1')} \item{data}{data frame based on which residuals are computed.} \item{modelmatrix}{specifies a grouping of the data that is used for cumulating residuals. Must have same size as data and be ordered in the same way.} \item{cum.resid}{to compute residuals versus each of the continuous covariates in the model.} \item{n.sim}{number of simulations in resampling.} \item{weighted.test}{to compute a variance weighted version of the test-processes used for testing constant effects of covariates.} \item{max.point.func}{limits the amount of computations, only considers a max of 50 points on the covariate scales.} \item{weights}{weights for sum of martingale residuals, now for cum.resid=1.} } \value{ returns an object of type "cum.residuals" with the following arguments: \item{cum}{cumulative residuals versus time for the groups specified by modelmatrix. } \item{var.cum}{the martingale based pointwise variance estimates.} \item{robvar.cum}{robust pointwise variances estimates of cumulatives.} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value covariate effects based on supremum test.} \item{sim.testBeq0}{resampled supremum value.} \item{conf.band}{resampling based constant to construct robust 95\% uniform confidence bands for cumulative residuals.} \item{obs.test}{absolute value of supremum of observed test-process.} \item{pval.test}{p-value for supremum test statistic.} \item{sim.test}{resampled absolute value of supremum cumulative residuals.} \item{proc.cumz}{observed cumulative residuals versus all continuous covariates of model.} \item{sim.test.proccumz}{list of 50 random realizations of test-processes under model for all continuous covariates.} } \description{ Computes cumulative residuals and approximative p-values based on resampling techniques. } \examples{ data(sTRACE) # Fits Aalen model and returns residuals fit<-aalen(Surv(time,status==9)~age+sex+diabetes+chf+vf, data=sTRACE,max.time=7,n.sim=0,residuals=1) # constructs and simulates cumulative residuals versus age groups fit.mg<-cum.residuals(fit,data=sTRACE,n.sim=100, modelmatrix=model.matrix(~-1+factor(cut(age,4)),sTRACE)) par(mfrow=c(1,4)) # cumulative residuals with confidence intervals plot(fit.mg); # cumulative residuals versus processes under model plot(fit.mg,score=1); summary(fit.mg) # cumulative residuals vs. covariates Lin, Wei, Ying style fit.mg<-cum.residuals(fit,data=sTRACE,cum.resid=1,n.sim=100) par(mfrow=c(2,4)) plot(fit.mg,score=2) summary(fit.mg) } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). 
} \author{ Thomas Scheike } \keyword{survival} timereg/man/sim.cox.Rd0000644000176200001440000000417513517763134014351 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sim-pc-hazard.r \name{sim.cox} \alias{sim.cox} \alias{read.fit} \title{Simulation of output from Cox model.} \usage{ \method{sim}{cox}(cox, n, data = NULL, cens = NULL, rrc = NULL, entry = NULL, ...) } \arguments{ \item{cox}{output form coxph or cox.aalen model fitting cox model.} \item{n}{number of simulations.} \item{data}{to extract covariates for simulations (draws from observed covariates).} \item{cens}{specifies censoring model, if "is.matrix" then uses cumulative hazard given, if "is.scalar" then uses rate for exponential, and if not given then takes average rate of in simulated data from cox model.} \item{rrc}{possible vector of relative risk for cox-type censoring.} \item{entry}{delayed entry variable for simulation.} \item{...}{arguments for rchaz, for example entry-time} } \description{ Simulates data that looks like fit from Cox model. Censor data automatically for highest value of the event times by using cumulative hazard. } \examples{ data(TRACE) cox <- coxph(Surv(time,status==9)~vf+chf+wmi,data=TRACE) sim1 <- sim.cox(cox,1000,data=TRACE) cc <- coxph(Surv(time,status)~vf+chf+wmi,data=sim1) cbind(cox$coef,cc$coef) cor(sim1[,c("vf","chf","wmi")]) cor(TRACE[,c("vf","chf","wmi")]) cox <- cox.aalen(Surv(time, status==9) ~ prop(vf)+prop(chf)+prop(wmi),TRACE,robust=0) sim2 <- sim.cox(cox,1000,data=TRACE) cc <- cox.aalen(Surv(time, status)~prop(vf)+prop(chf)+prop(wmi),data=sim2,robust=0) ### plot(cox) lines(cc$cum,type="s",col=2) cbind(cox$gamma,cc$gamma) \donttest{ ### do not test to avoid dependence on mets library(mets) cox <- phreg(Surv(time, status==9)~vf+chf+wmi,data=TRACE) sim3 <- sim.cox(cox,1000,data=TRACE) cc <- phreg(Surv(time, status)~vf+chf+wmi,data=sim3) cbind(cox$coef,cc$coef) basehazplot.phreg(cox,se=TRUE) lines(cc$cumhaz,col=2) cox <- phreg(Surv(time,status==9)~strata(chf)+vf+wmi,data=TRACE) sim3 <- sim.cox(cox,1000,data=TRACE) cc <- phreg(Surv(time, status)~strata(chf)+vf+wmi,data=sim3) cbind(cox$coef,cc$coef) basehazplot.phreg(cox) basehazplot.phreg(cc,add=TRUE) } } \author{ Thomas Scheike } \keyword{survival} timereg/man/prop.odds.subdist.Rd0000644000176200001440000001511613377441614016352 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/prop-odds-subdist.r \name{prop.odds.subdist} \alias{prop.odds.subdist} \title{Fit Semiparametric Proportional 0dds Model for the competing risks subdistribution} \usage{ prop.odds.subdist(formula, data = sys.parent(), cause = 1, beta = NULL, Nit = 10, detail = 0, start.time = 0, max.time = NULL, id = NULL, n.sim = 500, weighted.test = 0, profile = 1, sym = 0, cens.model = "KM", cens.formula = NULL, clusters = NULL, max.clust = 1000, baselinevar = 1, weights = NULL, cens.weights = NULL) } \arguments{ \item{formula}{a formula object, with the response on the left of a '~' operator, and the terms on the right. 
The response must be an object as returned by the `Event' function.} \item{data}{a data.frame with the variables.} \item{cause}{cause indicator for competing risks.} \item{beta}{starting value for relative risk estimates} \item{Nit}{number of iterations for Newton-Raphson algorithm.} \item{detail}{if 0 no details is printed during iterations, if 1 details are given.} \item{start.time}{start of observation period where estimates are computed.} \item{max.time}{end of observation period where estimates are computed. Estimates thus computed from [start.time, max.time]. This is very useful to obtain stable estimates, especially for the baseline. Default is max of data.} \item{id}{For timevarying covariates the variable must associate each record with the id of a subject.} \item{n.sim}{number of simulations in resampling.} \item{weighted.test}{to compute a variance weighted version of the test-processes used for testing time-varying effects.} \item{profile}{use profile version of score equations.} \item{sym}{to use symmetrized second derivative in the case of the estimating equation approach (profile=0). This may improve the numerical performance.} \item{cens.model}{specifies censoring model. So far only Kaplan-Meier "KM".} \item{cens.formula}{possible formula for censoring distribution covariates. Default all !} \item{clusters}{to compute cluster based standard errors.} \item{max.clust}{number of maximum clusters to be used, to save time in iid decomposition.} \item{baselinevar}{set to 0 to save time on computations.} \item{weights}{additional weights.} \item{cens.weights}{specify censoring weights related to the observations.} } \value{ returns an object of type 'cox.aalen'. With the following arguments: \item{cum}{cumulative timevarying regression coefficient estimates are computed within the estimation interval.} \item{var.cum}{the martingale based pointwise variance estimates. } \item{robvar.cum}{robust pointwise variances estimates. } \item{gamma}{estimate of proportional odds parameters of model.} \item{var.gamma}{variance for gamma. } \item{robvar.gamma}{robust variance for gamma. } \item{residuals}{list with residuals. Estimated martingale increments (dM) and corresponding time vector (time).} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value for covariate effects based on supremum test.} \item{sim.testBeq0}{resampled supremum values.} \item{obs.testBeqC}{observed absolute value of supremum of difference between observed cumulative process and estimate under null of constant effect.} \item{pval.testBeqC}{p-value based on resampling.} \item{sim.testBeqC}{resampled supremum values.} \item{obs.testBeqC.is}{observed integrated squared differences between observed cumulative and estimate under null of constant effect.} \item{pval.testBeqC.is}{p-value based on resampling.} \item{sim.testBeqC.is}{resampled supremum values.} \item{conf.band}{resampling based constant to construct robust 95\% uniform confidence bands. 
} \item{test.procBeqC}{observed test-process of difference between observed cumulative process and estimate under null of constant effect over time.} \item{loglike}{modified partial likelihood, pseudo profile likelihood for regression parameters.} \item{D2linv}{inverse of the derivative of the score function.} \item{score}{value of score for final estimates.} \item{test.procProp}{observed score process for proportional odds regression effects.} \item{pval.Prop}{p-value based on resampling.} \item{sim.supProp}{re-sampled supremum values.} \item{sim.test.procProp}{list of 50 random realizations of test-processes for constant proportional odds under the model based on resampling.} } \description{ Fits a semiparametric proportional odds model: \deqn{ logit(F_1(t;X,Z)) = log( A(t)) + \beta^T Z } where A(t) is increasing but otherwise unspecified. Model is fitted by maximising the modified partial likelihood. A goodness-of-fit test by considering the score functions is also computed by resampling methods. } \details{ An alternative way of writing the model : \deqn{ F_1(t;X,Z) = \frac{ \exp( \beta^T Z )}{ (A(t)) + \exp( \beta^T Z) } } such that \eqn{\beta} is the log-odds-ratio of cause 1 before time t, and \eqn{A(t)} is the odds-ratio. The modelling formula uses the standard survival modelling given in the \bold{survival} package. The data for a subject is presented as multiple rows or "observations", each of which applies to an interval of observation (start, stop]. The program essentially assumes no ties, and if such are present a little random noise is added to break the ties. } \examples{ library(timereg) data(bmt) # Fits Proportional odds model out <- prop.odds.subdist(Event(time,cause)~platelet+age+tcell,data=bmt, cause=1,cens.model="KM",detail=0,n.sim=1000) summary(out) par(mfrow=c(2,3)) plot(out,sim.ci=2); plot(out,score=1) # simple predict function without confidence calculations pout <- predictpropodds(out,X=model.matrix(~platelet+age+tcell,data=bmt)[,-1]) matplot(pout$time,pout$pred,type="l") # predict function with confidence intervals pout2 <- predict(out,Z=c(1,0,1)) plot(pout2,col=2) pout1 <- predictpropodds(out,X=c(1,0,1)) lines(pout1$time,pout1$pred,type="l") # Fits Proportional odds model with stratified baseline, does not work yet! ###out <- Gprop.odds.subdist(Surv(time,cause==1)~-1+factor(platelet)+ ###prop(age)+prop(tcell),data=bmt,cause=bmt$cause, ###cens.code=0,cens.model="KM",causeS=1,detail=0,n.sim=1000) ###summary(out) ###par(mfrow=c(2,3)) ###plot(out,sim.ci=2); ###plot(out,score=1) } \references{ Eriksson, Li, Zhang and Scheike (2014), The proportional odds cumulative incidence model for competing risks, Biometrics, to appear. Scheike, A flexible semiparametric transformation model for survival data, Lifetime Data Anal. (2007). Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/man/plot.cum.residuals.Rd0000644000176200001440000000552613377441614016525 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/mgresid.r \name{plot.cum.residuals} \alias{plot.cum.residuals} \title{Plots cumulative residuals} \usage{ \method{plot}{cum.residuals}(x, pointwise.ci = 1, hw.ci = 0, sim.ci = 0, robust = 1, specific.comps = FALSE, level = 0.05, start.time = 0, stop.time = 0, add.to.plot = FALSE, mains = TRUE, main = NULL, xlab = NULL, ylab = "Cumulative MG-residuals", ylim = NULL, score = 0, conf.band = FALSE, ...) 
} \arguments{ \item{x}{the output from the "cum.residuals" function.} \item{pointwise.ci}{if >1 pointwise confidence intervals are plotted with lty=pointwise.ci} \item{hw.ci}{if >1 Hall-Wellner confidence bands are plotted with lty=hw.ci. Only 95\% bands can be constructed.} \item{sim.ci}{if >1 simulation based confidence bands are plotted with lty=sim.ci. These confidence bands are robust to non-martingale behaviour.} \item{robust}{if "1" robust standard errors are used to estimate standard error of estimate, otherwise martingale based estimate are used.} \item{specific.comps}{all components of the model is plotted by default, but a list of components may be specified, for example first and third "c(1,3)".} \item{level}{gives the significance level. Default is 0.05.} \item{start.time}{start of observation period where estimates are plotted. Default is 0.} \item{stop.time}{end of period where estimates are plotted. Estimates thus plotted from [start.time, max.time].} \item{add.to.plot}{to add to an already existing plot. Default is "FALSE".} \item{mains}{add names of covariates as titles to plots.} \item{main}{vector of names for titles in plots.} \item{xlab}{label for x-axis. NULL is default which leads to "Time" or "". Can also give a character vector.} \item{ylab}{label for y-axis. Default is "Cumulative MG-residuals".} \item{ylim}{limits for y-axis.} \item{score}{if '0' plots related to modelmatrix are specified, thus resulting in grouped residuals, if '1' plots for modelmatrix but with random realizations under model, if '2' plots residuals versus continuous covariates of model with random realizations under the model.} \item{conf.band}{makes simulation based confidence bands for the test processes under the 0 based on variance of these processes limits for y-axis. These will give additional information of whether the observed cumulative residuals are extreme or not when based on a variance weighted test.} \item{...}{unused arguments - for S3 compatibility} } \description{ This function plots the output from the cumulative residuals function "cum.residuals". The cumulative residuals are compared with the performance of similar processes under the model. } \examples{ # see cum.residuals for examples } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/man/pe.sasieni.Rd0000644000176200001440000000515213377441614015023 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.pe-sasieni.r \name{pe.sasieni} \alias{pe.sasieni} \alias{summary.pe-sasieni} \title{Fits Proportional excess hazards model with fixed offsets} \usage{ pe.sasieni(formula = formula(data), data = sys.parent(), id = NULL, start.time = 0, max.time = NULL, offsets = 0, Nit = 50, detail = 0, n.sim = 500) } \arguments{ \item{formula}{a formula object, with the response on the left of a `~' operator, and the terms on the right. The response must be a survival object as returned by the `Surv' function.} \item{data}{a data.frame with the variables.} \item{id}{gives the number of individuals.} \item{start.time}{starting time for considered time-period.} \item{max.time}{stopping considered time-period if different from 0. Estimates thus computed from [0,max.time] if max.time>0. 
Default is max of data.} \item{offsets}{fixed offsets giving the mortality.} \item{Nit}{number of itterations.} \item{detail}{if detail is one, prints iteration details.} \item{n.sim}{number of simulations, 0 for no simulations.} } \value{ Returns an object of type "pe.sasieni". With the following arguments: \item{cum}{baseline of Cox model excess risk.} \item{var.cum}{pointwise variance estimates for estimated cumulatives.} \item{gamma}{estimate of relative risk terms of model.} \item{var.gamma}{variance estimates for gamma.} \item{Ut}{score process for Cox part of model.} \item{D2linv}{The inverse of the second derivative.} \item{score}{final score} \item{test.Prop}{re-sampled absolute supremum values.} \item{pval.Prop}{p-value based on resampling.} } \description{ Fits proportional excess hazards model. The Sasieni proportional excess risk model. } \details{ The models are written using the survival modelling given in the survival package. The program assumes that there are no ties, and if such are present random noise is added to break the ties. } \examples{ data(mela.pop) out<-pe.sasieni(Surv(start,stop,status==1)~age+sex,mela.pop, id=1:205,Nit=10,max.time=7,offsets=mela.pop$rate,detail=0,n.sim=100) summary(out) ul<-out$cum[,2]+1.96*out$var.cum[,2]^.5 ll<-out$cum[,2]-1.96*out$var.cum[,2]^.5 plot(out$cum,type="s",ylim=range(ul,ll)) lines(out$cum[,1],ul,type="s"); lines(out$cum[,1],ll,type="s") # see also prop.excess function } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer Verlag (2006). Sasieni, P.D., Proportional excess hazards, Biometrika (1996), 127--41. Cortese, G. and Scheike, T.H., Dynamic regression hazards models for relative survival (2007), submitted. } \author{ Thomas Scheike } \keyword{survival} timereg/man/melanoma.Rd0000644000176200001440000000224513075643763014562 0ustar liggesusers\name{melanoma} \alias{melanoma} \non_function{} \title{The Melanoma Survival Data} \description{ The melanoma data frame has 205 rows and 7 columns. It contains data relating to survival of patients after operation for malignant melanoma collected at Odense University Hospital by K.T. Drzewiecki. } \format{ This data frame contains the following columns: \describe{ \item{no}{ a numeric vector. Patient code. } \item{status}{ a numeric vector code. Survival status. 1: dead from melanoma, 2: alive, 3: dead from other cause. } \item{days}{ a numeric vector. Survival time. } \item{ulc}{ a numeric vector code. Ulceration, 1: present, 0: absent. } \item{thick}{ a numeric vector. Tumour thickness (1/100 mm). } \item{sex}{ a numeric vector code. 0: female, 1: male. } } } \source{ Andersen, P.K., Borgan O, Gill R.D., Keiding N. (1993), \emph{Statistical Models Based on Counting Processes}, Springer-Verlag. Drzewiecki, K.T., Ladefoged, C., and Christensen, H.E. (1980), Biopsy and prognosis for cutaneous malignant melanoma in clinical stage I. Scand. J. Plast. Reconstru. Surg. 14, 141-144. } \examples{ data(melanoma) names(melanoma) } \keyword{datasets} timereg/man/pava.pred.Rd0000644000176200001440000000172513075643764014654 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/predict-timereg.r \name{pava.pred} \alias{pava.pred} \title{Make predictions of predict functions in rows mononotone} \usage{ pava.pred(pred, increasing = TRUE) } \arguments{ \item{pred}{predictions, either vector or rows of predictions.} \item{increasing}{increasing or decreasing.} } \value{ mononotone predictions. 
} \description{ Make predictions of predict functions in rows mononotone using the pool-adjacent-violators-algorithm } \examples{ data(bmt); ## competing risks add<-comp.risk(Event(time,cause)~platelet+age+tcell,data=bmt,cause=1) ndata<-data.frame(platelet=c(1,0,0),age=c(0,1,0),tcell=c(0,0,1)) out<-predict(add,newdata=ndata,uniform=0) par(mfrow=c(1,1)) head(out$P1) matplot(out$time,t(out$P1),type="s") ###P1m <- t(apply(out$P1,1,pava)) P1monotone <- pava.pred(out$P1) head(P1monotone) matlines(out$time,t(P1monotone),type="s") } \author{ Thomas Scheike } \keyword{survival} timereg/man/comp.risk.Rd0000644000176200001440000002714113377441614014674 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/comprisk.r \name{comp.risk} \alias{comp.risk} \title{Competings Risks Regression} \usage{ comp.risk(formula, data = sys.parent(), cause, times = NULL, Nit = 50, clusters = NULL, est = NULL, fix.gamma = 0, gamma = 0, n.sim = 0, weighted = 0, model = "fg", detail = 0, interval = 0.01, resample.iid = 1, cens.model = "KM", cens.formula = NULL, time.pow = NULL, time.pow.test = NULL, silent = 1, conv = 1e-06, weights = NULL, max.clust = 1000, n.times = 50, first.time.p = 0.05, estimator = 1, trunc.p = NULL, cens.weights = NULL, admin.cens = NULL, conservative = 1, monotone = 0, step = NULL) } \arguments{ \item{formula}{a formula object, with the response on the left of a '~' operator, and the terms on the right. The response must be a survival object as returned by the `Event' function. The status indicator is not important here. Time-invariant regressors are specified by the wrapper const(), and cluster variables (for computing robust variances) by the wrapper cluster().} \item{data}{a data.frame with the variables.} \item{cause}{For competing risk models specificies which cause we consider.} \item{times}{specifies the times at which the estimator is considered. Defaults to all the times where an event of interest occurs, with the first 10 percent or max 20 jump points removed for numerical stability in simulations.} \item{Nit}{number of iterations for Newton-Raphson algorithm.} \item{clusters}{specifies cluster structure, for backwards compability.} \item{est}{possible starting value for nonparametric component of model.} \item{fix.gamma}{to keep gamma fixed, possibly at 0.} \item{gamma}{starting value for constant effects.} \item{n.sim}{number of simulations in resampling.} \item{weighted}{Not implemented. To compute a variance weighted version of the test-processes used for testing time-varying effects.} \item{model}{"additive", "prop"ortional, "rcif", or "logistic".} \item{detail}{if 0 no details are printed during iterations, if 1 details are given.} \item{interval}{specifies that we only consider timepoints where the Kaplan-Meier of the censoring distribution is larger than this value.} \item{resample.iid}{to return the iid decomposition, that can be used to construct confidence bands for predictions} \item{cens.model}{specified which model to use for the ICPW, KM is Kaplan-Meier alternatively it may be "cox"} \item{cens.formula}{specifies the regression terms used for the regression model for chosen regression model. 
When cens.model is specified, the default is to use the same design as specified for the competing risks model.} \item{time.pow}{specifies that the power at which the time-arguments is transformed, for each of the arguments of the const() terms, default is 1 for the additive model and 0 for the proportional model.} \item{time.pow.test}{specifies that the power the time-arguments is transformed for each of the arguments of the non-const() terms. This is relevant for testing if a coefficient function is consistent with the specified form A_l(t)=beta_l t^time.pow.test(l). Default is 1 for the additive model and 0 for the proportional model.} \item{silent}{if 0 information on convergence problems due to non-invertible derviates of scores are printed.} \item{conv}{gives convergence criterie in terms of sum of absolute change of parameters of model} \item{weights}{weights for estimating equations.} \item{max.clust}{sets the total number of i.i.d. terms in i.i.d. decompostition. This can limit the amount of memory used by coarsening the clusters. When NULL then all clusters are used. Default is 1000 to save memory and time.} \item{n.times}{only uses 50 points for estimation, if NULL then uses all points, subject to p.start condition.} \item{first.time.p}{first point for estimation is pth percentile of cause jump times.} \item{estimator}{default estimator is 1.} \item{trunc.p}{truncation weight for delayed entry, P(T > entry.time | Z_i), typically Cox model.} \item{cens.weights}{censoring weights can be given here rather than calculated using the KM, cox or aalen models.} \item{admin.cens}{censoring times for the administrative censoring} \item{conservative}{set to 0 to compute correct variances based on censoring weights, default is conservative estimates that are much quicker.} \item{monotone}{monotone=0, uses estimating equations \deqn{ (D_\beta P_1) w(t) ( Y(t)/G_c(t) - P_1(t,X)) and } montone 1 uses \deqn{ w(t) ( Y(t)/G_c(t) - P_1(t,X)) and }} \item{step}{step size for Fisher-Scoring algorithm.} } \value{ returns an object of type 'comprisk'. With the following arguments: \item{cum}{cumulative timevarying regression coefficient estimates are computed within the estimation interval.} \item{var.cum}{pointwise variances estimates. } \item{gamma}{estimate of proportional odds parameters of model.} \item{var.gamma}{variance for gamma. } \item{score}{sum of absolute value of scores.} \item{gamma2}{estimate of constant effects based on the non-parametric estimate. 
Used for testing of constant effects.} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value for covariate effects based on supremum test.} \item{obs.testBeqC}{observed absolute value of supremum of difference between observed cumulative process and estimate under null of constant effect.} \item{pval.testBeqC}{p-value based on resampling.} \item{obs.testBeqC.is}{observed integrated squared differences between observed cumulative and estimate under null of constant effect.} \item{pval.testBeqC.is}{p-value based on resampling.} \item{conf.band}{resampling based constant to construct 95\% uniform confidence bands.} \item{B.iid}{list of iid decomposition of non-parametric effects.} \item{gamma.iid}{matrix of iid decomposition of parametric effects.} \item{test.procBeqC}{observed test process for testing of time-varying effects} \item{sim.test.procBeqC}{50 resample processes for for testing of time-varying effects} \item{conv}{information on convergence for time points used for estimation.} } \description{ Fits a semiparametric model for the cause-specific quantities : \deqn{ P(T < t, cause=1 | x,z) = P_1(t,x,z) = h( g(t,x,z) ) } for a known link-function \eqn{h()} and known prediction-function \eqn{g(t,x,z)} for the probability of dying from cause 1 in a situation with competing causes of death. } \details{ We consider the following models : 1) the additive model where \eqn{h(x)=1-\exp(-x)} and \deqn{ g(t,x,z) = x^T A(t) + (diag(t^p) z)^T \beta } 2) the proportional setting that includes the Fine & Gray (FG) "prop" model and some extensions where \eqn{h(x)=1-\exp(-\exp(x))} and \deqn{ g(t,x,z) = (x^T A(t) + (diag(t^p) z)^T \beta) } The FG model is obtained when \eqn{x=1}, but the baseline is parametrized as \eqn{\exp(A(t))}. The "fg" model is a different parametrization that contains the FG model, where \eqn{h(x)=1-\exp(-x)} and \deqn{ g(t,x,z) = (x^T A(t)) \exp((diag(t^p) z)^T \beta) } The FG model is obtained when \eqn{x=1}. 3) a "logistic" model where \eqn{h(x)=\exp(x)/( 1+\exp(x))} and \deqn{ g(t,x,z) = x^T A(t) + (diag(t^p) z)^T \beta } The "logistic2" is \deqn{ P_1(t,x,z) = x^T A(t) exp((diag(t^p) z)^T \beta)/ (1+ x^T A(t) exp((diag(t^p) z)^T \beta)) } The simple logistic model with just a baseline can also be fitted by an alternative procedure that has better small sample properties see prop.odds.subist(). 4) the relative cumulative incidence function "rcif" model where \eqn{h(x)=\exp(x)} and \deqn{ g(t,x,z) = x^T A(t) + (diag(t^p) z)^T \beta } The "rcif2" \deqn{ P_1(t,x,z) = (x^T A(t)) \exp((diag(t^p) z)^T \beta) } Where p by default is 1 for the additive model and 0 for the other models. In general p may be powers of the same length as z. Since timereg version 1.8.4. the response must be specified with the \code{\link{Event}} function instead of the \code{\link{Surv}} function and the arguments. For example, if the old code was comp.risk(Surv(time,cause>0)~x1+x2,data=mydata,cause=mydata$cause,causeS=1) the new code is comp.risk(Event(time,cause)~x1+x2,data=mydata,cause=1) Also the argument cens.code is now obsolete since cens.code is an argument of \code{\link{Event}}. 
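As a schematic sketch of the change in calling convention (hypothetical covariates x1 and x2 in a data set mydata; the censoring code is passed through Event() and defaults to 0):
\preformatted{
## old interface (timereg before 1.8.4):
## comp.risk(Surv(time,cause>0)~x1+x2, data=mydata,
##           cause=mydata$cause, causeS=1)
## current interface:
comp.risk(Event(time,cause,cens.code=0)~x1+x2, data=mydata, cause=1)
}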
} \examples{ data(bmt); clust <- rep(1:204,each=2) addclust<-comp.risk(Event(time,cause)~platelet+age+tcell+cluster(clust),data=bmt, cause=1,resample.iid=1,n.sim=100,model="additive") ### addclust<-comp.risk(Event(time,cause)~+1+cluster(clust),data=bmt,cause=1, resample.iid=1,n.sim=100,model="additive") pad <- predict(addclust,X=1) plot(pad) add<-comp.risk(Event(time,cause)~platelet+age+tcell,data=bmt, cause=1,resample.iid=1,n.sim=100,model="additive") summary(add) par(mfrow=c(2,4)) plot(add); ### plot(add,score=1) ### to plot score functions for test ndata<-data.frame(platelet=c(1,0,0),age=c(0,1,0),tcell=c(0,0,1)) par(mfrow=c(2,3)) out<-predict(add,ndata,uniform=1,n.sim=100) par(mfrow=c(2,2)) plot(out,multiple=0,uniform=1,col=1:3,lty=1,se=1) add<-comp.risk(Event(time,cause)~platelet+age+tcell,data=bmt, cause=1,resample.iid=0,n.sim=0,cens.model="cox", cens.formula=~factor(platelet),model="additive") out<-predict(add,ndata,se=0,uniform=0) par(mfrow=c(2,2)) plot(out,multiple=0,se=0,uniform=0,col=1:3,lty=1) ## fits additive model with some constant effects add.sem<-comp.risk(Event(time,cause)~ const(platelet)+const(age)+const(tcell),data=bmt, cause=1,resample.iid=1,n.sim=100,model="additive") summary(add.sem) out<-predict(add.sem,ndata,uniform=1,n.sim=100) par(mfrow=c(2,2)) plot(out,multiple=0,uniform=1,col=1:3,lty=1,se=0) ## Fine & Gray model fg<-comp.risk(Event(time,cause)~ const(platelet)+const(age)+const(tcell),data=bmt, cause=1,resample.iid=1,model="fg",n.sim=100) summary(fg) out<-predict(fg,ndata,uniform=1,n.sim=100) par(mfrow=c(2,2)) plot(out,multiple=1,uniform=0,col=1:3,lty=1,se=0) ## extended model with time-varying effects fg.npar<-comp.risk(Event(time,cause)~platelet+age+const(tcell), data=bmt,cause=1,resample.iid=1,model="prop",n.sim=100) summary(fg.npar); out<-predict(fg.npar,ndata,uniform=1,n.sim=100) head(out$P1[,1:5]); head(out$se.P1[,1:5]) par(mfrow=c(2,2)) plot(out,multiple=1,uniform=0,col=1:3,lty=1,se=0) ## Fine & Gray model with alternative parametrization for baseline fg2<-comp.risk(Event(time,cause)~const(platelet)+const(age)+const(tcell),data=bmt, cause=1,resample.iid=1,model="prop",n.sim=100) summary(fg2) ################################################################# ## Delayed entry models, ################################################################# nn <- nrow(bmt) entrytime <- rbinom(nn,1,0.5)*(bmt$time*runif(nn)) bmt$entrytime <- entrytime times <- seq(5,70,by=1) bmtw <- prep.comp.risk(bmt,times=times,time="time",entrytime="entrytime",cause="cause") ## non-parametric model outnp <- comp.risk(Event(time,cause)~tcell+platelet+const(age), data=bmtw,cause=1,fix.gamma=1,gamma=0, cens.weights=bmtw$cw,weights=bmtw$weights,times=times,n.sim=0) par(mfrow=c(2,2)) plot(outnp) outnp <- comp.risk(Event(time,cause)~tcell+platelet, data=bmtw,cause=1, cens.weights=bmtw$cw,weights=bmtw$weights,times=times,n.sim=0) par(mfrow=c(2,2)) plot(outnp) ## semiparametric model out <- comp.risk(Event(time,cause)~const(tcell)+const(platelet),data=bmtw,cause=1, cens.weights=bmtw$cw,weights=bmtw$weights,times=times,n.sim=0) summary(out) } \references{ Scheike, Zhang and Gerds (2008), Predicting cumulative incidence probability by direct binomial regression,Biometrika, 95, 205-220. Scheike and Zhang (2007), Flexible competing risks regression modelling and goodness of fit, LIDA, 14, 464-483. Martinussen and Scheike (2006), Dynamic regression models for survival data, Springer. 
} \author{ Thomas Scheike } \keyword{survival} timereg/man/qcut.Rd0000644000176200001440000000102713075643764013741 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/qcut.r \name{qcut} \alias{qcut} \title{Cut a variable} \usage{ qcut(x, cuts = 4, breaks = NULL, ...) } \arguments{ \item{x}{variable to cut} \item{cuts}{number of groups, 4 gives quartiles} \item{breaks}{can also give breaks} \item{...}{other arguments for the cut function of R} } \description{ Calls the cut function to cut variables of a data frame. } \examples{ data(sTRACE) gx <- qcut(sTRACE$age) table(gx) } \author{ Thomas Scheike } \keyword{survival} timereg/man/wald.test.Rd0000644000176200001440000000311413075643764014673 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/base.r \name{wald.test} \alias{wald.test} \title{Makes Wald test} \usage{ wald.test(object = NULL, coef = NULL, Sigma = NULL, contrast, coef.null = NULL, null = NULL, print.coef = TRUE, alpha = 0.05) } \arguments{ \item{object}{timereg object} \item{coef}{estimates from some model} \item{Sigma}{variance of estimates} \item{contrast}{contrast matrix for testing} \item{coef.null}{which indices to test equal to 0} \item{null}{mean of null, 0 by default} \item{print.coef}{print the coefficients of the linear combinations.} \item{alpha}{significance level for CI for linear combinations of coefficients.} } \description{ Makes a Wald test, either by a contrast matrix or by testing components equal to 0. Can also specify the regression coefficients and the variance matrix. Also makes confidence intervals of the defined contrasts. Reads coefficients and variances from timereg and coxph objects. } \examples{ data(sTRACE) # Fits Cox model out<-cox.aalen(Surv(time,status==9)~prop(age)+prop(sex)+ prop(vf)+prop(chf)+prop(diabetes),data=sTRACE,n.sim=0) wald.test(out,coef.null=c(1,2,3)) ### test age=sex vf=chf wald.test(out,contrast=rbind(c(1,-1,0,0,0),c(0,0,1,-1,0))) ### now same with direct specification of estimates and variance wald.test(coef=out$gamma,Sigma=out$var.gamma,coef.null=c(1,2,3)) wald.test(coef=out$gamma,Sigma=out$robvar.gamma,coef.null=c(1,2,3)) ### test age=sex vf=chf wald.test(coef=out$gamma,Sigma=out$var.gamma, contrast=rbind(c(1,-1,0,0,0),c(0,0,1,-1,0))) } \author{ Thomas Scheike } \keyword{survival} timereg/man/rchaz.Rd0000644000176200001440000000413013517763134014067 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/sim-pc-hazard.r \name{rchaz} \alias{rchaz} \alias{pc.hazard} \alias{simrchaz} \alias{addCums} \title{Simulation of Piecewise constant hazard model (Cox).} \usage{ rchaz(cumhazard, rr, n = NULL, entry = NULL, cum.hazard = TRUE, cause = 1, extend = FALSE) } \arguments{ \item{cumhazard}{cumulative hazard, or piecewise constant rates for periods defined by the first column of the input.} \item{rr}{number of simulations or vector of relative risks for the simulations.} \item{n}{number of simulations given as "n"} \item{entry}{delayed entry time for simulations.} \item{cum.hazard}{specifies whether the input is a cumulative hazard or rates.} \item{cause}{name of cause} \item{extend}{to extend piecewise constant with constant rate. Default is average rate over time from cumulative (when TRUE), if numeric then uses given rate.} } \description{ Simulates data from a piecewise constant baseline hazard that can also be of Cox type. Censors data at the highest value of the break points.
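A small sketch of the two simulation modes (illustrative values only; see also the examples below):
\preformatted{
cumhaz <- cbind(c(0,10,20), c(0,0.1,0.5))   # times and cumulative hazard values
sim1 <- rchaz(cumhaz, n=100)                # 100 draws from the baseline hazard
rr <- exp(0.2*rbinom(100,1,0.5))            # Cox-type relative risks
sim2 <- rchaz(cumhaz, rr)                   # one draw per element of rr
}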
} \examples{ rates <- c(0,0.01,0.052,0.01,0.04) breaks <- c(0,10, 20, 30, 40) haz <- cbind(breaks,rates) n <- 1000 X <- rbinom(n,1,0.5) beta <- 0.2 rrcox <- exp(X * beta) cumhaz <- cumsum(c(0,diff(breaks)*rates[-1])) cumhaz <- cbind(breaks,cumhaz) pctime <- rchaz(haz,n=1000,cum.hazard=FALSE) par(mfrow=c(1,2)) ss <- aalen(Surv(time,status)~+1,data=pctime,robust=0) plot(ss) lines(cumhaz,col=2,lwd=2) pctimecox <- rchaz(cumhaz,rrcox) pctime <- cbind(pctime,X) ssx <- cox.aalen(Surv(time,status)~+prop(X),data=pctimecox,robust=0) plot(ssx) lines(cumhaz,col=2,lwd=2) ### simulating data with hazard as real data data(TRACE) par(mfrow=c(1,2)) ss <- cox.aalen(Surv(time,status==9)~+prop(vf),data=TRACE,robust=0) par(mfrow=c(1,2)) plot(ss) ### pctime <- rchaz(ss$cum,n=1000) ### sss <- aalen(Surv(time,status)~+1,data=pctime,robust=0) lines(sss$cum,col=2,lwd=2) pctime <- rchaz(ss$cum,rrcox) pctime <- cbind(pctime,X) ### sss <- cox.aalen(Surv(time,status)~+prop(X),data=pctime,robust=0) summary(sss) plot(ss) lines(sss$cum,col=3,lwd=3) } \author{ Thomas Scheike } \keyword{survival} timereg/man/dynreg.Rd0000644000176200001440000001515713377441614014263 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.dynreg.r \name{dynreg} \alias{dynreg} \title{Fit time-varying regression model} \usage{ dynreg(formula, data = sys.parent(), aalenmod, bandwidth = 0.5, id = NULL, bhat = NULL, start.time = 0, max.time = NULL, n.sim = 500, meansub = 1, weighted.test = 0, resample = 0) } \arguments{ \item{formula}{a formula object with the response on the left of a '~' operator, and the independent terms on the right as regressors.} \item{data}{a data.frame with the variables.} \item{aalenmod}{Aalen model for measurement times. Specified as a survival model (see aalen function).} \item{bandwidth}{bandwidth for local iterations. Default is 50\% of the range of the considered observation period.} \item{id}{For timevarying covariates the variable must associate each record with the id of a subject.} \item{bhat}{initial value for estimates. If NULL local linear estimate is computed.} \item{start.time}{start of observation period where estimates are computed.} \item{max.time}{end of observation period where estimates are computed. Estimates thus computed from [start.time, max.time]. Default is max of data.} \item{n.sim}{number of simulations in resampling.} \item{meansub}{if '1' then the mean of the responses is subtracted before the estimation is carried out.} \item{weighted.test}{to compute a variance weighted version of the test-processes used for testing time-varying effects.} \item{resample}{returns resample processes.} } \value{ returns an object of type "dynreg". With the following arguments: \item{cum}{the cumulative regression coefficients. This is the efficient estimator based on an initial smoother obtained by local linear regression : \deqn{ \hat B(t) = \int_0^t \tilde \beta(s) ds+ \hspace{4 cm}}{} \deqn{ }{}\deqn{\int_0^t X^{-} (Diag(z) -Diag( X^T(s) \tilde \beta(s)) ) dp(ds \times dz), }{} where \eqn{\tilde \beta(t)} is an initial estimate either provided or computed by local linear regression. To plot this estimate use type="eff.smooth" in the plot() command. 
} \item{var.cum}{the martingale based pointwise variance estimates.} \item{robvar.cum}{robust pointwise variances estimates.} \item{gamma}{estimate of semi-parametric components of model.} \item{var.gamma}{variance for gamma.} \item{robvar.gamma}{robust variance for gamma.} \item{cum0}{simple estimate of cumulative regression coefficients that does not use use an initial smoothing based estimate \deqn{ \hat B_0(t) = \int_0^t X^{-} Diag(z) dp(ds \times dz). } To plot this estimate use type="0.mpp" in the plot() command. } \item{var.cum0}{the martingale based pointwise variance estimates of cum0.} \item{cum.ms}{estimate of cumulative regression coefficients based on initial smoother (but robust to this estimator). \deqn{ \hat B_{ms}(t) = \int_0^t X^{-} (Diag(z)-f(s)) dp(ds \times dz), } where \eqn{f} is chosen as the matrix \deqn{ f(s) = Diag( X^T(s) \tilde \beta(s)) ( I - X_\alpha(s) X_\alpha^-(s) ), } where \eqn{X_{\alpha}} is the design for the sampling intensities. This is also an efficient estimator when the initial estimator is consistent for \eqn{\beta(t)} and then asymptotically equivalent to cum, but small sample properties appear inferior. Its variance is estimated by var.cum. To plot this estimate use type="ms.mpp" in the plot() command. } \item{cum.ly}{estimator where local averages are subtracted. Special case of cum.ms. To plot this estimate use type="ly.mpp" in plot. } \item{var.cum.ly}{the martingale based pointwise variance estimates. } \item{gamma0}{estimate of parametric component of model. } \item{var.gamma0}{estimate of variance of parametric component of model. } \item{gamma.ly}{estimate of parametric components of model. } \item{var.gamma.ly}{estimate of variance of parametric component of model. } \item{gamma.ms}{estimate of variance of parametric component of model. } \item{var.gamma.ms}{estimate of variance of parametric component of model.} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value for covariate effects based on supremum test.} \item{sim.testBeq0}{resampled supremum values.} \item{obs.testBeqC}{observed absolute value of supremum of difference between observed cumulative process and estimate under null of constant effect.} \item{pval.testBeqC}{p-value based on resampling.} \item{sim.testBeqC}{resampled supremum values.} \item{obs.testBeqC.is}{observed integrated squared differences between observed cumulative and estimate under null of constant effect.} \item{pval.testBeqC.is}{p-value based on resampling.} \item{sim.testBeqC.is}{resampled supremum values.} \item{conf.band}{resampling based constant to construct robust 95\% uniform confidence bands.} \item{test.procBeqC}{observed test-process of difference between observed cumulative process and estimate under null of constant effect.} \item{sim.test.procBeqC}{list of 50 random realizations of test-processes under null based on resampling.} \item{covariance}{covariances for nonparametric terms of model.} } \description{ Fits time-varying regression model with partly parametric components. Time-dependent variables for longitudinal data. The model assumes that the mean of the observed responses given covariates is a linear time-varying regression model : } \details{ \deqn{ E( Z_{ij} | X_{ij}(t) ) = \beta^T(t) X_{ij}^1(t) + \gamma^T X_{ij}^2(t) } where \eqn{Z_{ij}} is the j'th measurement at time t for the i'th subject with covariates \eqn{X_{ij}^1} and \eqn{X_{ij}^2}. 
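As a rough illustration of the data layout this implies, each subject contributes one row per measurement, identified by \code{id}; the sketch below uses hypothetical values and mirrors the column roles of the csl data in the examples (response \code{prot}, interval endpoints \code{lt} and \code{rt} around each measurement time). \preformatted{
## subject 1 measured twice, subject 2 once (illustrative values only)
d <- data.frame(id    = c(1, 1, 2),
                lt    = c(0.0, 0.2, 0.0),   ## start of the observation interval
                rt    = c(0.2, 0.9, 0.5),   ## time of the measurement
                prot  = c(60, 72, 55),      ## observed response Z_ij
                treat = c(1, 1, 0))         ## covariate
}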
Resampling is used for computing p-values for tests of timevarying effects. The data for a subject is presented as multiple rows or 'observations', each of which applies to an interval of observation (start, stop]. For counting process data with the )start,stop] notation is used the 'id' variable is needed to identify the records for each subject. The program assumes that there are no ties, and if such are present random noise is added to break the ties. } \examples{ \donttest{ ## this runs slowly and is therfore donttest data(csl) indi.m<-rep(1,length(csl$lt)) # Fits time-varying regression model out<-dynreg(prot~treat+prot.prev+sex+age,data=csl, Surv(lt,rt,indi.m)~+1,start.time=0,max.time=2,id=csl$id, n.sim=100,bandwidth=0.7,meansub=0) summary(out) par(mfrow=c(2,3)) plot(out) # Fits time-varying semi-parametric regression model. outS<-dynreg(prot~treat+const(prot.prev)+const(sex)+const(age),data=csl, Surv(lt,rt,indi.m)~+1,start.time=0,max.time=2,id=csl$id, n.sim=100,bandwidth=0.7,meansub=0) summary(outS) } } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/man/res.mean.Rd0000644000176200001440000001775113377441614014505 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/ipcw-residualmean.r \name{res.mean} \alias{res.mean} \title{Residual mean life (restricted)} \usage{ res.mean(formula, data = sys.parent(), cause = 1, restricted = NULL, times = NULL, Nit = 50, clusters = NULL, gamma = 0, n.sim = 0, weighted = 0, model = "additive", detail = 0, interval = 0.01, resample.iid = 1, cens.model = "KM", cens.formula = NULL, time.pow = NULL, time.pow.test = NULL, silent = 1, conv = 1e-06, estimator = 1, cens.weights = NULL, conservative = 1, weights = NULL) } \arguments{ \item{formula}{a formula object, with the response on the left of a '~' operator, and the terms on the right. The response must be a survival object as returned by the `Event' function. The status indicator is not important here. Time-invariant regressors are specified by the wrapper const(), and cluster variables (for computing robust variances) by the wrapper cluster().} \item{data}{a data.frame with the variables.} \item{cause}{For competing risk models specificies which cause we consider.} \item{restricted}{gives a possible restriction times for means.} \item{times}{specifies the times at which the estimator is considered. Defaults to all the times where an event of interest occurs, with the first 10 percent or max 20 jump points removed for numerical stability in simulations.} \item{Nit}{number of iterations for Newton-Raphson algorithm.} \item{clusters}{specifies cluster structure, for backwards compability.} \item{gamma}{starting value for constant effects.} \item{n.sim}{number of simulations in resampling.} \item{weighted}{Not implemented. 
To compute a variance weighted version of the test-processes used for testing time-varying effects.} \item{model}{"additive", "prop"ortional.} \item{detail}{if 0 no details are printed during iterations, if 1 details are given.} \item{interval}{specifies that we only consider timepoints where the Kaplan-Meier of the censoring distribution is larger than this value.} \item{resample.iid}{to return the iid decomposition that can be used to construct confidence bands for predictions.} \item{cens.model}{specifies which model to use for the IPCW; "KM" is Kaplan-Meier, alternatively it may be a "cox" or "aalen" model for further flexibility.} \item{cens.formula}{specifies the regression terms used for the chosen censoring regression model. When cens.model is specified, the default is to use the same design as specified for the competing risks model. Possible values are "KM", "cox", "aalen" and "weights"; "weights" are user specified weights given in the cens.weights argument.} \item{time.pow}{specifies the power at which the time-arguments are transformed, for each of the arguments of the const() terms; default is 1 for the additive model and 0 for the proportional model.} \item{time.pow.test}{specifies the power at which the time-arguments are transformed for each of the arguments of the non-const() terms. This is relevant for testing if a coefficient function is consistent with the specified form A_l(t)=beta_l t^time.pow.test(l). Default is 1 for the additive model and 0 for the proportional model.} \item{silent}{if 0, information on convergence problems due to non-invertible derivatives of the scores is printed.} \item{conv}{gives the convergence criterion in terms of the sum of absolute changes of the parameters of the model.} \item{estimator}{specifies what is estimated.} \item{cens.weights}{censoring weights for estimating equations.} \item{conservative}{for slightly conservative standard errors.} \item{weights}{weights for estimating equations.} } \value{ returns an object of type 'comprisk'. With the following arguments: \item{cum}{cumulative timevarying regression coefficient estimates computed within the estimation interval.} \item{var.cum}{pointwise variance estimates. } \item{gamma}{estimate of proportional odds parameters of model.} \item{var.gamma}{variance for gamma. } \item{score}{sum of absolute value of scores.} \item{gamma2}{estimate of constant effects based on the non-parametric estimate. 
Used for testing of constant effects.} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value for covariate effects based on supremum test.} \item{obs.testBeqC}{observed absolute value of supremum of difference between observed cumulative process and estimate under null of constant effect.} \item{pval.testBeqC}{p-value based on resampling.} \item{obs.testBeqC.is}{observed integrated squared differences between observed cumulative and estimate under null of constant effect.} \item{pval.testBeqC.is}{p-value based on resampling.} \item{conf.band}{resampling based constant to construct 95\% uniform confidence bands.} \item{B.iid}{list of iid decomposition of non-parametric effects.} \item{gamma.iid}{matrix of iid decomposition of parametric effects.} \item{test.procBeqC}{observed test process for testing of time-varying effects.} \item{sim.test.procBeqC}{50 resample processes for testing of time-varying effects.} \item{conv}{information on convergence for time points used for estimation.} } \description{ Fits a semiparametric model for the residual life (estimator=1): \deqn{ E( \min(Y,\tau) -t | Y>=t) = h_1( g(t,x,z) ) } or cause specific years lost of Andersen (2012) (estimator=3) \deqn{ E( \tau- \min(Y_j,\tau) | Y>=0) = \int_0^t (1-F_j(s)) ds = h_2( g(t,x,z) ) } where \eqn{Y_j = \sum_j Y I(\epsilon=j) + \infty * I(\epsilon=0)} or (estimator=2) \deqn{ E( \tau- \min(Y_j,\tau) | Y<\tau, \epsilon=j) = h_3( g(t,x,z) ) = h_2(g(t,x,z)) F_j(\tau,x,z) } where \eqn{F_j(s,x,z) = P(Y<\tau, \epsilon=j | x,z )} for a known link-function \eqn{h()} and a known prediction-function \eqn{g(t,x,z)} } \details{ Uses the IPCW for the score equations based on \deqn{ w(t) \Delta(\tau)/P(\Delta(\tau)=1| T,\epsilon,X,Z) ( Y(t) - h_1(t,X,Z)) } where \eqn{\Delta(\tau)} is the at-risk indicator given data; this requires an IPCW model. Since timereg version 1.8.4, the response must be specified with the \code{\link{Event}} function instead of the \code{\link{Surv}} function. 
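In practice this just means that the response is written as, e.g., \code{Event(time, cause)} (with the competing risks codes in \code{cause}) where one would earlier have used \code{Surv(time, status)}; see the calls in the examples below.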
} \examples{ data(bmt); tau <- 100 ### residual restricted mean life out<-res.mean(Event(time,cause>=1)~factor(tcell)+factor(platelet),data=bmt,cause=1, times=0,restricted=tau,n.sim=0,model="additive",estimator=1); summary(out) out<-res.mean(Event(time,cause>=1)~factor(tcell)+factor(platelet),data=bmt,cause=1, times=seq(0,90,5),restricted=tau,n.sim=0,model="additive",estimator=1); par(mfrow=c(1,3)) plot(out) ### restricted years lost given death out21<-res.mean(Event(time,cause)~factor(tcell)+factor(platelet),data=bmt,cause=1, times=0,restricted=tau,n.sim=0,model="additive",estimator=2); summary(out21) out22<-res.mean(Event(time,cause)~factor(tcell)+factor(platelet),data=bmt,cause=2, times=0,restricted=tau,n.sim=0,model="additive",estimator=2); summary(out22) ### total restricted years lost out31<-res.mean(Event(time,cause)~factor(tcell)+factor(platelet),data=bmt,cause=1, times=0,restricted=tau,n.sim=0,model="additive",estimator=3); summary(out31) out32<-res.mean(Event(time,cause)~factor(tcell)+factor(platelet),data=bmt,cause=2, times=0,restricted=tau,n.sim=0,model="additive",estimator=3); summary(out32) ### delayed entry nn <- nrow(bmt) entrytime <- rbinom(nn,1,0.5)*(bmt$time*runif(nn)) bmt$entrytime <- entrytime bmtw <- prep.comp.risk(bmt,times=tau,time="time",entrytime="entrytime",cause="cause") out<-res.mean(Event(time,cause>=1)~factor(tcell)+factor(platelet),data=bmtw,cause=1, times=0,restricted=tau,n.sim=0,model="additive",estimator=1, cens.model="weights",weights=bmtw$cw,cens.weights=1/bmtw$weights); summary(out) } \references{ Andersen (2013), Decomposition of number of years lost according to causes of death, Statistics in Medicine, 5278-5285. Scheike, and Cortese (2015), Regression Modelling of Cause Specific Years Lost, Scheike, Cortese and Holmboe (2015), Regression Modelling of Restricted Residual Mean with Delayed Entry, } \author{ Thomas Scheike } \keyword{survival} timereg/man/prop.Rd0000644000176200001440000000112513075643764013746 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.cox-aalen.r \name{prop} \alias{prop} \title{Identifies the multiplicative terms in Cox-Aalen model and proportional excess risk model} \usage{ prop(x) } \arguments{ \item{x}{variable} } \description{ Specifies which of the regressors that belong to the multiplicative part of the Cox-Aalen model } \details{ \deqn{ \lambda_{i}(t) = Y_i(t) ( X_{i}^T(t) \alpha(t) ) \exp(Z_{i}^T(t) \beta ) } for this model prop specified the covariates to be included in \eqn{Z_{i}(t)} } \author{ Thomas Scheike } \keyword{survival} timereg/man/cox.aalen.Rd0000644000176200001440000002146013413657513014633 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/new.cox-aalen.r \name{cox.aalen} \alias{cox.aalen} \title{Fit Cox-Aalen survival model} \usage{ cox.aalen(formula = formula(data), data = sys.parent(), beta = NULL, Nit = 20, detail = 0, start.time = 0, max.time = NULL, id = NULL, clusters = NULL, n.sim = 500, residuals = 0, robust = 1, weighted.test = 0, covariance = 0, resample.iid = 1, weights = NULL, rate.sim = 1, beta.fixed = 0, max.clust = 1000, exact.deriv = 1, silent = 1, max.timepoint.sim = 100, basesim = 0, offsets = NULL, strata = NULL, propodds = 0, caseweight = NULL) } \arguments{ \item{formula}{a formula object with the response on the left of a '~' operator, and the independent terms on the right as regressors. The response must be a survival object as returned by the `Surv' function. 
Terms with a proportional effect are specified by the wrapper prop(), and cluster variables (for computing robust variances) by the wrapper cluster().} \item{data}{a data.frame with the variables.} \item{beta}{starting value for relative risk estimates.} \item{Nit}{number of iterations for Newton-Raphson algorithm.} \item{detail}{if 0 no details are printed during iterations, if 1 details are given.} \item{start.time}{start of observation period where estimates are computed.} \item{max.time}{end of observation period where estimates are computed. Estimates are thus computed from [start.time, max.time]. Default is max of data.} \item{id}{For timevarying covariates the variable must associate each record with the id of a subject.} \item{clusters}{cluster variable for computation of robust variances.} \item{n.sim}{number of simulations in resampling.} \item{residuals}{to return residuals that can be used for model validation in the function cum.residuals. Estimated martingale increments (dM) and corresponding time vector (time). When rate.sim=1 returns estimated martingales dM_i(t), and if rate.sim=0 returns a matrix of dN_i(t).} \item{robust}{to compute robust variances and construct processes for resampling. May be set to 0 to save memory and time, in particular for rate.sim=1.} \item{weighted.test}{to compute a variance weighted version of the test-processes used for testing time-varying effects.} \item{covariance}{to compute covariance estimates for nonparametric terms rather than just the variances.} \item{resample.iid}{to return the i.i.d. representation for nonparametric and parametric terms, based on counting process or martingale residuals (rate.sim).} \item{weights}{weights for weighted analysis.} \item{rate.sim}{rate.sim=1 such that resampling of residuals is based on estimated martingales and thus valid in the rate case, rate.sim=0 means that resampling is based on counting processes and thus only valid in the intensity case.} \item{beta.fixed}{option for computing the score process for a fixed relative risk parameter.} \item{max.clust}{sets the total number of i.i.d. terms in the i.i.d. decomposition. This can limit the amount of memory used by coarsening the clusters. When NULL then all clusters are used. Default is 1000 to save memory and time.} \item{exact.deriv}{if 1 then uses the exact derivative in the last iteration, if 2 then uses the exact derivative for all iterations, and if 0 then uses an approximation for all computations and there may be a small bias in the variance estimates. For the Cox model the computation is always exact and all options give the same results.} \item{silent}{if 1 then suppresses some output.} \item{max.timepoint.sim}{considers only this resolution on the time scale for simulations, see the time.sim.resolution argument.} \item{basesim}{1 to get simulations for the cumulative baseline, including tests for constant effects.} \item{offsets}{offsets for analysis on log-scale. RR=exp(offsets + x beta).} \item{strata}{future option for making strata in a different way than through the X design in the cox-aalen model (~-1+factor(strata)).} \item{propodds}{if 1 will fit the proportional odds model. Slightly less efficient than the prop.odds() function but much quicker; for large data this also works.} \item{caseweight}{these weights have length equal to the number of jump times, and are multiplied onto all jump times dN. Useful for getting the program to fit for example the proportional odds model or frailty models.} } \value{ returns an object of type "cox.aalen". 
With the following arguments: \item{cum}{cumulative timevarying regression coefficient estimates are computed within the estimation interval. } \item{var.cum}{the martingale based pointwise variance estimates. } \item{robvar.cum}{robust pointwise variance estimates. } \item{gamma}{estimate of parametric components of model. } \item{var.gamma}{variance for gamma; sandwich estimator based on the optional variation estimator of the score and the 2nd derivative.} \item{robvar.gamma}{robust variance for gamma. } \item{residuals}{list with residuals.} \item{obs.testBeq0}{observed absolute value of supremum of cumulative components scaled with the variance.} \item{pval.testBeq0}{p-value for covariate effects based on supremum test.} \item{sim.testBeq0}{resampled supremum values.} \item{obs.testBeqC}{observed absolute value of supremum of difference between observed cumulative process and estimate under null of constant effect.} \item{pval.testBeqC}{p-value based on resampling.} \item{sim.testBeqC}{resampled supremum values.} \item{obs.testBeqC.is}{observed integrated squared differences between observed cumulative and estimate under null of constant effect.} \item{pval.testBeqC.is}{p-value based on resampling.} \item{sim.testBeqC.is}{resampled supremum values.} \item{conf.band}{resampling based constant to construct robust 95\% uniform confidence bands. } \item{test.procBeqC}{observed test-process of difference between observed cumulative process and estimate under null of constant effect over time. } \item{sim.test.procBeqC}{list of 50 random realizations of test-processes under null based on resampling.} \item{covariance}{covariances for nonparametric terms of model.} \item{B.iid}{Resample processes for nonparametric terms of model.} \item{gamma.iid}{Resample processes for parametric terms of model.} \item{loglike}{approximate log-likelihood for model, similar to Cox's partial likelihood. Only computed when robust=1.} \item{D2linv}{inverse of the derivative of the score function.} \item{score}{value of score for final estimates.} \item{test.procProp}{observed score process for proportional part of model.} \item{var.score}{variance of score process (optional variation estimator for beta.fixed=1 and robust estimator otherwise).} \item{pval.Prop}{p-value based on resampling.} \item{sim.supProp}{re-sampled absolute supremum values.} \item{sim.test.procProp}{list of 50 random realizations of test-processes for proportionality under the model based on resampling.} } \description{ Fits a Cox-Aalen survival model. Time dependent variables and counting process data (multiple events per subject) are possible. } \details{ \deqn{ \lambda_{i}(t) = Y_i(t) ( X_{i}^T(t) \alpha(t) ) \exp(Z_{i}^T \beta ) } The model thus contains Cox's regression model as a special case. To fit a stratified Cox model it is important to parametrize the baseline appropriately (see example below). Resampling is used for computing p-values for tests of time-varying effects. Proportionality is tested by considering the score processes for the proportional effects of the model. The modelling formula uses the standard survival modelling given in the \bold{survival} package. The data for a subject is presented as multiple rows or 'observations', each of which applies to an interval of observation (start, stop]. For counting process data where the (start, stop] notation is used, the 'id' variable is needed to identify the records for each subject. 
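A schematic sketch of such (start, stop] input with one time-dependent covariate is given below; the column names and values are purely illustrative (not from a shipped data set), and the commented call only indicates the form of the fit. \preformatted{
## two records for subject 1 (covariate tdc changes value at time 2), one for subject 2
d <- data.frame(start  = c(0, 2, 0),
                stop   = c(2, 5, 4),
                status = c(0, 1, 0),
                x      = c(1, 1, 0),    ## additive (Aalen) part
                tdc    = c(0, 1, 0),    ## proportional part, time-dependent
                id     = c(1, 1, 2))
## fit <- cox.aalen(Surv(start, stop, status) ~ x + prop(tdc),
##                  data = d, id = d$id, n.sim = 0)
}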
The program assumes that there are no ties, and if such are present random noise is added to break the ties. } \examples{ library(timereg) data(sTRACE) # Fits Cox model out<-cox.aalen(Surv(time,status==9)~prop(age)+prop(sex)+ prop(vf)+prop(chf)+prop(diabetes),data=sTRACE) # makes Lin, Wei, Ying test for proportionality summary(out) par(mfrow=c(2,3)) plot(out,score=1) # Fits stratified Cox model out<-cox.aalen(Surv(time,status==9)~-1+factor(vf)+ prop(age)+prop(sex)+ prop(chf)+prop(diabetes),data=sTRACE,max.time=7,n.sim=100) summary(out) par(mfrow=c(1,2)); plot(out); # Same model, but needs to invert the entire marix for the aalen part: X(t) out<-cox.aalen(Surv(time,status==9)~factor(vf)+ prop(age)+prop(sex)+ prop(chf)+prop(diabetes),data=sTRACE,max.time=7,n.sim=100) summary(out) par(mfrow=c(1,2)); plot(out); # Fits Cox-Aalen model out<-cox.aalen(Surv(time,status==9)~prop(age)+prop(sex)+ vf+chf+prop(diabetes),data=sTRACE,max.time=7,n.sim=100) summary(out) par(mfrow=c(2,3)) plot(out) } \references{ Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/man/bmt.Rd0000644000176200001440000000133413075643763013551 0ustar liggesusers\name{bmt} \alias{bmt} \non_function{} \title{The Bone Marrow Transplant Data} \description{ Bone marrow transplant data with 408 rows and 5 columns. } \format{ The data has 408 rows and 5 columns. \describe{ \item{cause}{a numeric vector code. Survival status. 1: dead from treatment related causes, 2: relapse , 0: censored.} \item{time}{ a numeric vector. Survival time. } \item{platelet}{a numeric vector code. Plalelet 1: more than 100 x \eqn{10^9} per L, 0: less.} \item{tcell}{a numeric vector. T-cell depleted BMT 1:yes, 0:no.} \item{age}{a numeric vector code. Age of patient, scaled and centered ((age-35)/15).} } } \source{ Simulated data } \references{ NN } \examples{ data(bmt) names(bmt) } \keyword{datasets} timereg/man/diabetes.Rd0000644000176200001440000000203113075643763014542 0ustar liggesusers\name{diabetes} \alias{diabetes} \non_function{} \title{The Diabetic Retinopathy Data} \description{ The data was colleceted to test a laser treatment for delaying blindness in patients with dibetic retinopathy. The subset of 197 patiens given in Huster et al. (1989) is used. } \format{ This data frame contains the following columns: \describe{ \item{id}{a numeric vector. Patient code.} \item{agedx}{a numeric vector. Age of patient at diagnosis.} \item{time}{a numeric vector. Survival time: time to blindness or censoring.} \item{status}{ a numeric vector code. Survival status. 1: blindness, 0: censored.} \item{trteye}{a numeric vector code. Random eye selected for treatment. 1: left eye 2: right eye.} \item{treat}{a numeric vector. 1: treatment 0: untreated.} \item{adult}{a numeric vector code. 1: younger than 20, 2: older than 20.} } } \source{ Huster W.J. and Brookmeyer, R. and Self. S. (1989) MOdelling paired survival data with covariates, Biometrics 45, 145-56. 
} \examples{ data(diabetes) names(diabetes) } \keyword{datasets} timereg/man/krylow.pls.Rd0000644000176200001440000000247713075643763015124 0ustar liggesusers% Generated by roxygen2: do not edit by hand % Please edit documentation in R/krylow.pls.r \name{krylow.pls} \alias{krylow.pls} \title{Fits Krylow based PLS for additive hazards model} \usage{ krylow.pls(D, d, dim = 1) } \arguments{ \item{D}{defined above} \item{d}{defined above} \item{dim}{number of pls dimensions} } \value{ returns a list with the following arguments: \item{beta}{PLS regression coefficients} } \description{ Fits the PLS estimator for the additive risk model based on the least squares fitting criterion } \details{ \deqn{ L(\beta,D,d) = \beta^T D \beta - 2 \beta^T d } where \eqn{D=\int Z H Z dt} and \eqn{d=\int Z H dN}. } \examples{ ## makes data for pbc complete case data(mypbc) pbc<-mypbc pbc$time<-pbc$time+runif(418)*0.1; pbc$time<-pbc$time/365 pbc<-subset(pbc,complete.cases(pbc)); covs<-as.matrix(pbc[,-c(1:3,6)]) covs<-cbind(covs[,c(1:6,16)],log(covs[,7:15])) ## computes the matrices needed for the least squares ## criterion out<-aalen(Surv(time,status>=1)~const(covs),pbc,robust=0,n.sim=0) S=out$intZHZ; s=out$intZHdN; out<-krylow.pls(S,s,dim=2) } \references{ Martinussen and Scheike, The Aalen additive hazards model with high-dimensional regressors, submitted. Martinussen and Scheike, Dynamic Regression Models for Survival Data, Springer (2006). } \author{ Thomas Scheike } \keyword{survival} timereg/DESCRIPTION0000644000176200001440000000200213520020332013374 0ustar liggesusersPackage: timereg Title: Flexible Regression Models for Survival Data Version: 1.9.4 Date: 2019-07-29 Author: Thomas Scheike with contributions from Torben Martinussen, Jeremy Silver and Klaus Holst Maintainer: Thomas Scheike Description: Programs for Martinussen and Scheike (2006), `Dynamic Regression Models for Survival Data', Springer Verlag. Plus more recent developments. Additive survival model, semiparametric proportional odds model, fast cumulative residuals, excess risk models and more. Flexible competing risks regression including GOF-tests. Two-stage frailty modelling. PLS for the additive risk model. Lasso in the 'ahaz' package. 
LazyLoad: yes URL: https://github.com/scheike/timereg.git Depends: R (>= 2.15), survival Imports: lava, numDeriv, stats, graphics, grDevices, utils, methods Suggests: mets, License: GPL (>= 2) RoxygenNote: 6.1.1 NeedsCompilation: yes Packaged: 2019-07-30 09:30:05 UTC; bhd252 Repository: CRAN Date/Publication: 2019-07-30 10:50:02 UTC timereg/src/0000755000176200001440000000000013520007035012470 5ustar liggesuserstimereg/src/comprisk.c0000644000176200001440000010203713520007035014466 0ustar liggesusers//#include #include #include "matrix.h" void itfit(times,Ntimes,x,censcode,cause,KMc,z,n,px,Nit,betaS, score,hess,est,var,sim,antsim,rani,test,testOBS,Ut,simUt,weighted, gamma,vargamma,semi,zsem,pg,trans,gamma2,CA,line,detail,biid,gamiid,resample, timepow,clusters,antclust,timepowtest,silent,convc,weights,entry,trunkp,estimator,fixgamma,stratum,ordertime, conservative,ssf,KMtimes,gamscore,Dscore,monotone) double *times,*betaS,*x,*KMc,*z,*score,*hess,*est,*var,*test,*testOBS, *Ut,*simUt,*gamma,*zsem,*gamma2,*biid,*gamiid,*vargamma,*timepow, *timepowtest,*convc,*weights,*entry,*trunkp,*ssf,*KMtimes,*gamscore,*Dscore; int *n,*px,*Ntimes,*Nit,*cause,*censcode,*sim,*antsim,*rani,*weighted, *semi,*pg,*trans,*CA,*line,*detail,*resample,*clusters,*antclust,*silent,*estimator, *fixgamma,*stratum,*ordertime,*conservative,*monotone; { // {{ // {{{ allocation and reading of data from R matrix *wX,*X,*cX,*A,*AI,*cumAt[*antclust],*VAR,*Z,*censX; vector *VdB,*risk,*SCORE,*W,*Y,*Gc,*CAUSE,*bhat,*pbhat,*beta,*xi,*censXv, *rr,*rowX,*difbeta,*qs,*bhatub,*betaub,*dcovs,*pcovs,*zi,*rowZ,*zgam,*vcumentry; vector *cumhatA[*antclust],*cumA[*antclust],*bet1,*gam,*dp,*dp1,*dp2; int left=0,clusterj,osilent,convt=1,ps,sing,c,i,j,k,l,s,it,convproblems=0; double step,prede,varp=0.5,nrisk,time,sumscore,totrisk, *vcudif=calloc((*Ntimes)*(*px+1),sizeof(double)), *cifentry=calloc((*n),sizeof(double)), *cumentry=calloc((*n)*(*px+1),sizeof(double)); float gasdev(),expdev(),ran1(); ps=(*px); // }}} step=ssf[0]; if (*semi==0) { osilent=silent[0]; silent[0]=0; malloc_mats(*n,*px,&wX,&X,&cX,&censX,NULL); if (*trans==2) {malloc_mat(*n,*pg,Z);malloc_vecs(*pg,&zgam,&gam,&zi,&rowZ,NULL);} malloc_mats(ps,ps,&A,&AI,&VAR,NULL); malloc_vecs(*n,&rr,&bhatub,&risk,&W,&Y,&Gc,&CAUSE,&bhat,&pbhat,NULL); malloc_vecs(*px,&vcumentry,&bet1,&xi,&rowX,&censXv,NULL); malloc_vecs(ps,&dp,&dp1,&dp2,&dcovs,&pcovs,&betaub,&VdB,&qs,&SCORE,&beta,&difbeta,NULL); for (i=0;i<*antclust;i++) { malloc_vec(ps,cumhatA[i]); malloc_vec(ps,cumA[i]); malloc_mat(*Ntimes,ps,cumAt[i]); } for (c=0;c=time); totrisk=totrisk+VE(risk,j); extract_row(X,j,xi); if (it==0 && (s==0)) { scl_vec_mult(pow(weights[j],0.5),xi,rowX); replace_row(wX,j,rowX); } VE(bhat,j)=vec_prod(xi,bet1); if (*trans==1) { // {{{ VE(pbhat,j)=1-exp(-VE(bhat,j)); varp=VE(pbhat,j)*(1-VE(pbhat,j)); scl_vec_mult(1-VE(pbhat,j),xi,dp); } if (*trans==2) { VE(pbhat,j)=1-exp(-exp(VE(bhat,j))); varp=VE(pbhat,j)*(1-VE(pbhat,j)); scl_vec_mult((1-VE(pbhat,j))*exp(VE(bhat,j)),xi,dp); } if (*trans==6) { VE(pbhat,j)=1-exp(-VE(bhat,j)); varp=VE(pbhat,j)*(1-VE(pbhat,j)); scl_vec_mult((1-VE(pbhat,j)),xi,dp); } if (*trans==3) { VE(pbhat,j)=exp(VE(bhat,j))/(1+exp(VE(bhat,j))); varp=VE(pbhat,j)*(1-VE(pbhat,j)); scl_vec_mult(exp(VE(bhat,j))/pow((1+exp(VE(bhat,j))),2),xi,dp); } if (*trans==7) { VE(pbhat,j)=VE(bhat,j)/(1+VE(bhat,j)); scl_vec_mult(1/pow((1+VE(bhat,j)),2),xi,dp); } if (*trans==4) { VE(pbhat,j)=exp(VE(bhat,j)); varp=VE(pbhat,j)*(1-VE(pbhat,j)); scl_vec_mult(exp(VE(bhat,j)),xi,dp); } if (*trans==5) { 
VE(pbhat,j)=VE(bhat,j); varp=VE(pbhat,j)*(1-VE(pbhat,j)); scl_vec_mult(1,xi,dp); } // }}} scl_vec_mult(1,dp,dp1); if (*estimator<=2) scl_vec_mult(pow(weights[j],0.5),dp,dp); else scl_vec_mult(pow(weights[j],0.5)*(timeentry[j]),dp,dp); replace_row(cX,j,dp); // printf(" %d \n",cause[j]); printf(" %d \n",abs(cause[j])); VE(Y,j)=((x[j]<=time) & (abs(cause[j])==*CA))*1; if (cause[j]<0) VE(Y,j)=-1*VE(Y,j); if (it==(*Nit-1) && (*conservative==0)) { // {{{ for censoring distrubution if (*monotone==1) scl_vec_mult(1,xi,dp1); if (KMc[j]>0.001) scl_vec_mult(weights[j]*VE(Y,j)/KMc[j],dp1,dp1); else scl_vec_mult(weights[j]*VE(Y,j)/0.001,dp1,dp1); vec_add(censXv,dp1,censXv); replace_row(censX,j,dp1); } // }}} if (*estimator==1) { if (KMc[j]<0.001) VE(Y,j)=((VE(Y,j)/0.001)-VE(pbhat,j)); else VE(Y,j)=( (VE(Y,j)/KMc[j])-VE(pbhat,j))*(time>entry[j]); } else if (*estimator==2) // truncation, but not implemented { if (KMc[j]<0.001) VE(Y,j)=(1/0.001)*(VE(Y,j)-VE(pbhat,j)); else VE(Y,j)=(1/KMc[j])*(VE(Y,j)-VE(pbhat,j)/trunkp[j]); } else if (*estimator==3) { VE(Y,j)=(VE(Y,j)-VE(pbhat,j))*(timeentry[j]);; } else if (*estimator==4) { if (KMc[j]<0.001) VE(Y,j)=((VE(Y,j)/0.001)-VE(pbhat,j)); else VE(Y,j)=( (VE(Y,j)/KMc[j])-VE(pbhat,j)); if (varp>0.001) VE(Y,j)=VE(Y,j)/varp; else VE(Y,j)=VE(Y,j)/0.001; } VE(Y,j)=pow(weights[j],0.5)*VE(Y,j); prede=(VE(Y,j)); if (it==(*Nit-1)) ssf[0]+=pow(prede,2); } // j=0;j0.5) && (it==(*Nit-1))) ) { Rprintf("missing values in SCORE or lacking convergence %ld \n",(long int) s); convproblems=1; convt=0; silent[s]=2; it=*Nit-1; for (c=0;c0) convc[0]=1; if (*semi==0) { free_mats(&wX,&censX,&VAR,&X,&cX,&A,&AI,NULL); if (*trans==2) {free_mats(&Z,NULL); free_vecs(&zgam,&gam,&zi,&rowZ,NULL);} free_vecs(&censXv,&rr,&bhatub,&risk,&W,&Y,&Gc,&CAUSE,&bhat,&pbhat,NULL); free_vecs(&vcumentry,&bet1,&xi,&rowX,NULL); free_vecs(&dp,&dp1,&dp2,&dcovs,&pcovs,&betaub,&VdB,&qs,&SCORE,&beta,&difbeta,NULL); for (i=0;i<*antclust;i++) {free_vec(cumhatA[i]); free_vec(cumA[i]); free_mat(cumAt[i]);} } free(vcudif); free(cumentry); free(cifentry); } // }}} void itfitsemi(times,Ntimes,x,censcode,cause, KMc,z,antpers,px,Nit, score,hess,est,var,sim, antsim,rani,test,testOBS,Ut, simUt,weighted,gamma,vargamma,semi, zsem,pg,trans,gamma2,CA, line,detail,biid,gamiid,resample, timepow,clusters,antclust,timepowtest,silent,convc,weights,entry,trunkp, estimator,fixgamma,stratum,ordertime,conservative,ssf,KMtimes, gamscore,Dscore,monotone) double *times,*x,*KMc,*z,*score,*hess,*est,*var,*test,*testOBS,*Ut,*simUt,*gamma,*zsem, *vargamma,*gamma2,*biid,*gamiid,*timepow,*timepowtest,*entry,*trunkp,*convc,*weights,*ssf,*KMtimes,*gamscore,*Dscore; int *antpers,*px,*Ntimes,*Nit,*cause,*censcode,*sim,*antsim,*rani,*weighted,*monotone, *semi,*pg,*trans,*CA,*line,*detail,*resample,*clusters,*antclust,*silent,*estimator,*fixgamma,*stratum,*ordertime,*conservative; { // {{{ // {{{ allocation and reading of data from R matrix *ldesignX,*A,*AI,*cdesignX,*ldesignG,*cdesignG,*censX,*censZ; matrix *wX,*wZ; matrix *S,*dCGam,*CGam,*ICGam,*VarKorG,*dC,*XZ,*ZZ,*ZZI,*XZAI; matrix *Ct,*C[*Ntimes],*Acorb[*Ntimes],*tmpM2,*tmpM3,*tmpM4; matrix *Vargam,*dVargam,*M1M2[*Ntimes],*Delta,*dM1M2,*M1M2t,*RobVargam; matrix *W3t[*antclust],*W4t[*antclust]; // matrix *W3tcens[*antclust],*W4tcens[*antclust]; vector *W2[*antclust],*W3[*antclust]; // vector *W2cens[*antclust],*W3cens[*antclust]; vector *dB,*dN,*VdB,*AIXdN,*AIXlamt,*bhatt,*truncbhatt,*pbhat,*plamt,*ciftrunk; vector *korG,*pghat,*rowG,*gam,*dgam,*ZGdN,*IZGdN,*ZGlamt,*IZGlamt,*censZv,*censXv; vector 
*qs,*Y,*rr,*bhatub,*xi,*xit,*zit,*rowX,*rowZ,*difX,*zi,*z1,*tmpv1,*tmpv2,*lrisk; vector *dpx,*dpz,*dpx1,*dpz1; int sing,itt,i,j,k,l,s,c,pmax,totrisk,convproblems=0,nagam=0, *n= calloc(1,sizeof(int)), *nx= calloc(1,sizeof(int)), *px1= calloc(1,sizeof(int)); int left=0,clusterj,fixedcov,osilent,*strict=calloc(2,sizeof(int)),*indexleft=calloc((*antpers),sizeof(int)); double svarp=1,varp=0.5,nrisk,time,dummy,dtime,phattrunc,bhattrunc=0,lrr,lrrt; double *vcudif=calloc((*Ntimes)*(*px+1),sizeof(double)), *inc=calloc((*Ntimes)*(*px+1),sizeof(double)), *weightt=calloc((*Ntimes),sizeof(double)), *cifentry=calloc((*antpers),sizeof(double)), *cumentry=calloc((*antpers)*(*px+1),sizeof(double)); osilent=silent[0]; silent[0]=0; strict[0]=1; // float gasdev(),expdev(),ran1(); // robust[0]=1; fixedcov=1; n[0]=antpers[0]; nx[0]=antpers[0]; double step=ssf[0]; //if (*trans==1) for (j=0;j<*pg;j++) if (fabs(timepow[j]-1)>0.0001) {timem=1;break;} //if (*trans==2) for (j=0;j<*pg;j++) if (fabs(timepow[j])>0.0001) {timem=1;break;} for (j=0;j<*antclust;j++) { malloc_mat(*Ntimes,*px,W3t[j]); malloc_mat(*Ntimes,*px,W4t[j]); malloc_vec(*pg,W2[j]); malloc_vec(*px,W3[j]); } for (j=0;j<*Ntimes;j++) { malloc_mat(*pg,*px,Acorb[j]); malloc_mat(*px,*pg,C[j]); malloc_mat(*px,*pg,M1M2[j]); } malloc_mats(*antpers,*px,&wX,&censX,&ldesignX,&cdesignX,NULL); malloc_mats(*antpers,*pg,&wZ,&censZ,&ldesignG,&cdesignG,NULL); malloc_mats(*px,*px,&A,&AI,NULL); malloc_mats(*pg,*pg,&dVargam,&Vargam,&RobVargam,&tmpM2,&ZZ,&VarKorG,&ICGam,&CGam,&dCGam,&S,&ZZI,NULL); malloc_mats(*px,*pg,&XZAI,&tmpM3,&Ct,&dC,&XZ,&dM1M2,&M1M2t,NULL); malloc_mat(*px,*pg,tmpM4); malloc_mat(*Ntimes,*px,Delta); malloc_vecs(*px,&dpx1,&dpx,&censXv, &xit, &xi, &rowX, &difX, &tmpv1, &korG, &dB, &VdB, &AIXdN, &AIXlamt, &truncbhatt,&bhatt,NULL); malloc_vecs(*pg,&dpz1,&dpz,&censZv, &zit, &zi, &rowZ, &tmpv2,&z1,&rowG,&gam,&dgam, &ZGdN,&IZGdN,&ZGlamt,&IZGlamt,NULL); malloc_vecs(*antpers,&Y,&bhatub,&rr,&lrisk,&dN,&pbhat,&pghat,&plamt,&ciftrunk,NULL); malloc_vec((*px)+(*pg),qs); for (s=0;s<*Ntimes;s++) weightt[s]=1; if (*px>=*pg) pmax=*px; else pmax=*pg; // starting values for (j=0;j<*pg;j++) VE(gam,j)=gamma[j]; px1[0]=*px+1; for (c=0;c<*antpers;c++) if (trunkp[c]<1) {left=1; break;} for(j=0;j<*antpers;j++) for(i=0;i<=*px;i++) cumentry[i*(*antpers)+j]=0; // }}} if (fixedcov==1) // {{{ for (c=0;c<*antpers;c++) for(j=0;j=time); totrisk=totrisk+VE(lrisk,j); extract_row(ldesignX,j,xi); extract_row(ldesignG,j,zi); if (*estimator==2 && *monotone==1) { for(k=0;k0)) { for(i=1;i<=*px;i++) VE(truncbhatt,i-1)=cumentry[i*(*n)+j]; for (l=0;l<*pg;l++) { VE(zit,l)=pow(entry[j],timepow[l])*VE(zi,l); lrrt=lrrt+VE(gam,l)*VE(zit,l); } bhattrunc=vec_prod(xi,truncbhatt); phattrunc=1-exp(-exp(bhattrunc)*exp(lrrt)); } else phattrunc=0; for (l=0;l<*pg;l++) VE(zi,l)=pow(time,timepow[l])*VE(zi,l); if ((entry[j]>0)) { scl_vec_mult((1-phattrunc)*exp(bhattrunc)*exp(lrrt)/trunkp[j],xi,xit); scl_vec_mult((1-phattrunc)*exp(bhattrunc)*exp(lrrt)/trunkp[j],zit,zit); } scl_vec_mult((1-VE(plamt,j))*exp(VE(pbhat,j))*VE(rr,j)/trunkp[j],xi,dpx); scl_vec_mult((1-VE(plamt,j))*exp(VE(pbhat,j))*VE(rr,j)/trunkp[j],zi,dpz); if (entry[j]<1) { vec_subtr(dpx,xit,dpx); vec_subtr(dpz,zit,dpz); VE(plamt,j)=(VE(plamt,j)-phattrunc)/trunkp[j]; } } // }}} if (*trans==6) { // FG-parametrization model="fg" // {{{ for (l=0;l<*pg;l++) { lrr=lrr+VE(gam,l)*VE(zi,l)*pow(time,timepow[l]); VE(zi,l)= pow(time,timepow[l])*VE(zi,l); } VE(rr,j)=exp(lrr); VE(plamt,j)=1-exp(-VE(pbhat,j)*VE(rr,j)); varp=VE(plamt,j)*(1-VE(plamt,j)); 
scl_vec_mult((1-VE(plamt,j))*VE(rr,j),xi,dpx); scl_vec_mult((1-VE(plamt,j))*VE(pbhat,j)*VE(rr,j),zi,dpz); if ((entry[j]>0)) { // {{{ for(i=1;i<=*px;i++) VE(truncbhatt,i-1)=cumentry[i*(*n)+j]; extract_row(ldesignG,j,zit); extract_row(ldesignX,j,xit); for (l=0;l<*pg;l++) { VE(zit,l)=pow(entry[j],timepow[l])*VE(zit,l); lrrt=lrrt+VE(gam,l)*VE(zit,l); } bhattrunc=vec_prod(xit,truncbhatt); phattrunc=1-exp(-bhattrunc*exp(lrrt)); if (*monotone==0) { scl_vec_mult((1-phattrunc)*exp(lrrt),xit,xit); scl_vec_mult((1-phattrunc)*bhattrunc*exp(lrrt),zit,zit); vec_subtr(dpx,xit,dpx); vec_subtr(dpz,zit,dpz); scl_vec_mult(1/trunkp[j],dpx,dpx); scl_vec_mult(1/trunkp[j],dpz,dpz); } VE(plamt,j)=(VE(plamt,j)-phattrunc)/trunkp[j]; } // }}} } // }}} if (*trans==3) { // logistic // {{{ for (l=0;l<*pg;l++) { lrr=lrr+VE(gam,l)*VE(zi,l)*pow(time,timepow[l]); VE(zi,l)= pow(time,timepow[l])*VE(zi,l); } VE(rr,j)=exp(lrr); VE(plamt,j)=exp(VE(pbhat,j)+lrr)/(1+exp(VE(pbhat,j)+lrr)); varp=VE(plamt,j)*(1-VE(plamt,j)); dummy=VE(plamt,j)/(1+exp(VE(pbhat,j)+lrr)); scl_vec_mult(dummy,xi,dpx); scl_vec_mult(dummy,zi,dpz); if ((trunkp[j]<1)) { extract_row(ldesignG,j,zit); extract_row(ldesignX,j,xit); for(i=1;i<=*px;i++) VE(truncbhatt,i-1)=cumentry[i*(*antpers)+j]; bhattrunc=vec_prod(xit,truncbhatt); for (l=0;l<*pg;l++) { VE(zit,l)=pow(entry[j],timepow[l])*VE(zit,l); lrrt=lrrt+VE(gam,l)*VE(zit,l); } phattrunc= exp(bhattrunc+lrrt)/(1+exp(bhattrunc+lrrt)); dummy= phattrunc/(1+exp(bhattrunc+lrrt)); scl_vec_mult(dummy,xit,xit); scl_vec_mult(dummy,zit,zit); vec_subtr(dpx,xit,dpx); vec_subtr(dpz,zit,dpz); scl_vec_mult(1/trunkp[j],dpx,dpx); scl_vec_mult(1/trunkp[j],dpz,dpz); VE(plamt,j)=(VE(plamt,j)-phattrunc)/trunkp[j]; } } // }}} if (*trans==7) { // logistic, baseline direct parametrization // {{{ for (l=0;l<*pg;l++) { VE(zi,l)= pow(time,timepow[l])*VE(zi,l); lrr=lrr+VE(gam,l)*VE(zi,l); } VE(rr,j)=exp(lrr); VE(plamt,j)=VE(pbhat,j)*exp(lrr)/(1+VE(pbhat,j)*exp(lrr)); varp=VE(plamt,j)*(1-VE(plamt,j)); dummy=exp(lrr)/pow(1+VE(pbhat,j)*exp(lrr),2); scl_vec_mult(dummy,xi,dpx); scl_vec_mult(VE(pbhat,j)*dummy,zi,dpz); if ((trunkp[j]<1)) { extract_row(ldesignG,j,zit); extract_row(ldesignX,j,xit); for(i=1;i<=*px;i++) VE(truncbhatt,i-1)=cumentry[i*(*antpers)+j]; bhattrunc=vec_prod(xit,truncbhatt); for (l=0;l<*pg;l++) { VE(zit,l)=pow(entry[j],timepow[l])*VE(zit,l); lrrt=lrrt+VE(gam,l)*VE(zit,l); } phattrunc= bhattrunc*exp(lrrt)/(1+bhattrunc*exp(lrrt)); dummy=exp(lrrt)/pow(1+bhattrunc*exp(lrrt),2); scl_vec_mult(dummy,xit,xit); scl_vec_mult(bhattrunc*dummy,zit,zit); vec_subtr(dpx,xit,dpx); vec_subtr(dpz,zit,dpz); scl_vec_mult(1/trunkp[j],dpx,dpx); scl_vec_mult(1/trunkp[j],dpz,dpz); VE(plamt,j)=(VE(plamt,j)-phattrunc)/trunkp[j]; } } // }}} if (*trans==4) { // relative risk // {{{ for (l=0;l<*pg;l++) { VE(zi,l)= pow(time,timepow[l])*VE(zi,l); lrr=lrr+VE(gam,l)*VE(zi,l); } VE(rr,j)=lrr; VE(plamt,j)=exp(VE(pbhat,j)+lrr); varp=VE(plamt,j)*(1-VE(plamt,j)); scl_vec_mult(VE(plamt,j),xi,dpx); scl_vec_mult(VE(plamt,j),zi,dpz); if ((trunkp[j]<1)) { /*{{{*/ extract_row(ldesignG,j,zit); extract_row(ldesignX,j,xit); for(i=1;i<=*px;i++) VE(truncbhatt,i-1)=cumentry[i*(*antpers)+j]; for (l=0;l<*pg;l++) { VE(zit,l)=pow(entry[j],timepow[l])*VE(zi,l); lrrt=lrrt+VE(gam,l)*VE(zit,l); } phattrunc= exp(vec_prod(xit,truncbhatt)+exp(lrrt)); scl_vec_mult(phattrunc,xit,xit); scl_vec_mult(phattrunc,zit,zit); vec_subtr(dpx,xit,dpx); vec_subtr(dpz,zit,dpz); scl_vec_mult(1/trunkp[j],dpx,dpx); scl_vec_mult(1/trunkp[j],dpz,dpz); VE(plamt,j)=(VE(plamt,j)-phattrunc)/trunkp[j]; } /*}}}*/ 
} // }}} if (*trans==5) { // relative risk, param 2 // {{{ for (l=0;l<*pg;l++) { VE(zi,l)= pow(time,timepow[l])*VE(zi,l); lrr=lrr+VE(gam,l)*VE(zi,l); // *pow(time,timepow[l]); } VE(rr,j)=lrr; VE(plamt,j)=VE(pbhat,j)*exp(lrr); varp=VE(plamt,j)*(1-VE(plamt,j)); scl_vec_mult(exp(lrr),xi,dpx); scl_vec_mult(VE(plamt,j),zi,dpz); if ((trunkp[j]<1)) { /*{{{*/ extract_row(ldesignG,j,zit); extract_row(ldesignX,j,xit); for(i=1;i<=*px;i++) VE(truncbhatt,i-1)=cumentry[i*(*antpers)+j]; for (l=0;l<*pg;l++) { VE(zit,l)= pow(entry[j],timepow[l])*VE(zit,l); lrrt=lrrt+VE(gam,l)*VE(zit,l); } phattrunc= vec_prod(xit,truncbhatt)*exp(exp(lrrt)); scl_vec_mult(exp(lrrt),xit,xit); scl_vec_mult(phattrunc,zit,zit); vec_subtr(dpx,xit,dpx); vec_subtr(dpz,zit,dpz); scl_vec_mult(1/trunkp[j],dpx,dpx); scl_vec_mult(1/trunkp[j],dpz,dpz); VE(plamt,j)=(VE(plamt,j)-phattrunc)/trunkp[j]; }/*}}}*/ } // }}} if (*trans==8) { // log-relative risk, // {{{ for (l=0;l<*pg;l++) { VE(zi,l)= pow(time,timepow[l])*VE(zi,l); lrr=lrr+VE(gam,l)*VE(zi,l); // *pow(time,timepow[l]); } VE(rr,j)=lrr; VE(plamt,j)=VE(pbhat,j)*exp(exp(lrr)); varp=VE(plamt,j)*(1-VE(plamt,j)); scl_vec_mult(exp(exp(lrr)),xi,xi); scl_vec_mult(VE(plamt,j)*exp(lrr),zi,zi); if ((trunkp[j]<1)) { extract_row(ldesignG,j,zit); extract_row(ldesignX,j,xit); for(i=1;i<=*px;i++) VE(truncbhatt,i-1)=cumentry[i*(*antpers)+j]; for (l=0;l<*pg;l++) { VE(zit,l)= pow(entry[j],timepow[l])*VE(zit,l); lrrt=lrrt+VE(gam,l)*VE(zit,l); } phattrunc= vec_prod(xit,truncbhatt)*exp(exp(lrrt)); scl_vec_mult(exp(exp(lrrt)),xit,xit); scl_vec_mult(phattrunc*exp(lrr),zit,zit); vec_subtr(dpx,xit,dpx); vec_subtr(dpz,zit,dpz); scl_vec_mult(1/trunkp[j],dpx,dpx); scl_vec_mult(1/trunkp[j],dpz,dpz); VE(plamt,j)=(VE(plamt,j)-phattrunc)/trunkp[j]; } // }}} } // }}} scl_vec_mult(1,dpx,dpx1); scl_vec_mult(1,dpz,dpz1); VE(Y,j)=((x[j]<=time) & (cause[j]==*CA))*1; if ((itt==(*Nit-1)) && (*conservative==0)) { // {{{ for censoring distribution correction if (*monotone==1) { scl_vec_mult(1,xi,dpx1); scl_vec_mult(1,zi,dpz1); } if (KMc[j]>0.001) scl_vec_mult(weights[j]*VE(Y,j)/KMc[j],dpx1,rowX); else scl_vec_mult(weights[j]*VE(Y,j)/0.001,dpx1,rowX); vec_add(censXv,rowX,censXv); replace_row(censX,j,rowX); if (KMc[j]>0.001) scl_vec_mult(weights[j]*VE(Y,j)/KMc[j],dpz1,rowZ); else scl_vec_mult(weights[j]*VE(Y,j)/0.001,dpz1,rowZ); vec_add(censZv,rowZ,censZv); replace_row(censZ,j,rowZ); } // }}} if (*estimator==4) { if (varp>0.01 && itt>2) svarp=1/pow(varp,0.5); else svarp=1/pow(0.01,0.5); } if (*estimator==1) scl_vec_mult(pow(weights[j],0.5)*(time>entry[j]),dpx,dpx); else scl_vec_mult(pow(weights[j]*KMtimes[s]/KMc[j],0.5)*(time>entry[j]),dpx,dpx); if (*estimator==1) scl_vec_mult(pow(weights[j],0.5)*(time>entry[j]),dpz,dpz); else scl_vec_mult(pow(weights[j]*KMtimes[s]/KMc[j],0.5)*(time>entry[j]),dpz,dpz); replace_row(cdesignX,j,dpx); replace_row(cdesignG,j,dpz); if (*estimator==1 ) { if (KMc[j]<0.001) VE(Y,j)=((VE(Y,j)/0.001)-VE(plamt,j))*(time>entry[j]); else VE(Y,j)=((VE(Y,j)/KMc[j])-VE(plamt,j))*(time>entry[j]); } else if (*estimator==2) VE(Y,j)=(VE(Y,j)-VE(plamt,j)); else if (*estimator==5) if (x[j]0.5 && (itt==(*Nit-2))) silent[s]=2; // lacking convergence for this time if (itt==(*Nit-1)) // {{{ for (i=0;i<*antpers;i++) { // vec_zeros(tmpv1); vec_zeros(z1); j=clusters[i]; if (*monotone==0) extract_row(cdesignX,i,xi); if (*monotone==1) extract_row(wX,i,xi); scl_vec_mult(VE(Y,i),xi,xi); Mv(AI,xi,rowX); for (l=0;l<*px;l++) ME(W3t[j],s,l)+=VE(rowX,l); if (*fixgamma==0) { // {{{ if (*monotone==0) extract_row(cdesignG,i,zi); if 
(*monotone==1) extract_row(wZ,i,zi); scl_vec_mult(VE(Y,i),zi,zi); vM(C[s],rowX,tmpv2); vec_subtr(zi,tmpv2,rowZ); vec_add(rowZ,W2[j],W2[j]); } // }}} if (*conservative==0) { // {{{ censoring terms k=ordertime[i]; nrisk=(*antpers)-i; clusterj=clusters[k]; if (cause[k]==(*censcode)) { Mv(AI,censXv,rowX); for (l=0;l<*px;l++) ME(W3t[clusterj],s,l)+=VE(rowX,l)/nrisk; if (*fixgamma==0) { vM(C[s],rowX,tmpv2); vec_subtr(censZv,tmpv2,rowZ); // scl_vec_mult(dtime,rowZ,rowZ); for (l=0;l<*pg;l++) VE(W2[clusterj],l)+=VE(rowZ,l)/nrisk; } for (j=i;j<*antpers;j++) { clusterj=clusters[ordertime[j]]; for (l=0;l<*px;l++) ME(W3t[clusterj],s,l)-=VE(rowX,l)/pow(nrisk,2); if (*fixgamma==0) { for (l=0;l<*pg;l++) VE(W2[clusterj],l)-=VE(rowZ,l)/pow(nrisk,2); } } } // fewer where I(s <= T_i) , because s is increasing extract_row(censX,k,xi); vec_subtr(censXv,xi,censXv); extract_row(censZ,k,zi); vec_subtr(censZv,zi,censZv); } // conservative==0 }}} } // if (itt==(*Nit-1)) for (i=0;i<*antpers;i++) // }}} } // sing=0 if (*detail==1) { Rprintf("it %d, timepoint s %d, Estimate beta \n",itt,s); print_vec(bhatt); Rprintf("Information -D^2 l\n"); print_mat(AI); } } /* s=1,...Ntimes */ dummy=0; if (*fixgamma==0) { for (k=0;k<*pg;k++) dummy=dummy+fabs(VE(dgam,k)); invertS(CGam,ICGam,osilent); Mv(ICGam,IZGdN,dgam); if (isnan(vec_sum(dgam))) { if (convproblems==1) convproblems=3; else convproblems=2; if (osilent==0) print_mat(ICGam); if (osilent==0 && (nagam==0)) Rprintf("Missing values in gamma increment, omitted \n"); vec_zeros(dgam); nagam=1; } if (itt<(*Nit-1)) { scl_vec_mult(step,dgam,dgam); vec_add(gam,dgam,gam); } } // do not update estimates for last itteration if (itt<(*Nit-1)) for (s=0;s<*Ntimes;s++) { if (*fixgamma==0) vM(Acorb[s],dgam,korG); est[s]=times[s]; var[s]=times[s]; for (k=1;k<=*px;k++) { est[k*(*Ntimes)+s]=est[k*(*Ntimes)+s]+inc[k*(*Ntimes)+s]-VE(korG,k-1); dummy=dummy+fabs(inc[k*(*Ntimes)+s]-VE(korG,k-1)); } } /* s=1,...Ntimes */ if (dummy<*convc && itt<*Nit-2) itt=*Nit-2; if (*detail==1) { Rprintf(" iteration %d %d \n",itt,*Nit); Rprintf("Total sum of squares %lf \n",ssf[0]); Rprintf("Total sum of changes %lf \n",dummy); Rprintf("Gamma parameters \n"); print_vec(gam); Rprintf("Change in Gamma \n"); print_vec(dgam); Rprintf("===========================================================\n"); } // score for gamma part of model if (itt==(*Nit-1)) for (k=0;k<*pg;k++) gamscore[k]= VE(dgam,k); } /*itt lkke */ // }}} //head_matrix(cdesignX); head_matrix(wX); R_CheckUserInterrupt(); /* ROBUST VARIANCES */ vec_zeros(rowX); for (s=0;s<*Ntimes;s++) { // {{{ robust variances vec_zeros(VdB); for (i=0;i<*antclust;i++) { if (*fixgamma==0) { Mv(ICGam,W2[i],tmpv2); vM(Acorb[s],tmpv2,rowX); } extract_row(W3t[i],s,tmpv1); vec_subtr(tmpv1,rowX,difX); replace_row(W4t[i],s,difX); vec_star(difX,difX,tmpv1); vec_add(tmpv1,VdB,VdB); if (*resample==1) { if ((s==0) & (*fixgamma==0)) for (c=0;c<*pg;c++) gamiid[c*(*antclust)+i]=gamiid[c*(*antclust)+i]+VE(tmpv2,c); for (c=0;c<*px;c++) {l=i*(*px)+c; biid[l*(*Ntimes)+s]=biid[l*(*Ntimes)+s]+VE(difX,c);} } if (*fixgamma==0) if (s==0) { for (j=0;j<*pg;j++) for (k=0;k<*pg;k++) ME(RobVargam,j,k)=ME(RobVargam,j,k)+VE(tmpv2,j)*VE(tmpv2,k);} } /* for (i=0;i<*antclust;i++) */ for (k=1;k<*px+1;k++) var[k*(*Ntimes)+s]=VE(VdB,k-1); } /* s=0..Ntimes*/ // }}} /* MxA(RobVargam,ICGam,tmpM2); MxA(ICGam,tmpM2,RobVargam);*/ /* print_mat(RobVargam); */ if (*fixgamma==0) { for (j=0;j<*pg;j++) {gamma[j]=VE(gam,j); for (k=0;k<*pg;k++) { vargamma[k*(*pg)+j]=ME(RobVargam,j,k); Dscore[k*(*pg)+j]=ME(ICGam,j,k); } } 
} if (convproblems>=1) convc[0]=convproblems; R_CheckUserInterrupt(); if (*sim==1) { comptestfunc(times,Ntimes,px,est,var,vcudif,antsim,test,testOBS,Ut,simUt,W4t,weighted,antclust,gamma2,line,timepowtest); } // {{{ freeing free_mats(&wX,&wZ,&censX,&censZ,&ldesignX,&A,&AI,&cdesignX,&ldesignG,&cdesignG, &S,&dCGam,&CGam,&ICGam,&VarKorG,&dC,&XZ,&ZZ,&ZZI,&XZAI, &Ct,&tmpM2,&tmpM3,&tmpM4,&Vargam,&dVargam, &Delta,&dM1M2,&M1M2t,&RobVargam,NULL); free_vecs(&dpx1,&dpz1,&dpx,&dpz,&censXv,&censZv,&qs,&Y,&rr,&bhatub,&dB,&dN,&VdB,&AIXdN,&AIXlamt, &bhatt,&pbhat,&plamt,&korG,&pghat,&rowG,&gam,&dgam,&ZGdN,&IZGdN, &ZGlamt,&IZGlamt,&xit,&xi,&rowX,&rowZ,&difX,&zit,&zi,&z1,&tmpv1,&tmpv2,&lrisk,&ciftrunk,&truncbhatt, NULL); for (j=0;j<*Ntimes;j++) {free_mat(Acorb[j]);free_mat(C[j]);free_mat(M1M2[j]);} for (j=0;j<*antclust;j++) {free_mat(W3t[j]); free_mat(W4t[j]); free_vec(W2[j]); free_vec(W3[j]); } free(n); free(nx); free(px1); free(strict); free(indexleft); free(vcudif); free(inc); free(weightt); free(cifentry); free(cumentry); // }}} } // }}} double mypow(double x,double p) { double val,log(),exp(); val=exp(log(x)*p); return(val); } timereg/src/cox-aalen-stratum.c0000644000176200001440000007251113520007035016206 0ustar liggesusers//#include #include #include "matrix.h" #include #include void scorestratum(times,Ntimes,designX,nx,px,designG,pg,antpers,start,stop, betaS,Nit,cu,vcu,w,mw,loglike,Iinv,Vbeta,detail,offs,mof,sim,antsim, rani,Rvcu,RVbeta, test,testOBS,Ut,simUt,Uit,XligZ,aalen,nb,id,status,wscore,ridge,ratesim, score,dhatMit,gammaiid,dmgiid, retur,robust,covariance,Vcovs,addresamp,addproc, resample,gamiid,biid,clusters,antclust,vscore,betafixed,weights,entry,exactderiv, timegroup,maxtimepoint,stratum,silent,strata) double *designX,*designG,*times,*betaS,*start,*stop,*cu,*w,*loglike,*Vbeta,*RVbeta,*vcu,*offs,*Rvcu,*Iinv,*test,*testOBS,*Ut,*simUt,*Uit,*aalen,*ridge,*score,*dhatMit,*gammaiid,*dmgiid,*Vcovs,*addproc,*gamiid,*biid,*vscore,*weights; int*covariance,*nx,*px,*pg,*antpers,*Ntimes,*mw,*Nit,*detail,*mof,*sim,*antsim,*rani,*XligZ,*nb,*id,*status,*wscore,*ratesim,*retur,*robust,*addresamp,*resample,*clusters,*antclust,*betafixed,*entry,*exactderiv,*timegroup,*maxtimepoint,*stratum,*silent,*strata; { int timing=0; clock_t c0,c1; c0=clock(); // {{{ setting up memory matrix *X,*Z,*WX,*WZ,*cdesX,*cdesX2,*cdesX3,*CtVUCt,*A,*AI; matrix *Vcov,*dYI,*Ct,*dM1M2,*M1M2t,*COV,*ZX,*ZP,*ZPX; matrix *tmp2,*tmp3,*dS,*S1,*SI,*S2,*M1,*VU,*ZXAI,*VUI; matrix *RobVbeta,*Delta,*tmpM1,*Utt,*Delta2,*tmpM2; // matrix *St[*maxtimepoint],*M1M2[*Ntimes],*C[*maxtimepoint],*ZXAIs[*Ntimes],*dYIt[*Ntimes]; // matrix *St[*Ntimes], // matrix *M1M2[*Ntimes],*C[*Ntimes],*ZXAIs[*Ntimes],*AIs[*Ntimes]; matrix *Stg[*maxtimepoint],*Cg[*maxtimepoint]; matrix *W3t[*antclust],*W4t[*antclust],*W2t[*antclust],*Uti[*antclust]; matrix *ZPX1,*ZPZ1,*ZPXo,*ZPZo; matrix *W2var[*antclust]; matrix *ZPZ[(*px)*(*stratum)+1]; vector *dA,*VdA,*MdA,*delta,*zav,*lamt,*lamtt; vector *xi,*zi,*U,*beta,*xtilde,*Gbeta,*zcol,*one,*difzzav; vector *offset,*weight,*varUthat[*maxtimepoint],*Uprofile; // vector *ZXdA[*Ntimes]; vector *ta,*ahatt,*vrisk,*tmpv1,*tmpv2,*rowX,*rowZ,*difX,*VdB; vector *W2[*antclust],*W3[*antclust],*reszpbeta,*res1dim; matrix *dAt; vector *Ui[*antclust]; int stratas=0,cin=0,ci=0,c,pers=0,i=0,j,k,l,s,s1,it,count,pmax, *imin=calloc(1,sizeof(int)), *cluster=calloc(*antpers,sizeof(int)), *ipers=calloc(*Ntimes,sizeof(int)); double S0stratum,S0,RR=1,time=0,ll,lle,llo; double tau,hati,random,scale,sumscore; double 
*cug=calloc((*maxtimepoint)*(*px+1),sizeof(double)), *timesg=calloc((*maxtimepoint),sizeof(double)); double norm_rand(); void GetRNGstate(),PutRNGstate(); /* float gasdev(),expdev(),ran1(); */ GetRNGstate(); /* to use R random normals */ if (*robust==1) { for (j=0;j<*antclust;j++) { if (*sim>=2) { malloc_mat(*maxtimepoint,*px,W4t[j]); malloc_mat(*maxtimepoint,*px,W3t[j]); } malloc_mat(*maxtimepoint,*pg,W2t[j]); malloc_mat(*maxtimepoint,*pg,Uti[j]); malloc_mat(*pg,*pg,W2var[j]); malloc_vec(*pg,Ui[j]); if (*sim>=2) malloc_vec(*px,W3[j]); } for(j=0;j<*maxtimepoint;j++) malloc_vec(*pg,varUthat[j]); } for (j=0;j<*antclust;j++) malloc_vec(*pg,W2[j]); for (c=0;c<*nx;c++) cluster[id[c]]=clusters[c]; if (*sim>=1) { if (*sim>=2) { malloc_mat(*maxtimepoint,*px,Delta); malloc_mat(*maxtimepoint,*px,tmpM1); } malloc_mat(*maxtimepoint,*pg,Delta2); malloc_mat(*maxtimepoint,*pg,tmpM2); } malloc_mat(*maxtimepoint,*pg,Utt); malloc_mats(*antpers,*px,&WX,&X,&cdesX,&cdesX2,&cdesX3,NULL); malloc_mats(*antpers,*pg,&WZ,&ZP,&Z,NULL); malloc_mats(*px,*px,&Vcov,&COV,&A,&AI,&M1,&CtVUCt,NULL); malloc_mats(*pg,*pg,&RobVbeta,&tmp2,&dS,&S1,&S2,&SI,&VU,&VUI,NULL); for (c=0;c<(*px)*(*stratum)+1;c++) malloc_mat(*pg,*pg,ZPZ[c]); malloc_mats(*pg,*px,&ZXAI,&ZX,&dM1M2,&M1M2t,NULL); malloc_mats(*px,*pg,&tmp3,&ZPX,&dYI,&Ct,NULL); malloc_mats(*px,*pg,&ZPX1,NULL); malloc_mats(*pg,*pg,&ZPZ1,NULL); malloc_mats(*px,*pg,&ZPXo,NULL); malloc_mats(*pg,*pg,&ZPZo,NULL); malloc_mat(*Ntimes,*px,dAt); malloc_vec(1,reszpbeta); malloc_vec(1,res1dim); malloc_vecs(*antpers,&weight,&lamtt,&lamt,&zcol,&Gbeta,&one,&offset,NULL); malloc_vecs(*px,&ahatt,&tmpv1,&difX,&VdB,&rowX,&xi,&dA,&VdA,&MdA,NULL); malloc_vecs(*px,&xtilde,NULL); malloc_vecs(*pg,&tmpv2,&rowZ,&zi,&U,&beta,&delta,&zav,&difzzav,&Uprofile,NULL); malloc_vec(*nb,ta); malloc_vec(*antpers,vrisk); for(j=0;j<*maxtimepoint;j++) { malloc_mat(*px,*pg,Cg[j]); malloc_mat(*pg,*pg,Stg[j]);} matrix *Cn,*M1M2n,*ZXAIn,*AIn; malloc_mat((*px)*(*Ntimes),*pg,Cn); malloc_mat((*px)*(*Ntimes),*px,AIn); malloc_mat(*pg,(*px)*(*Ntimes),M1M2n); malloc_mat(*pg,(*px)*(*Ntimes),ZXAIn); // for(j=0;j<*Ntimes;j++) { // malloc_mat(*px,*pg,C[j]); // malloc_mat(*pg,*px,M1M2[j]); // malloc_mat(*pg,*px,ZXAIs[j]); //// malloc_vec(*px,dAt[j]); malloc_mat(*px,*pg,dYIt[j]); //// malloc_vec(*pg,ZXdA[j]); malloc_mat(*pg,*pg,St[j]); // } pmax=max(*px,*pg); ll=0; for(j=0;j<*pg;j++) VE(beta,j)=betaS[j]; for(j=0;j<*antpers;j++) {VE(weight,j)=1; VE(offset,j)=1;} // }}} if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: setting up allocation %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} R_CheckUserInterrupt(); cu[0]=times[0]; for (it=0;it<*Nit || (*Nit==0 && it==0);it++) // {{{ iterations start for cox-aalen model { if (it>0) { vec_zeros(U); mat_zeros(S1); mat_zeros(A); if (*stratum==1) for(j=0;j<*px;j++) mat_zeros(ZPZ[j]); else mat_zeros(ZPZ[0]); mat_zeros(ZPX); mat_zeros(ZX); mat_zeros(X); mat_zeros(Z); mat_zeros(WX); mat_zeros(WZ); } sumscore=0; S0=0; ci=0; R_CheckUserInterrupt(); for (s=1;s<*Ntimes;s++) // {{{ going through time { time=times[s]; // vec_zeros(lamt); // {{{ reading design and computing matrix products if (s==1) { // {{{ for (c=0,count=0;((c<*nx) && (count!=*antpers));c++) { if (( (start[c]=time)) ) { for(j=0;j=0) ) ci=ci-1; } // }}} vec_zeros(rowX); vec_zeros(rowZ); if (s>1) // {{{ modifying design for next time points while ((stop[ci]=0) ) { VE(Gbeta,id[ci])=0; // vec_prod(zi,beta); for(j=0;j<*px;j++) VE(xi,j)=designX[j*(*nx)+ci]; for(j=0;j<*pg;j++) { VE(zi,j)=designG[j*(*nx)+ci]; 
VE(Gbeta,id[ci])+=VE(zi,j)*VE(beta,j); } RR=exp(VE(Gbeta,id[ci])); if (entry[ci]==1) { replace_row(X,id[ci],xi); replace_row(Z,id[ci],zi); scl_vec_mult(RR*weights[ci],xi,tmpv1);replace_row(WX,id[ci],tmpv1); scl_vec_mult(weights[ci],zi,tmpv2);replace_row(WZ,id[ci],tmpv2); VE(weight,id[ci])=weights[ci]; if (*mof==1) VE(offset,id[ci])=offs[ci]; } else { replace_row(X,id[ci],rowX);replace_row(Z,id[ci],rowZ); replace_row(WX,id[ci],rowX); replace_row(WZ,id[ci],rowZ); VE(Gbeta,id[ci])=0; VE(weight,id[ci])=0; if (*mof==1) VE(offset,id[ci])=offs[ci]; } S0+=entry[ci]*RR*weights[ci]; for(j=0;j=3) { Rprintf("___________ s=%d jump.time=%lf jump.person=%d \n",s,time,pers); // Rprintf("Z matrix\n"); // print_mat(Z); // Rprintf("X matrix (at risk)\n"); // print_mat(X); Rprintf("ZPZ matrix, pers %d %d (at risk)\n",pers,stratas); print_mat(ZPZ[stratas]); if (s>0) { print_mat(A); print_mat(ZX); print_mat(A); } } // }}} if (*stratum==0) invertS(A,AI,*silent); if (ME(AI,0,0)==0 && *stratum==0 && *silent==0) { Rprintf("additive design X'X not invertible at time (number, value): %d %lf \n",s,time); print_mat(A); } if (ME(AI,0,0)==0 && *stratum==0 && *silent==2) { Rprintf("additive design X'X not invertible at time (number, value) : %d %lf \n",s,time); print_mat(A); Rprintf("print only first time with non-invertible design X'X\n"); silent[0]=0; } if (*stratum==1) { // for (k=0;k<*px;k++) // if (fabs(ME(A,k,k))<0.000001) ME(AI,k,k)=0; else ME(AI,k,k)=1/ME(A,k,k); for (k=0;k<*px;k++) ME(AI,k,k)=0; // mat_zeros(AI); if (*px>1) { k=stratas; ME(AI,k,k) = 1/ME(A,k,k); } else ME(AI,0,0) = 1/ME(A,0,0); } scale=VE(weight,pers); if ((*stratum==1) && (*px>1) ) { S0stratum=ME(AI,stratas,stratas); scl_mat_mult(scale*S0stratum,ZPZ[stratas],ZPZo); scl_mat_mult(scale*S0stratum,ZPX,ZPXo); // printf(" %d %lf \n",strata[pers],S0stratum); // print_mat(AI); // print_mat(ZPX); // print_mat(ZPZ[strata[pers]]); // printf(" %lf %lf \n",1/S0,S0stratum); } else { scl_mat_mult(scale/S0,ZPZ[0],ZPZo); scl_mat_mult(scale/S0,ZPX,ZPXo); } extract_row(X,pers,xi); scl_vec_mult(scale,xi,xi); Mv(AI,xi,dA); MxA(ZX,AI,ZXAI); if (it==(*Nit-1)) { replace_row(dAt,s,dA); for (j=0;j<*pg;j++) for (i=0;i<*px;i++) ME(ZXAIn,j,(s-1)*(*px)+i)=ME(ZXAI,j,i); } if (s<0) { Rprintf(" test mester %d %lf %d %d \n",s,time,pers,stratas); print_vec(xi); print_mat(AI); print_vec(dA); } /* First derivative U and Second derivative S */ extract_row(Z,pers,zi); // scl_vec_mult(scale,zi,zi); Mv(ZX, dA, zav); vec_subtr(zi,zav,difzzav); scl_vec_mult(scale,difzzav,difzzav); vec_add(difzzav,U,U); if (*betafixed==0) { MxA(ZXAI,ZPXo,tmp2); mat_subtr(ZPZo,tmp2, dS); mat_add(dS,S1,S1); scl_mat_mult(1,S1,Stg[timegroup[s]]); } if (s<0) { // {{{ Rprintf(" %d %d %lf %lf \n",pers,s,time,scale); print_vec(xi); print_vec(dA); print_vec(zi); print_vec(zav); print_vec(difzzav); print_vec(U); print_mat(A); print_mat(AI); } // }}} if (*betafixed==0 && *stratum==0) // {{{ computes second derivative for general cox-aalen model if ( (((*exactderiv==1) && (it==(*Nit-1)) && (*px>1))) || ((*exactderiv==2) && (*px>1)) ) { mat_zeros(ZPZ1); mat_zeros(ZPX1); for (i=0;i<*antpers;i++) { extract_row(WX,i,xi); VE(lamt,i)=vec_prod(xi,dA); extract_row(Z,i,zi); scl_vec_mult(VE(lamt,i),zi,rowZ); replace_row(ZP,i,rowZ); extract_row(X,i,xi); for(j=0;j=1) { Rprintf("=============Iteration %d =============== \n",it); Rprintf("Estimate beta \n"); print_vec(beta); Rprintf("delta beta \n"); print_vec(delta); Rprintf("Score D l\n"); print_vec(U); Rprintf("Information -D^2 l\n"); print_mat(SI); }; if (*betafixed==0 && 
(*Nit>0)) vec_add(beta,delta,beta); for (k=0;k<*pg;k++) sumscore=sumscore+fabs(VE(U,k)); if ((sumscore<0.0000001) & (it<(*Nit)-2)) { it=*Nit-2; } } /* it */ // }}} //scl_mat_mult( (double) 1/(*antclust),SI,SI); if (*detail>=2) Rprintf("Fitting done \n"); if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: fitting done %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} R_CheckUserInterrupt(); // vec_zeros(Gbeta); lle=0; llo=0; ci=0; for (k=0;k<*pg;k++) score[k]=VE(U,k); mat_zeros(A); mat_zeros(ZX); mat_zeros(X); mat_zeros(Z); mat_zeros(WX); mat_zeros(WZ); for (s=1;s<*Ntimes;s++) { // {{{ terms for robust variances time=times[s]; cu[s]=times[s]; vcu[s]=times[s]; Rvcu[timegroup[s]]=times[s]; cug[timegroup[s]]=times[s]; timesg[timegroup[s]]=times[s]; Ut[timegroup[s]]=times[s]; R_CheckUserInterrupt(); if (*robust==1) { sumscore=0; S0=0; // {{{ reading design and computing matrix products if (s==1) { // {{{ for (c=0,count=0;((c<*nx) && (count!=*antpers));c++) { if (( (start[c]=time)) ) { for(j=0;j=0) ) ci=ci-1; } // }}} vec_zeros(rowX); vec_zeros(rowZ); if (s>1) // {{{ modifying design for next time points while ((stop[ci]=0) ) { VE(Gbeta,id[ci])=0; // vec_prod(zi,beta); for(j=0;j<*px;j++) VE(xi,j)=designX[j*(*nx)+ci]; for(j=0;j<*pg;j++) { VE(zi,j)=designG[j*(*nx)+ci]; VE(Gbeta,id[ci])+=VE(zi,j)*VE(beta,j); } RR=exp(VE(Gbeta,id[ci])); if (entry[ci]==1) { replace_row(X,id[ci],xi); replace_row(Z,id[ci],zi); scl_vec_mult(RR*weights[ci],xi,tmpv1);replace_row(WX,id[ci],tmpv1); scl_vec_mult(weights[ci],zi,tmpv2);replace_row(WZ,id[ci],tmpv2); VE(weight,id[ci])=weights[ci]; if (*mof==1) VE(offset,id[ci])=offs[ci]; } else { replace_row(X,id[ci],rowX);replace_row(Z,id[ci],rowZ); replace_row(WX,id[ci],rowX); replace_row(WZ,id[ci],rowZ); VE(Gbeta,id[ci])=0; VE(weight,id[ci])=0; if (*mof==1) VE(offset,id[ci])=offs[ci]; } S0+=entry[ci]*RR*weights[ci]; // for(j=0;j=2) { extract_row(WX,pers,xi); extract_row(dAt,s,dA); hati=vec_prod(xi,dA); lle=lle+log(hati); } } /* terms for robust variance */ if (*robust==1) { for (j=0;j<*pg;j++) for (i=0;i<*px;i++) ME(ZXAI,j,i)=ME(ZXAIn,j,(s-1)*(*px)+i); for (j=0;j<*px;j++) for (i=0;i<*px;i++) ME(AI,j,i)=ME(AIn,(s-1)*(*px)+j,i); if (*ratesim==1 || *retur>=1) for (i=0;i<*antpers;i++) // {{{ { cin=cluster[i]; extract_row(WX,i,rowX); extract_row(X,i,xi); extract_row(Z,i,zi); hati=vec_prod(rowX,dA); if (*ratesim==1) { // Rprintf("%d %d %d %d %lf \n",s,i,ipers[s],pers,hati); Mv(ZXAI,xi,tmpv2); vec_subtr(zi,tmpv2,tmpv2); scl_vec_mult(VE(weight,i),tmpv2,tmpv2); if (i==pers) vec_add(tmpv2,W2[cin],W2[cin]); if (*ratesim==1) {scl_vec_mult(hati,tmpv2,rowZ); vec_subtr(W2[cin],rowZ,W2[cin]); } if (*sim>=2) { Mv(AI,xi,rowX); scl_vec_mult(VE(weight,i),rowX,rowX); if (i==pers) {vec_add(rowX,W3[cin],W3[cin]); } llo=llo+hati; if (*ratesim==1) {scl_vec_mult(hati,rowX,rowX); vec_subtr(W3[cin],rowX,W3[cin]);} } } if (*retur==1) dhatMit[i*(*Ntimes)+s]=1*(i==pers)-hati; if (*retur==2) dhatMit[i]=dhatMit[i]+1*(i==pers)-hati; } /* i 1.. 
antpers */ // }}} } if ((*ratesim==0) && (*robust==1)) { // {{{ compute resampling counting process LWY style version cin=cluster[pers]; if (*sim>=2) { extract_row(WX,pers,rowX); extract_row(X,pers,xi); } extract_row(Z,pers,zi); // hati=vec_prod(rowX,dA); // if (*detail==2) Rprintf(" %d %d \n",cin,pers); Mv(ZXAI,xi,tmpv2); vec_subtr(zi,tmpv2,tmpv2); scl_vec_mult(VE(weight,pers),tmpv2,tmpv2); // squaring to deal with counting process'es // should use cholesky square of variance matrix // for (k=0;k<*pg;k++) VE(tmpv2,k)=pow(VE(tmpv2,k),2); vec_add(tmpv2,W2[cin],W2[cin]); if (*sim>=2) { Mv(AI,xi,rowX); scl_vec_mult(VE(weight,pers),rowX,rowX); vec_add(rowX,W3[cin],W3[cin]); } for (s1=timegroup[s];s1<*maxtimepoint;s1++) { // for (k=0;k<*pg;k++) VE(tmpv2,k)=sqrt(VE(W2[cin],k)); replace_row(W2t[cin],s1,tmpv2); if (*sim>=2) replace_row(W3t[cin],s1,W3[cin]); } } // }}} if (*robust==1 && *ratesim==1) for (j=0;j<*antclust;j++) { replace_row(W2t[j],timegroup[s],W2[j]); if (*sim>=2) replace_row(W3t[j],timegroup[s],W3[j]); } /* MG baseret varians beregning */ for (j=0;j<*pg;j++) for (i=0;i<*px;i++) { ME(M1M2t,j,i)=ME(M1M2n,j,(s-1)*(*px)+i); ME(Ct,i,j)= ME(Cn,(s-1)*(*px)+i,j); } MxA(Ct,VU,tmp3); MAt(tmp3,Ct,CtVUCt); MxA(Ct,SI,tmp3); // printf(" %d %d %d %d \n",0,(s-1)*(*px),*pg,s*(*px)); // print_mat(M1M2t); // print_mat(M1M2n); // print_mat(M1M2t); // mat_subsec(M1M2n,0,(s-1)*(*px),*pg,s*(*px),M1M2t); // MxA(tmp3,M1M2[s],COV); // print_mat(COV); MxA(tmp3,M1M2t,COV); for (k=1;k<=*px;k++) { if (*betafixed==0) vcu[k*(*Ntimes)+s]+=ME(CtVUCt,k-1,k-1) +2*ME(COV,k-1,k-1); // else vcu[k*(*Ntimes)+s]=vcu[k*(*Ntimes)+s]; } for (k=1;k<=*pg;k++) Ut[k*(*maxtimepoint)+timegroup[s]]=ME(Utt,timegroup[s],k-1); } // }}} if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: robust variance terms 1 %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} if (*detail>=2) Rprintf("Robust variances 1 \n"); R_CheckUserInterrupt(); ll=lle-llo; /* likelihood beregnes */ if (*detail==1) Rprintf("loglike is %lf \n",ll); if ((*robust==1)) // {{{ robust variances { for (s=1;s<*maxtimepoint;s++) { vec_zeros(VdB); mat_zeros(Vcov); for (j=0;j<*antclust;j++) { // {{{ if (s==1 && *detail==3) { Rprintf("================================================= %d \n",j); print_mat(W2t[j]); print_vec(W2[j]); print_mat(Stg[s]); print_mat(S1); print_mat(SI); } // counting process style simulation // if (s==1) // if (*ratesim==0) for (k=0;k<*pg;k++) VE(W2[j],k)=sqrt(VE(W2[j],k)); Mv(SI,W2[j],tmpv2); if (*sim>=2) { Mv(Cg[s],tmpv2,rowX); extract_row(W3t[j],s,tmpv1); vec_add(tmpv1,rowX,difX); if (*betafixed==1) scl_vec_mult(1,tmpv1,difX); replace_row(W4t[j],s,difX); vec_star(difX,difX,tmpv1); vec_add(tmpv1,VdB,VdB); } if (s==1) if (*betafixed==0) { // for (c=0;c<*pg;c++) gamiid[c*(*antclust)+j]=gamiid[c*(*antclust)+j]+VE(tmpv2,c); for (c=0;c<*pg;c++) gamiid[c*(*antclust)+j]=VE(tmpv2,c); } if (*resample==1) { for (c=0;c<*px;c++) {l=j*(*px)+c; // biid[l*(*maxtimepoint)+s]=biid[l*(*maxtimepoint)+s]+VE(difX,c); biid[l*(*maxtimepoint)+s]=VE(difX,c); } } if (*covariance==1 && (*sim>=2)) { for (k=0;k<*px;k++) for (c=0;c<*px;c++) ME(Vcov,k,c)=ME(Vcov,k,c)+VE(difX,k)*VE(difX,c); } Mv(Stg[s],tmpv2,rowZ); extract_row(W2t[j],s,tmpv2); if (*betafixed==0) { vec_subtr(tmpv2,rowZ,zi); replace_row(Uti[j],s,zi); } else replace_row(Uti[j],s,tmpv2); vec_star(zi,zi,tmpv2); vec_add(tmpv2,varUthat[s],varUthat[s]); } // }}} /* j in clusters */ if (*betafixed==0) for (i=0;i<*pg;i++) vscore[(i+1)*(*maxtimepoint)+s]=VE(varUthat[s],i); if (*sim>=2) // {{{ for 
(k=1;k<*px+1;k++) { Rvcu[k*(*maxtimepoint)+s]=VE(VdB,k-1); if (*covariance==1) { for (j=0;j<*px;j++) { l=(k-1)*(*px)+j; Vcovs[l*(*maxtimepoint)+s]=ME(Vcov,k-1,j); } } } // }}} } /* s=1 ..maxtimepoints */ } /* if robust==1 */ // }}} if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: variance terms 2 %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} if (*detail>=2) Rprintf("Robust variances 2 \n"); if (*betafixed==0) for (j=0;j<*antclust;j++) { Mv(SI,W2[j],tmpv2); for (c=0;c<*pg;c++) for (k=0;k<*pg;k++) ME(RobVbeta,c,k)=ME(RobVbeta,c,k)+VE(W2[j],c)*VE(W2[j],k); for (k=0;k<*pg;k++) gammaiid[j*(*pg)+k]=VE(tmpv2,k); } MxA(RobVbeta,SI,tmp2); MxA(SI,tmp2,RobVbeta); if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: variance terms 3 %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} R_CheckUserInterrupt(); for(j=0;j<*pg;j++) { betaS[j]= VE(beta,j); loglike[0]=lle; loglike[1]=ll; for (k=0;k<*pg;k++){ Iinv[k*(*pg)+j]=ME(SI,j,k); Vbeta[k*(*pg)+j]=-ME(VU,j,k); RVbeta[k*(*pg)+j]=-ME(RobVbeta,j,k); } } if (timing==2) { // {{{ c1=clock(); printf ("\telapsed CPU time: variance terms 4 %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} // for(j=0;j<*antclust;j++) print_mat(Uti[j]); if (*detail>=2) Rprintf("simulations starts \n"); if (*sim>=1) { // {{{ score process simulations // Rprintf("Simulations start N= %ld \n",(long int) *antsim); tau=times[*Ntimes-1]-times[0]; if (*sim>=2) for (i=1;i<=*px;i++) VE(rowX,i-1)=cug[i*(*maxtimepoint)+(*maxtimepoint-1)]; for (s=1;s<*maxtimepoint;s++) { // {{{ /* Beregning af OBS teststrrelser */ time=timesg[s]-times[0]; // FIX if (*sim>=2) { for (i=1;i<=*px;i++) { VE(xi,i-1)=fabs(cug[i*(*maxtimepoint)+s])/sqrt(Rvcu[i*(*maxtimepoint)+s]); if (VE(xi,i-1)>testOBS[i-1]) testOBS[i-1]=VE(xi,i-1); } scl_vec_mult(time/tau,rowX,difX); for (i=1;i<=*px;i++) VE(xi,i-1)=cug[i*(*maxtimepoint)+s]; vec_subtr(xi,difX,difX); for (i=0;i<*px;i++) { VE(difX,i)=fabs(VE(difX,i)); l=(*px+i); if (VE(difX,i)>testOBS[l]) testOBS[l]=VE(difX,i); } } if (*wscore>=1) { /* sup beregnes i R */ if ((s>*wscore) && (s<*maxtimepoint-*wscore)) {extract_row(Utt,s,rowZ); for (i=0;i<*pg;i++) VE(rowZ,i) = VE(rowZ,i)/sqrt(VE(varUthat[s],i)); replace_row(Utt,s,rowZ); /* scaled score process */ } else {vec_zeros(rowZ); replace_row(Utt,s,rowZ);} } for (k=1;k<=*pg;k++) Ut[k*(*maxtimepoint)+s]=ME(Utt,s,k-1); } // }}} *s=1..maxtimepoint Beregning af obs teststrrelser if (*detail>=2) Rprintf("simulations starts for real %d \n",*sim); for (k=1;k<=*antsim;k++) { R_CheckUserInterrupt(); if (*sim>=2) mat_zeros(Delta); mat_zeros(Delta2); vec_zeros(tmpv1); for (i=0;i<*antclust;i++) { /* random=gasdev(&idum); */ random=norm_rand(); if (*sim>=2) { scl_mat_mult(random,W4t[i],tmpM1); mat_add(tmpM1,Delta,Delta); } scl_mat_mult(random,Uti[i],tmpM2); mat_add(tmpM2,Delta2,Delta2); } if (*sim>=2) extract_row(Delta,*maxtimepoint-1,tmpv1); for (s=1;s<*maxtimepoint;s++) { time=timesg[s]-times[0]; if (*sim>=2) { // {{{ scl_vec_mult(time/tau,tmpv1,xi); extract_row(Delta,s,rowX); vec_subtr(rowX,xi,difX); if (*addresamp==1) { if (k<51) { for (i=0;i<*px;i++) {l=(k-1)*(*px)+i; addproc[l*(*maxtimepoint)+s]=ME(Delta,s,i);}} } for (i=0;i<*px;i++) { VE(difX,i)=fabs(VE(difX,i)); l=(*px+i); if (VE(difX,i)>test[l*(*antsim)+k-1]) test[l*(*antsim)+k-1]=VE(difX,i); VE(xi,i)=fabs(ME(Delta,s,i))/sqrt(Rvcu[(i+1)*(*maxtimepoint)+s]); if (VE(xi,i)>test[i*((*antsim))+k-1]) test[i*((*antsim))+k-1]=VE(xi,i); } } // }}} if (*wscore>=1) { extract_row(Delta2,s,zi); if ((s>*wscore) && 
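/* the observed score process Utt is standardized by its estimated pointwise
   standard deviation sqrt(varUthat) only in the interior, more than *wscore
   time points from either end, and is set to zero near the boundaries; the
   supremum of the scaled process is then taken on the R side
   ("sup beregnes i R" means "the sup is computed in R") */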
(s<*maxtimepoint-*wscore)) { for (i=0;i<*pg;i++) {VE(zi,i)=fabs(ME(Delta2,s,i))/sqrt(VE(varUthat[s],i)); if (VE(zi,i)>simUt[i*(*antsim)+k-1]) simUt[i*(*antsim)+k-1]=VE(zi,i); } if (k<50) { for (i=0;i<*pg;i++) { l=(k-1)*(*pg)+i; Uit[l*(*maxtimepoint)+s]=ME(Delta2,s,i)/sqrt(VE(varUthat[s],i));}} } } /* weigted score */ else { extract_row(Delta2,s,zi); for (i=0;i<*pg;i++) { if (fabs(VE(zi,i))>simUt[i*(*antsim)+k-1]) simUt[i*(*antsim)+k-1]=fabs(VE(zi,i)); } if (k<50) { for (i=0;i<*pg;i++) { l=(k-1)*(*pg)+i; Uit[l*(*maxtimepoint)+s]=ME(Delta2,s,i);} } } /* else wscore=0 */ } /* s=1..Ntims */ } /* k=1..antsim */ } /* sim==1 */ // }}} if (timing==2) { // {{{ c1=clock(); printf ("\telapsed CPU time: before freeing %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} PutRNGstate(); /* to use R random normals */ // {{{ freeing if (*sim>=2) free_mats(&Delta,&tmpM1,NULL); if (*sim==1) free_mats(&Delta2,&tmpM2,NULL); free_mats(&Cn,&M1M2n,&ZXAIn,&AIn,NULL); free_mats(&dAt,&Utt,&WX,&X,&cdesX,&cdesX2,&cdesX3, &WZ,&ZP,&Z, &Vcov,&COV,&A,&AI,&M1,&CtVUCt, &RobVbeta,&tmp2,&dS,&S1,&S2,&SI,&VU,&VUI, &ZXAI,&ZX,&dM1M2,&M1M2t, &tmp3,&ZPX,&dYI,&Ct, &ZPX1,&ZPZ1, &ZPXo,&ZPZo,NULL); free_vecs(&reszpbeta,&res1dim,&weight,&lamtt,&lamt,&zcol,&Gbeta,&one,&offset, &ahatt,&tmpv1,&difX,&VdB,&rowX,&xi,&dA,&VdA,&MdA, &xtilde, &tmpv2,&rowZ,&zi,&U,&beta,&delta,&zav,&difzzav,&Uprofile, &ta,&vrisk,NULL); if (*robust==1) { for (j=0;j<*antclust;j++) { if (*sim>=2) free_mat(W3t[j]); if (*sim>=2) free_mat(W4t[j]); if (*sim>=2) free_vec(W3[j]); free_mat(W2t[j]); free_mat(W2var[j]); free_mat(Uti[j]); free_vec(Ui[j]); } for (j=0;j<*maxtimepoint;j++) free_vec(varUthat[j]); } for (j=0;j<*antclust;j++) free_vec(W2[j]); // for (j=0;j<*Ntimes;j++) { //// free_mat(C[j]);free_mat(M1M2[j]); free_mat(ZXAIs[j]); //// free_vec(ZXdA[j]); //// free_mat(St[j]); // } for (c=0;c<(*px)*(*stratum)+1;c++) free_mat(ZPZ[c]); for(j=0;j<*maxtimepoint;j++) { free_mat(Cg[j]); free_mat(Stg[j]);} free(cluster); free(ipers); free(imin); free(cug); free(timesg); // }}} if (timing==2) { // {{{ c1=clock(); printf ("\telapsed CPU time: after freeing %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} } timereg/src/matrix.h0000644000176200001440000002206713520007035014154 0ustar liggesusers#include #include #include #include #include #include #include #include #include #include #include #include #include #define ME(matrix,row,col) (((matrix)->entries)[(col) * ((matrix)->nr) + (row)]) #define ME3(matrix3,dim1,row,col) (((matrix3)->entries)[(dim1)*(((matrix3)->nr)*((matrix3)->nc))+(col)*((matrix3)->nr)+(row)]) #define VE(vector,index) (((vector)->entries)[(index)]) #define oops(s) {error((s));} #define max(a,b) ( ((a) > (b)) ? (a) : (b) ) #define min(a,b) ( ((a) > (b)) ? 
(b) : (a) ) #define malloc_mat(NR, NC, M) { (M) = Calloc(1,matrix); ((M)->nr) = (NR); ((M)->nc) = (NC); ((M)->entries) = Calloc(((NR)*(NC)) , double);} #define malloc_mat3(DIM,NR, NC, M3) {(M3) = Calloc(1,matrix); ((M3)->dim)=(DIM); ((M3)->nr) = (NR); ((M3)->nc) = (NC); ((M3)->entries) = Calloc(((DIM)*(NR)*(NC)) , double);} #define malloc_vec(L, V) { (V) = Calloc(1,vector); ((V)->length) = (L); ((V)->entries) = Calloc((L), double);} typedef struct{ int dim; int nr; int nc; double *entries; } matrix3; typedef struct{ int nr; int nc; double *entries; } matrix; typedef struct{ int length; double *entries; } vector; typedef struct{ double timec; int callc; } counter; /* void malloc_mat(int *nrow, int *ncol, matrix *M); */ void free_mat3(matrix3 *M); void free_mat(matrix *M); /* void malloc_vec(int *length, vector *V); */ void free_vec(vector *V); int nrow_matrix(matrix *M); int ncol_matrix(matrix *M); int length_vector(vector *v); void print_a_matrix(matrix *M); extern void F77_SUB(dpotri)(const char* uplo, const int* n, double* a, const int* lda, int* info); extern void F77_SUB(dpotrf)(const char* uplo, const int* n, double* a, const int* lda, int* info); extern void F77_SUB(dgemm)(const char *transa, const char *transb, const int *m, \ const int *n, const int *k, const double *alpha,\ const double *a, const int *lda,\ const double *b, const int *ldb,\ const double *beta, double *c, const int *ldc); extern void F77_SUB(dgemv)(const char *trans, const int *m, const int *n, const double *alpha, const double *a, const int *lda, const double *x, const int *incx, const double *beta, double *y, const int *incy); extern void F77_SUB(dgetrf)(const int* m, const int* n, double* a, const int* lda, int* ipiv, int* info); extern void F77_SUB(dgetri)(const int* n, double* a, const int* lda, int* ipiv, double* work, const int* lwork, int* info); extern void F77_SUB(dqrdc2)(double *x, int *ldx, int *n, int *p, double *tol, int *rank, double *qraux, int *pivot, double *work); extern void F77_SUB(dtrco)(double*, int*, int*, double*, double*, int*); extern void F77_SUB(dgecon)(const char* norm, const int* n, const double* a, const int* lda, const double* anorm, double* rcond, double* work, int* iwork, int* info); extern double F77_NAME(dlange)(const char* norm, const int* m, const int* n, const double* a, const int* lda, double* work); void MtM(matrix *M, matrix *A); void cumsumM(matrix *M, matrix *Mout,int rev,int weighted,double *weights); void cumsumM1pM2(matrix *M1, matrix *M2,matrix *At[],int rev,int weighted,double *weights,int nindex, int *index); void cumsumMpM(matrix *M1,matrix *At[],int rev,int weighted,double *weights, int nindex,int *index); void invertSPD(matrix *A, matrix *AI); void invertSPDunsafe(matrix *A, matrix *AI); void Mv(matrix *M, vector *v1, vector *v2); void vM(matrix *M, vector *v1, vector *v2); vector *vec_star(vector *v1, vector *v2, vector *v3); double vec_sum(vector *v); double vec_prod(vector *v1,vector *v2); double vec_min(vector *v, int *imin); void mat_zeros(matrix *M); void vec_zeros(vector *v); void print_mat(matrix *M); void print_vec(vector *v); vector *extract_row(matrix *M, int row_to_get, vector *v); void replace_row(matrix *M, int row_to_set, vector *v); void vec_add(vector *v1, vector *v2, vector *v3); vector *scl_vec_mult(double scalar, vector *v1, vector *v2); matrix *scl_mat_mult(double scalar, matrix *m1, matrix *m2); matrix *mat_copy(matrix *m1, matrix *m2); vector *vec_copy(vector *v1, vector *v2); void mat_subsec(matrix *m1, int rowStart, int colStart, int 
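/* The declarations in this header form the small column-major linear-algebra
   layer used by all the C fitting code: ME(M,i,j) and VE(v,i) address entries
   directly (storage is entries[(col)*nr + (row)]), malloc_mat/malloc_vec
   allocate with Calloc, Mv(M,v,out) computes out = M v, MxA(M,A,out) computes
   out = M A, and free_mat/free_vec release the storage.  A minimal
   illustrative sketch, not part of the package and assuming only the
   declarations in this header:

       matrix *A; vector *x, *y;
       malloc_mat(2, 2, A); malloc_vec(2, x); malloc_vec(2, y);
       ME(A,0,0) = 1.0; ME(A,1,1) = 2.0;   // diagonal matrix, rest is zero
       VE(x,0) = 3.0;   VE(x,1) = 4.0;
       Mv(A, x, y);                        // y = A x, i.e. (3, 8)
       free_mat(A); free_vec(x); free_vec(y);
*/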
rowStop, int colStop, matrix *m2); matrix *mat_transp(matrix *m1, matrix *m2); void vec_subtr(vector *v1, vector *v2, vector *v3); void mat_subtr(matrix *m1, matrix *m2, matrix *m3); void mat_add(matrix *m1, matrix *m2, matrix *m3); void vec_add_mult(vector *v1, vector *v2, double s, vector *v3); void MtA(matrix *M, matrix *A, matrix *Mout); void MAt(matrix *M, matrix *A, matrix *Mout); void invert(matrix *A, matrix *AI); void invertS(matrix *A, matrix *AI,int silent); void invertUnsafe(matrix *A, matrix *AI); void invertUnsafeS(matrix *A, matrix *AI,int silent); void cholesky(matrix *A, matrix *AI); void choleskyunsafe(matrix *A, matrix *AI); void MxA(matrix *M, matrix *A, matrix *Mout); void R_CheckUserInterrupt(void); void print_clock(clock_t *intime,int i); void update_clock(clock_t *intime, counter *C); void zcntr(counter *C); void print_counter(int i, counter *C); void head_matrix(matrix *M); void head_vector(vector *V); void identity_matrix(matrix *M); void malloc_mats(int nrow, int ncol, ...); void malloc_vecs(int length, ...); void free_mats(matrix **M, ...); void free_vecs(vector **V, ...); vector *vec_ones(vector *v); void replace_col(matrix *M, int col_to_set, vector *v); vector *extract_col(matrix *M, int col_to_get, vector *v); void Cpred(double *cum,int *nx,int *px,double *xval,int *nxval,double *pred,int *tminus); void sindex(int *index, double *jump, double *eval, int *njump, int *neval,int *strict); void nclusters(int *npers,int *clusters, int *nclust, int *mclust); void clusterindex(int *clusters, int *nclust,int *npers,int *idclust, int *clustsize, int *mednum,int *num,int *firstclustid); void clusterindexdata(int *clusters,int *nclust,int *npers,int *idclust,int *clustsize,int *mednum, int *num,double *data, int *p,double *nydata); void comptest(double *times,int *Ntimes,int *px,double *cu,double *vcu, double *vcudif,int *antsim,double *test,double *testOBS, double *Ut,double *simUt,matrix **W4t,int *weighted,int *antpers); double tukey(double x,double b); void smoothB(double *designX,int *nx,int *p,double *bhat,int *nb,double *b,int *degree,int *coef); void comptestfunc(double *times,int *Ntimes,int *px,double *cu,double *vcu, double *vcudif,int *antsim,double *test,double *testOBS, double *Ut,double *simUt,matrix **W4t,int *weighted,int *antpers, double *gamma,int *line,double *timepowtest); void itfitsemi(double *times,int *Ntimes,double *x,int *delta,int *cause,double *KMc, double *z,int *antpers,int *px,int *Nit,double *score,double *hess, double *est,double *var,int *sim,int *antsim,int *rani,double *test, double *testOBS,double *Ut,double *simUt,int *weighted, double *gamma, double *vargamma,int *semi,double *zsem,int *pg,int *trans,double *gamma2, int *CA,int *line,int *detail,double *biid,double *gamiid,int *resample, double *timepow,int *clusters,int *antclust,double *timepowtest,int *silent,double *convc, double *weight,double *entry,double *trunkp,int *estimator,int *fixgamma ,int *stratum, int *ordertime,int *robust,double *ssf,double *KMtimes, double *gamscore,double *Dscore,int *monotone); void bubble_sort(double *val,int *list,int n); void LevenbergMarquardt(matrix *S,matrix *SI,vector *U,vector *delta,double *lm,double *step); void readXt2(int *antpers,int *nx,int *p,double *designX, double *start,double *stop,int *status,int pers,matrix *X,double time); void readXt(int *antpers,int *nx,int *p,double *designX,double *start,double *stop, int *status,int pers,matrix *X,double time,int *clusters,int *cluster,int *id) ; void readXZt(int *antpers,int 
*nx,int *px,double *designX,int *pg,double *designG, double *start,double *stop,int *status,int pers,matrix *X, matrix *WX,matrix *Z,matrix *WZ,double time,int *clusters, int *cluster,int *ls,int stat,int l,int *id,int s,int medw); void readXZtsimple(int *antpers,int *nx,int *px,double *designX,int *pg,double *designG, double *start,double *stop,int *status,int pers,matrix *X, matrix *Z,double time, int s, int *id); void resmeansemi(double *times,int *Ntimes,double *x,int *delta,int *cause, double *KMc,double *z,int *antpers,int *px,int *Nit, double *score,double *hess,double *est,double *var,int *sim, int *antsim,int *rani,double *test,double *testOBS,double *Ut, double *simUt,int *weighted,double *gamma,double *vargamma,int *semi, double *zsem,int *pg,int *trans,double *gamma2,int *CA, int *line,int *detail,double *biid,double *gamiid,int *resample, double *timepow,int *clusters,int *antclust,double *timepowtest,int *silent,double *convc,double *tau, int *funcrestrict,int *causeS, double *weights,double *KMtimes); timereg/src/comptest.c0000644000176200001440000002464513520007035014505 0ustar liggesusers//#include #include #include #include "matrix.h" void comptest(times,Ntimes,px,cu,vcu,vcudif,antsim,test,testOBS,Ut, simUt,W4t,weighted,antpers) double *times,*cu,*vcu,*vcudif,*test,*testOBS,*Ut,*simUt; int *px,*Ntimes,*antsim,*weighted,*antpers; matrix **W4t; { matrix *Delta,*tmpM1; vector *tmpv1,*rowX,*xi,*difX,*ssrow,*VdB; int i,k,l,s,c; double xij,vardif,tau,time,dtime,random,fabs(),sqrt(); double norm_rand(); void GetRNGstate(),PutRNGstate(); /* float gasdev(),expdev(),ran1(); */ malloc_vec(*px,tmpv1); malloc_vec(*px,rowX); malloc_vec(*px,xi); malloc_vec(*px,difX); malloc_vec(*px,ssrow); malloc_vec(*px,VdB); malloc_mat(*Ntimes,*px,Delta); malloc_mat(*Ntimes,*px,tmpM1); // Rprintf("Simulations start N= %ld \n",(long int) *antsim); GetRNGstate(); /* to use R random normals */ tau=times[(*Ntimes-1)]-times[0]; Ut[0]=times[0]; if (*weighted>=1) { for (s=1;s<*Ntimes;s++) { vec_zeros(VdB); for (i=0;i<*antpers;i++) { extract_row(W4t[i],s,tmpv1); extract_row(W4t[i],*Ntimes-1,rowX); scl_vec_mult((times[s]-times[0])/tau,rowX,rowX); vec_subtr(tmpv1,rowX,difX); vec_star(difX,difX,rowX); vec_add(rowX,VdB,VdB); } for (k=1;k<=*px;k++) { vcudif[k*(*Ntimes)+s]=VE(VdB,k-1); } } } /* weighted==1 */ for (i=1;i<=*px;i++){ VE(rowX,i-1)=cu[i*(*Ntimes)+(*Ntimes-1)]; } /* Computation of observed teststatistics */ for (s=1;s<*Ntimes;s++){ time=times[s];dtime=times[s]-times[s-1]; scl_vec_mult((time-times[0])/tau,rowX,difX); for (i=1;i<=*px;i++) { xij=fabs(cu[i*(*Ntimes)+s])/sqrt(vcu[i*(*Ntimes)+s]); /* Rprintf(" %lf %lf %ld \n",xij,testOBS[i-1],i); Rprintf(" %lf %lf \n",cu[i*(*Ntimes)+s],vcu[i*(*Ntimes)+s]); */ if (xij>testOBS[i-1]) { testOBS[i-1]=xij; } } for (i=1;i<=*px;i++){ VE(xi,i-1)=cu[i*(*Ntimes)+s]; } vec_subtr(xi,difX,difX); vec_star(difX,difX,ssrow); Ut[s]=time; for (i=0;i<*px;i++) { if (*weighted>=1) { vardif=vcudif[(i+1)*(*Ntimes)+s]; } else { vardif=1; } if (*weighted>=1) { if ((s>*weighted) && (s<*Ntimes-*weighted)){ VE(difX,i)=VE(difX,i)/sqrt(vardif); } else { VE(difX,i)=0.0; } } else { VE(difX,i)=VE(difX,i); } Ut[(i+1)*(*Ntimes)+s]=VE(difX,i); c=(*px)+i; if (fabs(VE(difX,i))>testOBS[c]) { testOBS[c]=fabs(VE(difX,i)); } c=2*(*px)+i; if ((s>*weighted) && (s<*Ntimes-*weighted)){ testOBS[c]=testOBS[c]+VE(ssrow,i)*dtime/vardif; } } } /* for (i=0;i<3*(*px);i++) Rprintf(" %lf \n",testOBS[i]); */ /* simulation of testprocesses and teststatistics */ for (k=1;k<=*antsim;k++) { mat_zeros(Delta); 
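/* multiplier resampling of the iid decomposition: each replicate k draws one
   standard normal per subject (norm_rand), scales that subject's iid
   representation W4t[i] by it and sums over subjects, giving a realization
   Delta of the limiting Gaussian process of the cumulative estimates; the
   sup- and integrated statistics of Delta are accumulated in test[] and later
   compared with the observed values in testOBS[] */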
vec_zeros(tmpv1); for (i=0;i<*antpers;i++) { /* random=gasdev(&idum); */ random=norm_rand(); scl_mat_mult(random,W4t[i],tmpM1); mat_add(tmpM1,Delta,Delta); } extract_row(Delta,*Ntimes-1,tmpv1); for (s=1;s<*Ntimes;s++) { time=times[s]-times[0]; dtime=times[s]-times[s-1]; scl_vec_mult(time/tau,tmpv1,xi); extract_row(Delta,s,rowX); vec_subtr(rowX,xi,difX); vec_star(difX,difX,ssrow); for (i=0;i<*px;i++) { VE(xi,i)=fabs(ME(Delta,s,i))/sqrt(vcu[(i+1)*(*Ntimes)+s]); if (VE(xi,i)>test[i*(*antsim)+k-1]){ test[i*(*antsim)+k-1]=VE(xi,i); } if (*weighted>=1) { vardif=vcudif[(i+1)*(*Ntimes)+s]; } else { vardif=1; } if (*weighted>=1) { if ((s>*weighted) && (s<*Ntimes-*weighted)){ VE(difX,i)=VE(difX,i)/sqrt(vardif); } else { VE(difX,i)=0.0; } } else { VE(difX,i)=VE(difX,i); } if (k<51) { l=(k-1)*(*px)+i; simUt[l*(*Ntimes)+s]=VE(difX,i); } c=(*px+i); VE(difX,i)=fabs(VE(difX,i)); if (VE(difX,i)>test[c*(*antsim)+k-1]) { test[c*(*antsim)+k-1]=VE(difX,i); } c=2*(*px)+i; if ((s>*weighted) && (s<*Ntimes-*weighted)) { test[c*(*antsim)+k-1]+=VE(ssrow,i)*dtime/vardif; } } } /* s=1..Ntimes */ } /* k=1..antsim */ PutRNGstate(); /* to use R random normals */ free_mat(Delta); free_mat(tmpM1); free_vec(VdB); free_vec(rowX); free_vec(difX); free_vec(xi); free_vec(tmpv1); free_vec(ssrow); } void comptestM(times,Ntimes,px,cu,vcu,vcudif,antsim,test,testOBS,Ut,simUt,W4t,weighted,antpers,cu0,argmax) double *times,*cu,*vcu,*vcudif,*test,*testOBS,*Ut,*simUt,*cu0,*argmax; int *px,*Ntimes,*antsim,*weighted,*antpers; matrix *W4t[]; { matrix *Delta,*tmpM1; vector *tmpv1,*rowX,*xi,*difX,*ssrow,*VdB; int i,k,l,s,c,u,t; double xij,vardif,tau,time,dtime,random,fabs(),sqrt(); double ixij,mu,ms,mt,tu,ts,tt,uhat,dmus,dmts,icxij; double norm_rand(); void GetRNGstate(),PutRNGstate(); /* float gasdev(),expdev(),ran1(); */ malloc_vec(*px,tmpv1); malloc_vec(*px,rowX); malloc_vec(*px,xi); malloc_vec(*px,difX); malloc_vec(*px,ssrow); malloc_vec(*px,VdB); malloc_mat(*Ntimes,*px,Delta); malloc_mat(*Ntimes,*px,tmpM1); Rprintf("Simulations start N= %ld \n",(long int) *antsim); GetRNGstate(); /* to use R random normals */ tau=times[(*Ntimes-1)]-times[0]; if (*weighted>=1) { for (s=1;s<*Ntimes;s++) { vec_zeros(VdB); for (i=0;i<*antpers;i++) { extract_row(W4t[i],s,tmpv1); extract_row(W4t[i],*Ntimes-1,rowX); scl_vec_mult((times[s]-times[0])/tau,rowX,rowX); vec_subtr(tmpv1,rowX,difX); vec_star(difX,difX,rowX); vec_add(rowX,VdB,VdB); } for (k=1;k<=*px;k++) { vcudif[k*(*Ntimes)+s]=VE(VdB,k-1); } } } /* weighted==1 */ for (i=1;i<=*px;i++) { VE(rowX,i-1)=cu[i*(*Ntimes)+(*Ntimes-1)]; } uhat= VE(rowX,0)/tau; Ut[0]=times[0]; /* Computation of observed teststatistics */ for (s=1;s<*Ntimes;s++){ time=times[s]-times[0]; dtime=times[s]-times[s-1]; scl_vec_mult(time/tau,rowX,difX); for (i=1;i<=*px;i++) { xij=fabs(cu[i*(*Ntimes)+s])/sqrt(vcu[i*(*Ntimes)+s]); if (xij>testOBS[i-1]) { testOBS[i-1]=xij; } c=3*(*px); testOBS[c]=testOBS[c]+cu[i*(*Ntimes)+s]*cu[i*(*Ntimes)+s]*dtime; /* Rprintf(" %lf \n",testOBS[c]); */ } for (i=1;i<=*px;i++){ VE(xi,i-1)=cu[i*(*Ntimes)+s]; } vec_subtr(xi,difX,difX); vec_star(difX,difX,ssrow); Ut[s]=times[s]; for (i=0;i<*px;i++) { if (*weighted>=1){ vardif=vcudif[(i+1)*(*Ntimes)+s]; }else{ vardif=1; } if (*weighted>=1) { if ((s>*weighted) && (s<*Ntimes-*weighted)) { VE(difX,i)=VE(difX,i)/sqrt(vardif); } else { VE(difX,i)=0; } } else { VE(difX,i)=VE(difX,i); } Ut[(i+1)*(*Ntimes)+s]=VE(difX,i); c=(*px); if (fabs(VE(difX,i))>testOBS[c]) { testOBS[c]=fabs(VE(difX,i)); } c=2*(*px); if ((s>*weighted) && (s<*Ntimes-*weighted)) { 
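/* Cramer-von Mises type component: the squared (optionally variance-scaled)
   deviation of B_hat(t) from the straight line through its endpoint is
   integrated over the interior time points, more than *weighted points away
   from either end, and stored in testOBS[2*px] */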
testOBS[c]=testOBS[c]+VE(ssrow,i)*dtime/vardif; } } /* konveksitet */ if (s > *Ntimes){ ts=times[s]; ms=cu[1*(*Ntimes)+s]; for (t=s+1;t<*Ntimes;t++) { tt=times[t]; mt=cu[1*(*Ntimes)+t]; ixij=0; icxij=0; for (u=s;utestOBS[c]) { testOBS[c]=fabs(xij); /* Rprintf(" %lf %lf %lf %lf \n",ts,tt,tu,xij); */ } ixij=ixij+dtime*xij*xij; xij=(mu-ms)-(mt-ms)*(tu-ts)/(tt-ts); c=5*(*px); if (xij>testOBS[c]) { testOBS[c]=xij; } icxij=icxij+dtime*xij; } c=4*(*px); if (ixij>testOBS[c]){ testOBS[c]=ixij; } c=6*(*px); if (icxij>testOBS[c]){ testOBS[c]=icxij; } } } } /* simulation of testprocesses and teststatistics */ for (k=1;k<=*antsim;k++) { mat_zeros(Delta); vec_zeros(tmpv1); for (i=0;i<*antpers;i++) { /* random=gasdev(&idum); */ random=norm_rand(); scl_mat_mult(random,W4t[i],tmpM1); mat_add(tmpM1,Delta,Delta); } extract_row(Delta,*Ntimes-1,tmpv1); uhat=VE(tmpv1,0)/tau; for (s=1;s<*Ntimes;s++) { time=times[s]-times[0]; dtime=times[s]-times[s-1]; scl_vec_mult(time/tau,tmpv1,xi); extract_row(Delta,s,rowX); vec_subtr(rowX,xi,difX); vec_star(difX,difX,ssrow); for (i=0;i<*px;i++) { VE(xi,i)=fabs(ME(Delta,s,i))/sqrt(vcu[(i+1)*(*Ntimes)+s]); if (VE(xi,i)>test[i*(*antsim)+k-1]){ test[i*(*antsim)+k-1]=VE(xi,i); } c=3*(*px); test[c*(*antsim)+k-1]=test[c*(*antsim)+k-1]+ME(Delta,s,i)*ME(Delta,s,i)*dtime; if (*weighted>=1){ vardif=vcudif[(i+1)*(*Ntimes)+s]; } else { vardif=1; } if (*weighted>=1) { if ((s>*weighted) && (s<*Ntimes-*weighted)){ VE(difX,i)=VE(difX,i)/sqrt(vardif); } else{ VE(difX,i)=0.0; } } else { VE(difX,i)=VE(difX,i); } if (k<51) { l=(k-1)*(*px)+i; simUt[l*(*Ntimes)+s]=VE(difX,i); } c=(*px+i); VE(difX,i)=fabs(VE(difX,i)); if (VE(difX,i)>test[c*(*antsim)+k-1]){ test[c*(*antsim)+k-1]=VE(difX,i); } c=2*(*px)+i; if ((s>*weighted) && (s<*Ntimes-*weighted)){ test[c*(*antsim)+k-1]=test[c*(*antsim)+k-1]+VE(ssrow,i)*dtime/vardif; } } if (s>*Ntimes) { ts=times[s]; ms=ME(Delta,0,s); for (t=s+1;t<*Ntimes;t++){ tt=times[t]; mt=ME(Delta,0,t); ixij=0; icxij=0; for (u=s;utest[c*(*antsim)+k-1]) { test[c*(*antsim)+k-1]=fabs(xij); /* Rprintf("local %lf %lf %lf %lf \n",ts,tt,tu,xij); */ } ixij=ixij+dtime*xij*xij; dmus=cu0[i*(*Ntimes)+u]- cu0[i*(*Ntimes)+s]; dmts=cu0[i*(*Ntimes)+t]- cu0[i*(*Ntimes)+s]; xij=(mu-ms)-(mt-ms)*(tu-ts)/(tt-ts); xij=dmus+(mu-ms)-(dmts+mt-ms)*(tu-ts)/(tt-ts); c=5*(*px); if (xij>test[c*(*antsim)+k-1]) { test[c*(*antsim)+k-1]=xij; /* Rprintf("conveks %lf %lf %lf %lf \n",ts,tt,tu,xij); */ } icxij=icxij+dtime*xij; } c=4*(*px); if (ixij>test[c*(*antsim)+k-1]){ test[c*(*antsim)+k-1]=ixij; } c=6*(*px); if (icxij>test[c*(*antsim)+k-1]){ test[c*(*antsim)+k-1]=icxij; } } } } /* s=1..Ntimes */ } /* k=1..antsim */ PutRNGstate(); /* to use R random normals */ free_mat(Delta); free_mat(tmpM1); free_vec(VdB); free_vec(rowX); free_vec(difX); free_vec(xi); free_vec(tmpv1); free_vec(ssrow); } timereg/src/smooth2.c0000644000176200001440000000302513520007035014227 0ustar liggesusers//#include #include #include "matrix.h" void smooth2B(designX,nx,p,bhat,nb,b,degree,coef) double *designX,*bhat,*b; int *coef,*nx,*p,*degree,*nb; { matrix *mat1,*mat2,*I,*XWy,*Y,*sm1,*sm2,*sY,*RES; matrix *sm1sm2t; // not in original int med,j,k,s,count,starti=0,d; double x,w; malloc_mats(*nx,*degree+1,&mat1,&mat2,NULL); malloc_mats(*nx,*p-1,&Y,NULL); malloc_mats((*degree+1),*p-1,&XWy,&RES,NULL); malloc_mats((*degree+1),*degree+1,&I,NULL); for (s=0;s<*nb;s++){ med=0; x=bhat[s]; count=0; for (j=starti;((j<*nx) && (designX[j]x-(*b)) && (med==0)) {med=1; starti=j;} if (fabs(designX[j]-x)<*b) { w=tukey(designX[j]-x,*b);/*Rprintf("%lf %lf 
\n",designX[j]-x,w);*/ ME(mat1,count,0)=1.0; ME(mat2,count,0)=w; for (d=1;d<=*degree;d++) { ME(mat1,count,d)=pow(designX[j]-x,d); ME(mat2,count,d)=w*ME(mat1,count,d); } for (k=1;k<*p;k++){ ME(Y,count,k-1)=w*designX[k*(*nx)+j]; } count=count+1; } } /* */ malloc_mats(count,*degree+1,&sm1,&sm2,NULL); malloc_mats(count,*p-1,&sY,NULL); malloc_mat(count,count,sm1sm2t); mat_subsec(mat1,0,0,count-1,*degree,sm1); mat_subsec(mat2,0,0,count-1,*degree,sm2); mat_subsec(Y,0,0,count-1,*p-2,sY); MtA(sm1,sm2,sm1sm2t); invert(sm1sm2t,I); MtA(sm1,sY,XWy); MxA(I,XWy,RES); for (k=1;k<*p;k++){ bhat[k*(*nb)+s]=ME(RES,*coef,k-1); } free_mats(&sm1,&sm2,&sY,sm1sm2t,NULL); } free_mats(&mat1,&mat2,&Y,&XWy,&RES,&I,NULL); } timereg/src/pred.c0000644000176200001440000001020113520007035013560 0ustar liggesusers//#include #include /* organize indeces to different clusters in matrix of size nclust x maxclust */ void nclusters(int *npers,int *clusters, int *nclust, int *uniqueclust, int *mclust) { int i,maxclust=0; for (i=0;i<*npers;i++){ if (nclust[clusters[i]]==0) uniqueclust[0]+=1; nclust[clusters[i]]+=1; if (nclust[clusters[i]]>maxclust) maxclust=nclust[clusters[i]]; } mclust[0]=maxclust; } void clusterindex(int *clusters,int *nclust,int *npers,int *idclust,int *clustsize,int *mednum,int *num, int *firstclustid) { int i; if (*mednum==0) { for (i=0;i<*npers;i++){ idclust[(clustsize[clusters[i]])*(*nclust)+clusters[i]]=i; clustsize[clusters[i]]+=1; if (clustsize[clusters[i]]==1) firstclustid[clusters[i]]=i; } } else { for (i=0;i<*npers;i++){ idclust[num[i]*(*nclust)+clusters[i]]=i; clustsize[clusters[i]]+=1; if (clustsize[clusters[i]]==1) firstclustid[clusters[i]]=i; } } } void atriskindex(double *start,double *stop,int *id,int *n,double *times,int *ntimes,int *nrisk,int *riskindex) { int i,j; for (j=0;j<*ntimes;j++) for (i=0;i<*n;i++) if ((start[i]=times[j])) { riskindex[(nrisk[j])*(*ntimes)+j]=id[i]; nrisk[j]+=1; } } void clusterindexdata(int *clusters,int *nclust,int *npers,int *idclust,int *clustsize,int *mednum, int *num,double *data, int *p,double *nydata) { int i,j; if (*mednum==0) { for (i=0;i<*npers;i++){ idclust[(clustsize[clusters[i]])*(*nclust)+clusters[i]]=i; for (j=0;j<*p;j++) nydata[(clustsize[clusters[i]]*(*p)+j)*(*nclust)+clusters[i]]=data[(*npers)*j+i]; clustsize[clusters[i]]+=1; } } else { for (i=0;i<*npers;i++){ idclust[num[i]*(*nclust)+clusters[i]]=i; for (j=0;j<*p;j++) nydata[(num[i]*(*p)+j)*(*nclust)+clusters[i]]=data[(*npers)*j+i]; clustsize[clusters[i]]+=1; } } } /* compute the values of a step function, ie how many of the jumps are smaller or equal to the eval points from prodlim THomas Gerds */ void sindex(int *index, double *jump, double *eval, int *N, int *NT, int *strict){ int i,t; index[0] = 0; i = 0; if (*strict==0){ for (t=0;t<*NT;t++){ while(i<*N && jump[i]<=eval[t]) i++; index[t] = i; } } else{ for (t=0;t<*NT;t++){ while(i<*N && jump[i] < eval[t]) i++; index[t] = i; } } } void bubble_sort(double *val,int *list,int n) { int c, d, t; // ini for (c = 0 ; c < ( n - 1 ); c++) list[c]=c; for (c = 0 ; c < ( n - 1 ); c++) { for (d = 0 ; d < n - c - 1; d++) { if (val[list[d]] > val[list[d+1]]) { /* Swapping */ t = list[d]; list[d] = list[d+1]; list[d+1] = t; } } } } void Cpred(cum,nx,px,xval,nxval,pred,tminus) double *cum,*xval,*pred; int *nxval,*nx,*px,*tminus; { // {{{ int j,s,c; double timex,sc1,sc2,smax,smin; smin=cum[0]; smax=cum[*nx-1]; for (s=0;s<*nxval;s++) { timex=xval[s]; pred[s]=timex; c=*nx-1; sc1=smax; sc2=smax+1; if (*tminus==0) { // {{{ if (timex< cum[0]) { // pred[s]=0; 
for(j=1;j<*px;j++) pred[j*(*nxval)+s]=0; } else if (timex> cum[*nx-1]) { // pred[s]=*nx; for(j=1;j<*px;j++) pred[j*(*nxval)+s]=cum[j*(*nx)+(*nx-1)]; } else { while ((!((timex=sc1))) && (c>=0)) { /* Rprintf(" %lf %lf %lf %ld \n",timex,sc2,sc1,c); */ sc1=cum[c-1];sc2=cum[c];c=c-1; } /* Rprintf("fr pred %lf %lf %lf %ld \n",timex,sc2,sc1,c); */ for(j=1;j<*px;j++) pred[j*(*nxval)+s]=cum[j*(*nx)+c]; // pred[s]=c+1; } // }}} } else { // tminus=TRUE if (timex<= cum[0]) { for(j=1;j<*px;j++) pred[j*(*nxval)+s]=0; } else if (timex> smax) { for(j=1;j<*px;j++) pred[j*(*nxval)+s]=cum[j*(*nx)+(*nx-1)]; } else { while ((!((timex<=sc2) && (timex>sc1))) && (c>=0)) { /* Rprintf(" %lf %lf %lf %ld \n",timex,sc2,sc1,c); */ sc1=cum[c-1];sc2=cum[c];c=c-1; } // printf("fr pred %lf %lf %lf %ld \n",timex,sc2,sc1,c); for(j=1;j<*px;j++) pred[j*(*nxval)+s]=cum[j*(*nx)+c]; } } } } // }}} timereg/src/breslow.c0000644000176200001440000005645613520007035014331 0ustar liggesusers//#include #include #include "matrix.h" void OSbreslow(times,Ntimes,designX,nx,p,antpers,start,stop,nb,bhat,cu,vcu,it,b,degree,schoen,sim,antsim,test,rani,testOBS,rvcu,cumlam,nullresid,status,id,sim2,Ut,simUt,weighted,robust) double *designX,*times,*start,*stop,*cu,*vcu,*bhat,*b,*schoen,*test,*testOBS,*rvcu,*cumlam,*Ut,*simUt; int *nx,*p,*antpers,*Ntimes,*nb,*it,*degree,*rani,*sim,*antsim,*nullresid,*status,*id,*sim2,*weighted,*robust; { matrix *ldesignX,*A,*AI,*AIX,*cdesignX,*XmavX,*cXmavX,*Aav; vector *diag,*dB,*dN,*VdB,*AIXdN,*AIXlamt,*ta,*bhatt,*pbhat,*plamt,*avx,*lrisk; vector *ssrow2,*ssrow,*vtmp,*xi,*rowX,*cumi[*antpers],*difX,*cumBLi[*antpers],*Btau,*Base[*antpers],*score; matrix *cumBL[*antpers],*cumB[*antpers],*BLsubbetaLam[*antpers]; matrix *Delta2,*Delta,*tmpM1,*tmpM2,*varBL; int supsup=0,itt,i,j,k,s,c,count,pers=0, *imin=calloc(1,sizeof(int)), *coef=calloc(1,sizeof(int)),*ps=calloc(1,sizeof(int)); double time2,rr,time=0,time1,dummy,dtime,S0,lam0t,sdBt,tau,random; double *Basei=calloc(*antpers,sizeof(double)),rvarbase, *vcudif=calloc((*Ntimes)*(*p+2),sizeof(double)); double norm_rand(); void GetRNGstate(),PutRNGstate(); if (*sim==1) { malloc_mat(*Ntimes,*p,Delta); malloc_mat(*Ntimes,*p,Delta2); malloc_mat(*Ntimes,*p,tmpM1); malloc_mat(*Ntimes,*p,tmpM2); }; if (*robust==1) { malloc_mat(*Ntimes,*p,varBL); for (j=0;j<*antpers;j++) { malloc_mat(*Ntimes,(*p)+1,cumB[j]); malloc_vec(*p,cumi[j]); malloc_mat(*Ntimes,*p,BLsubbetaLam[j]); malloc_mat(*Ntimes,*p,cumBL[j]); malloc_vec(*p,cumBLi[j]); malloc_vec(*Ntimes,Base[j]); Basei[j]=0.0; } } malloc_mat(*antpers,*p,ldesignX); malloc_mat(*antpers,*p,cdesignX); malloc_mat(*antpers,*p,XmavX); malloc_mat(*antpers,*p,cXmavX); malloc_mat(*p,*antpers,AIX); malloc_mat(*p,*p,A); malloc_mat(*p,*p,AI); malloc_mat(*p,*p,Aav); malloc_vec(*p,score); malloc_vec(*p,ssrow2); malloc_vec(*p,ssrow); malloc_vec(*p,Btau); malloc_vec(*p,vtmp); malloc_vec(*p,difX); malloc_vec(*p,xi); malloc_vec(*p,rowX); malloc_vec(*p,avx); malloc_vec(*p,diag); malloc_vec(*p,dB); malloc_vec(*p,VdB); malloc_vec(*p,AIXdN); malloc_vec(*p,AIXlamt); malloc_vec(*p,bhatt); malloc_vec(*p,dN); malloc_vec(*p,pbhat); malloc_vec(*p,plamt); malloc_vec(*p,lrisk); malloc_vec(*nb,ta); coef[0]=1; ps[0]=(*p)+2; tau=times[*Ntimes-1]; for (itt=0;itt<*it;itt++){ vec_zeros(score); for (s=1;s<*Ntimes;s++){ time=times[s]; dtime=time-times[s-1]; vec_zeros(lrisk); mat_zeros(ldesignX); for (c=0,count=0;((c<*nx) && (count!=*antpers));c++){ if ((start[c]=time)) { for(j=0;j<*p;j++) { ME(ldesignX,id[c],j)=designX[j*(*nx)+c]; } VE(lrisk,id[c])=1; if (time==stop[c] && 
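/* risk-set construction: subjects whose (start, stop] interval covers the
   current jump time contribute a row to the design matrices at this time
   point, and the subject whose interval ends exactly at the jump time with
   status == 1 is the event and is recorded in pers */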
status[c]==1){ pers=id[c]; } count=count+1; } } for(j=0;j<*nb;j++){ VE(ta,j)=fabs(bhat[j]-time); } dummy=vec_min(ta,imin); lam0t=bhat[1*(*nb)+(*imin)]; for(j=2;j<=(*p)+1;j++){ VE(bhatt,j-2)=bhat[j*(*nb)+(*imin)]; } Mv(ldesignX,bhatt,pbhat); for (j=0;j<*antpers;j++) { VE(plamt,j)=VE(lrisk,j)*exp(VE(pbhat,j)); scl_vec_mult(VE(plamt,j),extract_row(ldesignX,j,dB),dB); replace_row(cdesignX,j,dB); /* sampling corrected design */ } S0=vec_sum(plamt); vM(ldesignX,plamt,avx); scl_vec_mult(1/S0,avx,avx); for (j=0;j<*p;j++){ for (i=0;i<*p;i++) { ME(Aav,j,i)=VE(avx,i)*VE(avx,j)*S0; } } MtA(cdesignX,ldesignX,A); mat_subtr(A,Aav,A); invert(A,AI); extract_row(ldesignX,pers,AIXdN); vec_subtr(AIXdN,avx,AIXdN); Mv(AI,AIXdN,dB); vec_add(dB,score,score); schoen[s]=time; cu[s]=time; vcu[s]=time; rvcu[s]=time; cu[1*(*Ntimes)+s]=cu[1*(*Ntimes)+s-1]+(1/S0); vcu[1*(*Ntimes)+s]=0; for (k=2;k<=(*p)+1;k++){ cu[k*(*Ntimes)+s]=cu[k*(*Ntimes)+s-1]+dtime*VE(bhatt,k-2)+VE(dB,k-2)/lam0t; cumlam[k*(*Ntimes)+s]=cumlam[k*(*Ntimes)+s-1]+(dtime*VE(bhatt,k-2)*lam0t)+VE(dB,k-2); if (itt==(*it-1)) { schoen[(k-1)*(*Ntimes)+s]=VE(dB,k-2)*S0; vcu[k*(*Ntimes)+s]=vcu[k*(*Ntimes)+s-1]+(dtime/lam0t)*ME(AI,k-2,k-2); } } /* Rprintf(" \n"); */ if (itt==(*it-1)) { if (*robust==1) { vec_zeros(VdB); rvarbase=0; for (j=0;j<*antpers;j++) { extract_row(ldesignX,j,rowX); vec_subtr(rowX,avx,vtmp); Mv(AI,vtmp,xi); if (*nullresid>=0) { k=*nullresid; rr=VE(pbhat,j)+VE(rowX,k)*(cu[(k+2)*(*Ntimes)+(*Ntimes-1)]/tau-VE(bhatt,k)); rr=VE(lrisk,j)*exp(rr); } else { rr=VE(plamt,j); } scl_vec_mult(rr/S0,xi,rowX); vec_subtr(cumBLi[j],rowX,cumBLi[j]); if (j==pers){ vec_add(xi,cumBLi[j],cumBLi[j]); } replace_row(cumBL[j],s,cumBLi[j]); /* BLAM(t) sum iid */ vec_star(avx,xi,rowX); dummy=vec_sum(rowX); dummy=(1/S0)-dummy; Basei[j]=Basei[j]-dummy*rr/S0; if (j==pers){ Basei[j]=dummy+Basei[j]; } VE(Base[j],s)=Basei[j]; /* Baseline sum iid */ rvarbase=rvarbase+Basei[j]*Basei[j]; ME(cumB[j],s,0) =Basei[j]; scl_vec_mult(1/lam0t,xi,xi); scl_vec_mult(rr/S0,xi,rowX); vec_subtr(cumi[j],rowX,cumi[j]); if (j==pers) { vec_add(xi,cumi[j],cumi[j]); } /* set_row(cumB[j],s,cumi[j]); */ /* B(t) sum iid */ for (k=1;k<(*p)+1;k++) { ME(cumB[j],s,k)=VE(cumi[j],k-1); } vec_star(cumi[j],cumi[j],difX); vec_add(difX,VdB,VdB); } rvcu[1*(*Ntimes)+s]=rvarbase; for (k=2;k<(*p)+2;k++) { rvcu[k*(*Ntimes)+s]=VE(VdB,k-2); } } } } /* s */ smoothB(cu,Ntimes,ps,bhat,nb,b,degree,coef); } /* itterations lkke */ for (i=2;i<(*p)+2;i++) { VE(Btau,i-2)=cu[i*(*Ntimes)+(*Ntimes-1)]; } cu[0]=times[0]; vcu[0]=times[0]; tau=time; rvcu[0]=times[0]; /* Beregning af iid bidrag til BLam(t) - beta Lam(t) */ if (*robust==1){ for (s=1;s<*Ntimes;s++) { vec_zeros(VdB); for (j=0;j<*antpers;j++) { scl_vec_mult(1/tau,cumi[j],rowX); scl_vec_mult(cu[1*(*Ntimes)+s],rowX,difX); scl_vec_mult(VE(Base[j],s)/tau,Btau,xi); extract_row(cumBL[j],s,vtmp); vec_add(vtmp,xi,xi); vec_subtr(xi,difX,xi); replace_row(BLsubbetaLam[j],s,xi); vec_star(xi,xi,difX); vec_add(difX,VdB,VdB); replace_row(varBL,s,VdB); } } /* s */ } /* korrektion for lam0t i \int beta(s) lam0t(s) ds */ /* for (s=1;s<*Ntimes;s++) { time=times[s]; v_zero(dN);dtime=time-times[s-1]; for(j=0;j<*nb;j++) ta->ve[j]=fabs(bhat[j]-time); dummy=v_min(ta,imin); lam0t=bhat[1*(*nb)+(*imin)]; for(j=2;j<=(*p)+1;j++) {bhatt->ve[j-2]=bhat[j*(*nb)+(*imin)]/lam0t; cu[k*(*Ntimes)+s]=cu[k*(*Ntimes)+s-1]+dtime*bhatt->ve[k-2]; }; } */ if (*sim==1) { if (*sim2!=1) { ps[0]=*p+1; comptest(times,Ntimes,ps,cu,rvcu,vcudif,antsim,test,testOBS,Ut,simUt, cumB,weighted,antpers); /* for 
(s=0;s<*Ntimes;s++) { for(j=0;j<(*p)+2;j++) Rprintf(" %lf ",cu[j*(*Ntimes)+s]); Rprintf(" \n"); } */ } else { for (i=2;i<=*p+1;i++){ VE(bhatt,i-2)=cu[i*(*Ntimes)+(*Ntimes-1)]; } for (s=1;s<*Ntimes-1;s++){ /* Beregning af obs teststrrelser */ time=times[s]; dtime=time-times[s-1]; for (i=2;i<=*p+1;i++) { VE(xi,i-2)=fabs(cu[i*(*Ntimes)+s])/sqrt(rvcu[i*(*Ntimes)+s]); if (VE(xi,i-2)>testOBS[i-2]){ testOBS[i-2]=VE(xi,i-2); } } scl_vec_mult(time/tau,bhatt,difX); for (i=2;i<=*p+1;i++){ VE(xi,i-2)=cu[i*(*Ntimes)+s]; } vec_subtr(xi,difX,difX); vec_star(difX,difX,ssrow); for (i=0;i<*p;i++) {c=(*p+i); VE(difX,i)=fabs(VE(difX,i));/*sqrt(rvcu[(i+2)*(*Ntimes)+s]);*/ if (VE(difX,i)>testOBS[c]){ testOBS[c]=VE(difX,i);} c=2*(*p)+i; testOBS[c]=testOBS[c]+VE(ssrow,i)*dtime; } scl_vec_mult(1/tau,bhatt,rowX); scl_vec_mult(cu[1*(*Ntimes)+s],rowX,rowX); for (i=2;i<=*p+1;i++) { VE(xi,i-2)=cumlam[i*(*Ntimes)+s]; } vec_subtr(xi,rowX,difX); vec_star(difX,difX,ssrow); for (i=0;i<*p;i++) { c=3*(*p)+i; VE(difX,i)=fabs(VE(difX,i)); if (VE(difX,i)>testOBS[c]) testOBS[c]=VE(difX,i); c=4*(*p)+i; testOBS[c]=testOBS[c]+VE(ssrow,i)*dtime; } /* sup| BLAM(t)-beta*LAM(t)| */ /* Beregning af sup_a,s | B(a+t)-B(t) - gam t | */ if (supsup==1) { for (j=s+1;j<*Ntimes;j++){ time1=times[j]; time2=time1-time; scl_vec_mult(time2/tau,bhatt,difX); for (i=2;i<=*p+1;i++) { VE(xi,i-2)=cu[i*(*Ntimes)+s]; VE(vtmp,i-2)=cu[i*(*Ntimes)+j]; } vec_subtr(vtmp,xi,rowX); vec_subtr(rowX,difX,xi); c=5*(*p)+i; for (i=2;i<=*p+1;i++) { if (fabs(VE(xi,i-2))>testOBS[c]) testOBS[c]=fabs(VE(xi,i-2)); } } } /* supsup==1 */ } /*s=1..Ntimes Beregning af obs teststrrelser */ Rprintf(" Simulations start N= %ld \n",(long int) *antsim); GetRNGstate(); /* to use R random normals */ for (k=1;k<*antsim;k++) { if (k%50==0) Rprintf(" %ld Simulations \n",(long int) k); mat_zeros(Delta); mat_zeros(Delta2); vec_zeros(vtmp); for (i=0;i<*antpers;i++) { random=norm_rand(); scl_mat_mult(random,cumB[i],tmpM1); mat_add(tmpM1,Delta,Delta); if (*sim2==1) { scl_mat_mult(random,BLsubbetaLam[i],tmpM2); mat_add(tmpM2,Delta2,Delta2); } extract_row(cumB[i],*Ntimes-1,rowX); scl_vec_mult(random,rowX,rowX); vec_add(rowX,vtmp,vtmp); } for (s=1;s<*Ntimes;s++){ time=times[s]; dtime=time-times[s-1]; scl_vec_mult(time/tau,vtmp,xi); extract_row(Delta,s,rowX); vec_subtr(rowX,xi,difX); vec_star(difX,difX,ssrow); if (*sim2==1) { extract_row(Delta2,s,dB); vec_star(dB,dB,ssrow2); } for (i=0;i<*p;i++) { VE(difX,i)=fabs(VE(difX,i)); VE(dB,i)=fabs(VE(dB,i)); sdBt=sqrt(rvcu[(i+2)*(*Ntimes)+s]+0.03); VE(xi,i)=fabs(ME(Delta,s,i))/sdBt; if (VE(xi,i)>test[i*(*antsim)+k]) test[i*(*antsim)+k]=VE(xi,i); c=(*p+i); if (VE(difX,i)>test[c*(*antsim)+k]) test[c*(*antsim)+k]=VE(difX,i); c=2*(*p)+i; test[c*(*antsim)+k]=test[c*(*antsim)+k]+VE(ssrow,i)*dtime; if (*sim==1) { c=(3*(*p)+i); if ((VE(dB,i))>test[c*(*antsim)+k]) test[c*(*antsim)+k]=VE(dB,i); c=4*(*p)+i; test[c*(*antsim)+k]=test[c*(*antsim)+k]+VE(ssrow2,i)*dtime; } } if (supsup==1) { for (j=s+1;j<*Ntimes;j++){ /* Beregning af sup_a,s | B(a+t)-B(t) - gam t | */ time1=times[j]; time2=time1-time; scl_vec_mult(time2/tau,vtmp,difX); extract_row(Delta,j,xi); vec_subtr(xi,rowX,dB); vec_subtr(difX,dB,dB); for (i=0;i<=*p+1;i++) { c=5*(*p)+i; if (fabs(VE(dB,i))>test[c*(*antsim)+k]) test[c*(*antsim)+k]=fabs(VE(dB,i)); } } } /* supsup==1 */ } } } /* s=1..Ntimes k=1.. 
antsim, sim2==1*/ PutRNGstate(); /* to use R random normals */ } /* sim==1 */ if (*sim==1) { free_mat(Delta); free_mat(Delta2); free_mat(tmpM1); free_mat(tmpM2); } if (*robust==1) { free_mat(varBL); for (j=0;j<*antpers;j++) { free_mat(cumB[j]); free_vec(cumi[j]); free_vec(Base[j]); free_mat(cumBL[j]); free_vec(cumBLi[j]); free_mat(BLsubbetaLam[j]); } } free_vec(diag); free_vec(dB); free_vec(dN); free_vec(VdB); free_vec(AIXdN); free_vec(AIXlamt); free_vec(ta); free_vec(bhatt); free_vec(pbhat); free_vec(plamt); free_vec(avx); free_vec(lrisk); free_vec(ssrow2); free_vec(ssrow); free_vec(vtmp); free_vec(xi); free_vec(rowX); free_vec(difX); free_vec(Btau); free_mat(ldesignX); free_mat(A); free_mat(AI); free_mat(AIX); free_mat(cdesignX); free_mat(XmavX); free_mat(cXmavX); free_mat(Aav); free(coef); free(ps); free(imin); free(vcudif); free(Basei); } void semibreslow(times,Ntimes,designX, nx,px,designG, ng,pg,antpers, start,stop,nb, bhat,cu,vcu, rvcu,gamma,Vgamma, robVgamma,b,degree, it,sim,antsim, test,rani,testOBS, status,id,schoen, simUt,Ut,weighted,robust) double *designX,*times,*start,*stop,*cu,*vcu,*bhat,*b,*designG,*gamma,*Vgamma,*test,*testOBS,*rvcu,*robVgamma,*schoen,*simUt,*Ut; int *nx,*px,*antpers,*Ntimes,*nb,*ng,*pg,*it,*degree,*sim,*antsim, *rani,*status,*id,*weighted,*robust; { matrix *ldesignX, *A,*AI,*cdesignX,*ldesignG,*cdesignG; matrix *XmavX,*ZmavZ,*E2x,*E2z,*E2xz,*XX; matrix *S,*dCGam,*CGam,*ICGam,*VarKorG,*dC,*XZ,*ZZ,*ZZI,*XZAI; matrix *Ct,*C[*Ntimes],*Acorb[*Ntimes],*ZXAI,*tmpM4; matrix *RobVargam,*tmpM3,*cumB[*antpers]; matrix *W3t[*antpers],*W4t[*antpers],*AIxit[*antpers]; vector *dB,*dN,*VdB,*AIXdN,*AIXlamt,*ta,*bhatt,*pbhat,*plamt; vector *difX,*korG,*pghat,*gam,*dgam,*ZGdN,*IZGdN,*ZGlamt,*IZGlamt; vector *zi,*z1,*lrisk,*avx,*avz,*rowG,*xi,*rowX,*rowZ,*tmpv2; vector *cumi[*antpers],*W2[*antpers],*W3[*antpers]; vector *Base[*antpers]; int itt,i,j,k,s,c,count,pers=0,pmax, *imin=calloc(1,sizeof(int)), *coef=calloc(1,sizeof(int)),*ps=calloc(1,sizeof(int)); double time,dummy,dtime,lam0t,S0, *Basei=calloc((*antpers),sizeof(double)), *vcudif=calloc((*Ntimes)*(*px+2),sizeof(double)),dum2,rvarbase; if (*robust==1){ for (j=0;j<*antpers;j++) { malloc_mat(*Ntimes,*px,cumB[j]); malloc_vec(*px,cumi[j]); malloc_mat(*Ntimes,*px,W3t[j]); malloc_vec(*Ntimes,Base[j]); Basei[j]=0.0; malloc_mat(*Ntimes,*px+1,W4t[j]); malloc_vec(*pg,W2[j]); malloc_vec(*px,W3[j]); malloc_mat(*Ntimes,*px,AIxit[j]); } } malloc_mat(*antpers,*px,XmavX); malloc_mat(*antpers,*px,ldesignX); malloc_mat(*antpers,*px,cdesignX); malloc_mat(*antpers,*pg,ZmavZ); malloc_mat(*antpers,*pg,ldesignG); malloc_mat(*antpers,*pg,cdesignG); malloc_mat(*px,*px,XX); malloc_mat(*px,*px,E2x); malloc_mat(*px,*px,A); malloc_mat(*px,*px,AI); malloc_mat(*pg,*pg,tmpM3); malloc_mat(*pg,*pg,RobVargam); malloc_mat(*pg,*pg,E2z); malloc_mat(*pg,*pg,ZZ); malloc_mat(*pg,*pg,VarKorG); malloc_mat(*pg,*pg,ICGam); malloc_mat(*pg,*pg,CGam); malloc_mat(*pg,*pg,dCGam); malloc_mat(*pg,*pg,S); malloc_mat(*pg,*pg,ZZI); malloc_mat(*px,*pg,E2xz); malloc_mat(*px,*pg,XZ); malloc_mat(*px,*pg,XZAI); malloc_mat(*pg,*px,Ct); malloc_mat(*pg,*px,dC); malloc_mat(*pg,*px,ZXAI); malloc_mat(*pg,*px,tmpM4); for (j=0;j<*Ntimes;j++) { malloc_mat(*pg,*px,Acorb[j]); malloc_mat(*pg,*px,C[j]); } malloc_vec(*px,difX); malloc_vec(*px,xi); malloc_vec(*px,rowX); malloc_vec(*px,avx); malloc_vec(*px,korG); malloc_vec(*px,dB); malloc_vec(*px,VdB); malloc_vec(*px,AIXdN); malloc_vec(*px,AIXlamt); malloc_vec(*px,bhatt); malloc_vec(*pg,tmpv2); malloc_vec(*pg,rowZ); malloc_vec(*pg,avz); 
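/* semibreslow is the semiparametric version: *px covariates enter
   nonparametrically (cumulative coefficients stored in cu) and *pg covariates
   enter through the constant parameter gamma, so working vectors of length
   *px are allocated for the cumulative parts and vectors/matrices of
   dimension *pg (gam, dgam, ZGdN, CGam, ICGam, ...) for estimating gamma */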
malloc_vec(*pg,rowG); malloc_vec(*pg,zi); malloc_vec(*pg,z1); malloc_vec(*pg,gam); malloc_vec(*pg,dgam); malloc_vec(*pg,ZGdN); malloc_vec(*pg,IZGdN); malloc_vec(*pg,ZGlamt); malloc_vec(*pg,IZGlamt); malloc_vec(*antpers,lrisk); malloc_vec(*antpers,dN); malloc_vec(*antpers,pbhat); malloc_vec(*antpers,pghat); malloc_vec(*antpers,plamt); malloc_vec(*nb,ta); coef[0]=1; ps[0]=(*px)+2; if (*px>=*pg) pmax=*px; else pmax=*pg; for (j=0;j<*pg;j++){ VE(gam,j)=gamma[j]; } for (itt=0;itt<*it;itt++){ mat_zeros(Ct); mat_zeros(CGam); vec_zeros(IZGdN); vec_zeros(IZGlamt); for (s=1;s<*Ntimes;s++){ time=times[s]; dtime=time-times[s-1]; vec_zeros(lrisk); mat_zeros(ldesignX); mat_zeros(ldesignG); for (c=0,count=0;((c<*nx) && (count!=*antpers));c++){ if ((start[c]=time)) { for(j=0;j=time)) { for(j=0;jve[0],RobVargam->me[0][0]); */ } } } /* i =1 ..Antpers */ for (k=2;k<=*px+1;k++) { rvcu[k*(*Ntimes)+s]=VE(VdB,k-2); } } /* s=1 ..Ntimes */ MxA(RobVargam,ICGam,tmpM3); MxA(ICGam,tmpM3,RobVargam); } for (j=0;j<*pg;j++) { gamma[j]=VE(gam,j); for (k=0;k<*pg;k++) { Vgamma[k*(*pg)+j]=ME(ICGam,j,k); robVgamma[k*(*pg)+j]=ME(RobVargam,j,k); } } if (*sim==1) { ps[0]=(*px)+1; comptest(times,Ntimes,ps,cu,rvcu,vcudif,antsim,test,testOBS,Ut,simUt,W4t,weighted,antpers); } cu[0]=times[0]; vcu[0]=times[0]; /* korrektion for lam0t i \int beta(s) lam0t(s) ds */ /* for (s=1;s<*Ntimes;s++) { time=times[s]; dtime=time-times[s-1]; for(j=0;j<*nb;j++) ta->ve[j]=fabs(bhat[j]-time); dummy=v_min(ta,imin); lam0t=bhat[1*(*nb)+(*imin)]; for(j=2;j<=(*px)+1;j++) bhatt->ve[j-2]=bhat[j*(*nb)+(*imin)]; for (k=2;k<=(*px)+1;k++) { cu[k*(*Ntimes)+s]=cu[k*(*Ntimes)+s-1]+dtime*bhatt->ve[k-2]/lam0t; }; } */ free_mat(ldesignX); free_mat(A); free_mat(AI); free_mat(cdesignX); free_mat(ldesignG); free_mat(XmavX); free_mat(ZmavZ); free_mat(E2x); free_mat(E2xz); free_mat(XX); free_mat(S); free_mat(dCGam); free_mat(CGam); free_mat(ICGam); free_mat(VarKorG); free_mat(dC); free_mat(XZ); free_mat(ZZ); free_mat(ZZI); free_mat(XZAI); free_mat(Ct); free_mat(ZXAI); free_mat(tmpM4); free_mat(RobVargam); free_mat(tmpM3); free_vec(dB); free_vec(VdB); free_vec(AIXdN); free_vec(AIXlamt); free_vec(ta); free_vec(bhatt); free_vec(pbhat); free_vec(plamt); free_vec(difX); free_vec(korG); free_vec(pghat); free_vec(gam); free_vec(dgam); free_vec(ZGdN); free_vec(IZGdN); free_vec(IZGlamt); free_vec(zi); free_vec(z1); free_vec(lrisk); free_vec(avx); free_vec(avz); free_vec(rowG); free_vec(xi); free_vec(rowX); free_vec(rowZ); free_vec(tmpv2); for (j=0;j<*Ntimes;j++) { free_mat(Acorb[j]); free_mat(C[j]); } if (*robust==1){ for (j=0;j<*antpers;j++) { free_vec(Base[j]); free_vec(cumi[j]); free_mat(cumB[j]); free_vec(W2[j]); free_vec(W3[j]); free_mat(W3t[j]); free_mat(W4t[j]); free_mat(AIxit[j]); } } free(vcudif); free(Basei); free(coef); free(ps); free(imin); } timereg/src/Gprop-odds.c0000644000176200001440000004625013520007035014661 0ustar liggesusers//#include #include #include "matrix.h" /* ====================================================== */ void Gtranssurv(times,Ntimes,designX,nx,px,designG,ng,pg,antpers,start,stop, betaS,Nit,cu,vcu,loglike,Iinv,Vbeta,detail,sim,antsim, rani,Rvcu,RVbeta,test,testOBS,Ut,simUt,Uit,id,status,wscore, score,dhatMit,dhatMitiid,retur,exppar,sym,mlestart,stratum) double *designX,*designG,*times,*betaS,*start,*stop,*cu,*loglike,*Vbeta,*RVbeta, *vcu,*Rvcu,*Iinv,*test,*testOBS,*Ut,*simUt,*Uit,*score,*dhatMit,*dhatMitiid; int *nx,*px,*ng,*pg,*antpers,*Ntimes,*Nit,*detail,*sim,*antsim,*rani,*id,*status, *wscore,*retur,*exppar,*sym,*mlestart,*stratum; { // {{{ 
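/* Gtranssurv fits a semiparametric transformation (generalized proportional
   odds type) survival model: beta is updated by Newton-Raphson from the score
   U and derivative S1, the baseline increments dA come from the inverted
   design products, and the later blocks build iid decompositions (W2, W3t,
   W4t) for robust variances and resampling-based score-process tests,
   following the same structure as the other fitting routines above */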
matrix *ldesignX,*cdesG,*ldesignG,*cdesX,*cdesX2,*cdesX3,*cdesX4,*CtVUCt,*A,*AI; matrix *dYI,*Ct,*dM1M2,*M1M2t,*COV,*ZX,*ddesG,*ZP,*ZPX; matrix *tmp1,*tmp2,*tmp5,*tmp3,*dS,*S1,*SI,*S2,*M1,*VU,*VUI, *tmp6; // Added tmp6 matrix *RobVbeta,*Delta,*tmpM1,*Utt,*Delta2,*tmpM2; matrix *St[*Ntimes],*M1M2[*Ntimes],*C[*Ntimes],*ZXAIs[*Ntimes],*dYIt[*Ntimes]; matrix *dW3t[*antpers],*W3t[*antpers],*W4t[*antpers],*W2t[*antpers],*AIxit[*antpers],*Uti[*antpers],*tmp4,*Fst[(*Ntimes)*(*Ntimes)]; matrix *dG[*Ntimes],*cumdG,*Ft[*Ntimes],*ZcX2AIs[*Ntimes],*ZcX2[*Ntimes],*S0tI[*Ntimes],*Ident,*gt[*Ntimes],*q2t[*Ntimes],*G1mG2t[*Ntimes],*q1t[*antpers]; vector *dA,*VdA,*MdA,*delta,*zav,*lamt,*plamt,*dlamt; vector *xi,*zi,*U,*beta,*xtilde,*Gbeta,*zcol,*one,*difzzav; vector *offset,*weight,*ZXdA[*Ntimes],*varUthat[*Ntimes],*Uprofile; vector *ta,*ahatt,*risk; vector *tmpv1,*tmpv2,*rowX,*rowZ,*difX,*VdB,*lht; vector *dLamt[*antpers],*dAt[*Ntimes]; vector *W2[*antpers],*W3[*antpers],*reszpbeta,*res1dim; int t,c,robust=1,pers=0,i,j,k,l,s,it,count,pmax; int *ipers=calloc(*Ntimes,sizeof(int)); double time=0,dummy,ll; double tau,dhati,hati=0,random,sumscore; double norm_rand(); void GetRNGstate(),PutRNGstate(); for(j=0;j<*Ntimes;j++) { malloc_mat(*px,*px,Ft[j]); malloc_mat(*pg,*px,ZcX2AIs[j]); malloc_mat(*pg,*px,gt[j]); malloc_mat(*pg,*px,G1mG2t[j]); malloc_mat(*pg,*px,q2t[j]); malloc_mat(*pg,*px,ZcX2[j]); malloc_mat(*px,*px,S0tI[j]); malloc_mat(*px,*pg,dG[j]); malloc_mat(*px,*pg,C[j]); malloc_mat(*pg,*px,M1M2[j]); malloc_mat(*pg,*px,ZXAIs[j]); malloc_mat(*px,*pg,dYIt[j]); malloc_vec(*px,dAt[j]); malloc_vec(*pg,ZXdA[j]); malloc_mat(*pg,*pg,St[j]); malloc_vec(*pg,varUthat[j]); for(i=0;i<=j;i++){ malloc_mat(*px,*px,Fst[j*(*Ntimes)+i]); } } for (j=0;j<*antpers;j++) { malloc_vec(*Ntimes,dLamt[j]); malloc_mat(*Ntimes,*px,W3t[j]); malloc_mat(*Ntimes,*px,dW3t[j]); malloc_mat(*Ntimes,*px,W4t[j]); malloc_mat(*Ntimes,*pg,W2t[j]); malloc_mat(*Ntimes,*pg,Uti[j]); malloc_vec(*pg,W2[j]); malloc_vec(*px,W3[j]); malloc_mat(*Ntimes,*pg,q1t[j]); malloc_mat(*Ntimes,*px,AIxit[j]); } malloc_mat(*Ntimes,*px,Delta); malloc_mat(*Ntimes,*px,tmpM1); malloc_mat(*Ntimes,*pg,Delta2); malloc_mat(*Ntimes,*pg,tmpM2); malloc_mat(*Ntimes,*pg,Utt); malloc_mats(*antpers,*px,&ldesignX,&cdesX,&cdesX2,&cdesX3,&cdesX4,NULL); malloc_mats(*antpers,*pg,&ZP,&cdesG,&ldesignG,&ddesG,NULL); malloc_mats(*px,*px,&tmp4,&Ident,&COV,&A,&AI,&M1,&CtVUCt,NULL); malloc_mats(*pg,*pg,&RobVbeta,&tmp1,&tmp2,&dS,&S1,&S2,&SI,&VU,&VUI,NULL); malloc_mats(*pg,*px,&tmp5,&tmp3,&ZX,&dM1M2,&M1M2t,NULL); malloc_mats(*px,*pg,&cumdG,&ZPX,&dYI,&Ct,NULL); malloc_mat(*px,*pg,tmp6); malloc_vec(1,reszpbeta); malloc_vec(1,res1dim); malloc_vec(*Ntimes,lht); malloc_vecs(*antpers,&risk,&weight,&dlamt,&plamt,&lamt,&zcol,&Gbeta,&one,&offset,NULL); malloc_vecs(*px,&ahatt,&tmpv1,&difX,&VdB,&rowX,&xi,&dA,&VdA,&MdA,NULL); malloc_vecs(*px,&ta,&xtilde,NULL); malloc_vecs(*pg,&tmpv2,&rowZ,&zi,&U,&beta,&delta,&zav,&difzzav,&Uprofile,NULL); identity_matrix(Ident); // if (*px>=*pg){ pmax=*px; } else { pmax=*pg; } pmax=max(*px,*pg); ll=0; vec_ones(one); for(j=0;j<*pg;j++){ VE(beta,j)=betaS[j]; } vec_ones(difX); cu[0]=times[0]; // }}} /* Main procedure ================================== */ for (it=0;it<*Nit;it++){ vec_zeros(U); mat_zeros(S1); sumscore=0; for (s=1;s<*Ntimes;s++){ // {{{ time=times[s]; mat_zeros(ldesignX); mat_zeros(ldesignG); // vec_zeros(risk); for (c=0,count=0;((c<*nx) && (count!=*antpers));c++) { // {{{ if ((start[c]=time)) { VE(risk,id[c])=1.0; for(j=0;j=0) // first time use update hazard est for 
(j=0;j<*antpers;j++){ if (s<0 && j<5 ) { Rprintf(" %ld %ld \n",(long int) s, (long int)j); print_vec(zi); } extract_row(ddesG,j,zi); scl_vec_mult(VE(lamt,j),zi,zi); replace_row(ZP,j,zi); } MtA(ldesignX,ZP,ZPX); MxA(AI,ZPX,tmp6); // Note the use of tmp6 here, instead of tmp3 mat_subtr(dG[s-1],tmp6,dG[s]); // Note the use of tmp6 here, instead of tmp3 if (s<0) { Rprintf(" %lf \n",ME(A,0,0)); print_mat(ZPX); print_mat(tmp3); print_mat(dG[s]); } MxA(ZXAIs[s],ZPX,SI); mat_transp(SI,tmp2); MtA(ldesignG,ZP,tmp1); mat_subtr( tmp1,tmp2, dS); if (s<0) { Rprintf("=================== %lf \n",ME(A,0,0)); print_mat(tmp1); print_mat(tmp2); print_mat(dS); } if (*sym==1) { mat_transp(dS,tmp1); mat_add(tmp1,dS,dS); scl_mat_mult(0.5,dS,dS); } /* else {m_transp(dS,tmp1); sm_mlt(1,tmp1,dS); } */ mat_add(dS,S1,S1); scl_mat_mult(1.0,S1,St[s]); /* variance and other things */ if (it==((*Nit)-1)) { // {{{ replace_row(Utt,s,U); for (j=0;j<*px;j++) { // {{{ for (i=0;i<*antpers;i++){ dummy=ME(ldesignX,i,j); extract_row(cdesX2,i,xi); scl_vec_mult(dummy,xi,xi); replace_row(cdesX3,i,xi); } MtA(ldesignX,cdesX3,A); MxA(AI,A,tmp4); Mv(tmp4,dA,xi); for (k=0;k<*px;k++){ ME(Ft[s],j,k)=VE(xi,k); } VE(lht,s)=VE(lht,s-1)-ME(A,0,0)*(ME(AI,0,0)*ME(AI,0,0)); /* Rprintf(" %ld %lf %lf \n",s,lht->ve[s],AI->me[0][0]); */ MtA(ldesignG,cdesX3,ZcX2[s]); /* m_mlt(ZcX2[s],AI,ZcX2AIs[s]); */ MxA(ZX,tmp4,tmp3); mat_subtr(tmp3,ZcX2[s],tmp5); Mv(tmp5,dA,zi); for (k=0;k<*pg;k++) { ME(G1mG2t[s],k,j)=ME(G1mG2t[s],k,j)+VE(zi,k); } } // }}} /* for (i=0;i<*px;i++){ for (j=0;j<*pg;j++) dM1M2->me[j][i]=dA->ve[i]*difzzav->ve[j]; for (i=0;i<*pg;i++) for (j=0;j<*pg;j++) VU->me[i][j]=VU->me[i][j]+difzzav->ve[i]*difzzav->ve[j]; m_mlt(AI,ZPX,dYIt[s]); m_sub(Ct,dYIt[s],Ct); C[s]=m_copy(Ct,C[s]); v_star(dA,dA,VdA); m_add(dM1M2,M1M2t,M1M2t); M1M2[s]=m_copy(M1M2t,M1M2[s]); for (k=1;k<=*px;k++) vcu[k*(*Ntimes)+s]=VdA->ve[k-1]+vcu[k*(*Ntimes)+s-1]; */ for (j=0;j<*antpers;j++){ extract_row(ldesignX,j,xi); Mv(S0tI[s],xi,rowX); replace_row(AIxit[j],s,rowX); extract_row(ldesignG,j,zi); Mv(ZX,rowX,rowZ); vec_subtr(zi,rowZ,zi); replace_row(q1t[j],s,zi); VE(dLamt[j],s)=VE(plamt,j)*vec_sum(vec_star(xi,dA,rowX)); } } // }}} /* if (it==((*Nit)-1)) */ } // }}} /* Ntimes */ invertS(S1,SI,1); Mv(SI,U,delta); vec_add(beta,delta,beta); if (*detail>=1) { // {{{ Rprintf("====================Iteration %ld ==================== \n",(long int) it); Rprintf("delta \n"); print_vec(delta); Rprintf("Estimate beta \n"); print_vec(beta); Rprintf("Score D l\n"); print_vec(U); Rprintf("Information -D^2 l\n"); print_mat(SI); Rprintf("simple D2 l\n"); print_mat(S1); } // }}} for (k=0;k<*pg;k++) sumscore += VE(U,k); if ((fabs(sumscore)<0.000001) & (it<*Nit-2)) it=*Nit-2; } /* it */ for (k=0;k<*pg;k++) score[k]=VE(U,k); /* computation of q(t) */ for (s=1;s<*Ntimes;s++) { // {{{ mat_zeros(M1M2t); for (t=s;t<*Ntimes;t++) { identity_matrix(tmp4); identity_matrix(M1); for (k=s;ks) { scl_mat_mult(1,M1,tmp4); } mat_subtr(Ident,Ft[k],A); MxA(tmp4,A,M1); } if (s<0) { Rprintf(" %ld %ld %lf \n",(long int) s,(long int) t,ME(M1,0,0)); matrix *tempTranspose; malloc_mat(ncol_matrix(G1mG2t[t]), nrow_matrix(G1mG2t[t]),tempTranspose); print_mat(mat_transp(G1mG2t[t],tempTranspose)); free_mat(tempTranspose); } MxA(G1mG2t[t],M1,dM1M2); mat_add(dM1M2,M1M2t,M1M2t); } scl_mat_mult(1,M1M2t,q2t[s]); /* m_mlt(M1M2t,S0tI[s],q2t[s]); */ if (s<0){ matrix *tempTranspose; malloc_mat(ncol_matrix(q2t[s]), nrow_matrix(q2t[s]),tempTranspose); print_mat(mat_transp(q2t[s],tempTranspose)); free_mat(tempTranspose); } } // }}} /* terms 
for robust variances ============================ */ if (robust==1) { // {{{ for (s=1;s<*Ntimes;s++) { // {{{ time=times[s]; cu[s]=times[s]; vcu[s]=times[s]; Rvcu[s]=times[s]; Ut[s]=times[s]; /* terms for robust variance */ for (i=0;i<*antpers;i++) { extract_row(AIxit[i],s,xi); Mv(q2t[s],xi,rowZ); extract_row(q1t[i],s,zi); if (s==0) { print_vec(rowZ); print_vec(zi); } vec_add(zi,rowZ,rowZ); if (s==0) { print_vec(rowZ); } /* mv_mlt(ZXAIs[s],xi,tmpv2); v_sub(zi,tmpv2,tmpv2); */ if (i==ipers[s]) { for (j=0;j<*pg;j++) { for (k=0;k<*pg;k++) { ME(VU,j,k) += VE(rowZ,j)*VE(rowZ,k); } } } scl_vec_mult(VE(dLamt[i],s),rowZ,tmpv2); vec_subtr(W2[i],tmpv2,W2[i]); if (i==ipers[s]) { vec_add(rowZ,W2[i],W2[i]); } /* if (*ratesim==1) {sv_mlt(hati,tmpv2,rowZ); v_sub(W2[i],rowZ,W2[i]);} */ replace_row(W2t[i],s,W2[i]); vec_zeros(W3[i]); for (t=1;t<=s;t++) { if (i==0) { identity_matrix(tmp4); identity_matrix(M1); for (k=t;k<=s;k++) { if (k>t) { scl_mat_mult(1.0,M1,tmp4); } if (k>t || t==s) { mat_subtr(Ident,Ft[k],A); MxA(tmp4,A,M1); } } scl_mat_mult(1,M1,Fst[s*(*Ntimes)+t]); } /* Fst[s*(*Ntimes)+t]->me[0][0]=exp(-lht->ve[t]+lht->ve[s]); */ extract_row(AIxit[i],t,xi); vM(Fst[s*(*Ntimes)+t],xi,rowX); scl_vec_mult(VE(dLamt[i],t),rowX,tmpv1); vec_subtr(W3[i],tmpv1,W3[i]); if (i==ipers[t]){ vec_add(rowX,W3[i],W3[i]); } } replace_row(W3t[i],s,W3[i]); /* if (hati>0) lle=lle+log(hati); llo=llo+hati; */ /* if (*ratesim==1) {sv_mlt(hati,rowX,rowX); v_sub(W3[i],rowX,W3[i]);} */ if (*retur==1){ dhatMit[i*(*Ntimes)+s]=1*(i==pers)-hati; } } /* i=1..antpers */ } // }}} /* s=1 ..Ntimes */ MxA(SI,VU,S2); MxA(S2,SI,VU); /* ROBUST VARIANCES */ for (s=1;s<*Ntimes;s++) { // {{{ if (s<0){ print_mat(dG[s]); } vec_zeros(VdB); for (i=0;i<*antpers;i++) { Mv(SI,W2[i],tmpv2); Mv(dG[s],tmpv2,rowX); extract_row(W3t[i],s,xi); if (s>*Ntimes-5 && i<0){ print_vec(xi); } vec_add(xi,rowX,difX); replace_row(W4t[i],s,difX); if (i==-5){ print_vec(difX); } vec_star(difX,difX,tmpv1); vec_add(tmpv1,VdB,VdB); Mv(St[s],tmpv2,rowZ); extract_row(W2t[i],s,tmpv2); vec_subtr(tmpv2,rowZ,zi); replace_row(Uti[i],s,zi); vec_star(zi,zi,tmpv2); vec_add(tmpv2,varUthat[s],varUthat[s]); if (s==1) { for (j=0;j<*pg;j++){ for (k=0;k<*pg;k++){ ME(RobVbeta,j,k) += VE(W2[i],j)*VE(W2[i],k); } } } if (*retur==1) { mat_zeros(ldesignX); mat_zeros(ldesignG); for (c=0,count=0;((c<*nx) && (count!=*antpers));c++) { if ((start[c]=time)) { // VE(risk,id[c])=1.0; for(j=0;jtestOBS[i-1]) testOBS[i-1]=VE(xi,i-1); } scl_vec_mult(time/tau,rowX,difX); for (i=1;i<=*px;i++) { VE(xi,i-1)=cu[i*(*Ntimes)+s]; } vec_subtr(xi,difX,difX); for (i=0;i<*px;i++) { VE(difX,i)=fabs(VE(difX,i)); l=(*px+i); if (VE(difX,i)>testOBS[l]) testOBS[l]=VE(difX,i); } if (*wscore>=1) { /* sup beregnes i R */ if ((s>*wscore) && (s<*Ntimes-*wscore)) { extract_row(Utt,s,rowZ); for (i=0;i<*pg;i++) { VE(rowZ,i)=VE(rowZ,i)/sqrt(VE(varUthat[s],i)); } replace_row(Utt,s,rowZ); /* scaled score process */ } else { vec_zeros(rowZ); replace_row(Utt,s,rowZ); } } for (k=1;k<=*pg;k++){ Ut[k*(*Ntimes)+s]=ME(Utt,s,k-1); } } /*s=1..Ntimes Beregning af obs teststrrelser */ for (k=1;k<*antsim;k++) { mat_zeros(Delta); mat_zeros(Delta2); vec_zeros(tmpv1); for (i=0;i<*antpers;i++) { /* random=gasdev(&idum); */ random=norm_rand(); scl_mat_mult(random,W4t[i],tmpM1); mat_add(tmpM1,Delta,Delta); scl_mat_mult(random,Uti[i],tmpM2); mat_add(tmpM2,Delta2,Delta2); } extract_row(Delta,*Ntimes-1,tmpv1); for (s=1;s<*Ntimes;s++) { time=times[s]-times[0]; scl_vec_mult(time/tau,tmpv1,xi); extract_row(Delta,s,rowX); vec_subtr(rowX,xi,difX); for 
(i=0;i<*px;i++) { VE(difX,i)=fabs(VE(difX,i)); l=(*px+i); if (VE(difX,i)>test[l*(*antsim)+k]) test[l*(*antsim)+k]=VE(difX,i); VE(xi,i)=fabs(ME(Delta,s,i))/sqrt(Rvcu[(i+1)*(*Ntimes)+s]); if (VE(xi,i)>test[i*(*antsim)+k]) test[i*(*antsim)+k]=VE(xi,i); } if (*wscore>=1) { extract_row(Delta2,s,zi); if ((s>*wscore) && (s<*Ntimes-*wscore)) { for (i=0;i<*pg;i++) { VE(zi,i)=fabs(ME(Delta2,s,i))/sqrt(VE(varUthat[s],i)); if (VE(zi,i)>simUt[i*(*antsim)+k]) simUt[i*(*antsim)+k]=VE(zi,i); } if (k<50) { for (i=0;i<*pg;i++) { l=(k-1)*(*pg)+i; Uit[l*(*Ntimes)+s]=ME(Delta2,s,i)/sqrt(VE(varUthat[s],i)); } } } } else { /* weigted score */ extract_row(Delta2,s,zi); for (i=0;i<*pg;i++) { if (fabs(VE(zi,i))>simUt[i*(*antsim)+k]) simUt[i*(*antsim)+k]=fabs(VE(zi,i)); } if (k<50) { for (i=0;i<*pg;i++) { l=(k-1)*(*pg)+i; Uit[l*(*Ntimes)+s]=ME(Delta2,s,i); } } } /* else wscore=0 */ } /* s=1..Ntims */ } /* k=1..antsim */ PutRNGstate(); /* to use R random normals */ } // }}} /* sim==1 */ // {{{ freeing free_mats(&cumdG,&tmp4,&Ident,&ddesG,&Utt,&tmpM2,&VUI,&ZX,&COV, &dM1M2,&AI,&A,&tmp1,&tmp2,&tmp5,&tmp3,&ldesignX,&cdesX, &cdesX2,&cdesX4,&cdesX3,&cdesG,&ldesignG,&M1,&dS,&S1,&SI,NULL); free_mats(&tmp6,&S2,&VU,&ZP,&ZPX,&dYI,&Ct,&M1M2t,&RobVbeta,&Delta,&Delta2, &tmpM1,&CtVUCt,NULL); free_vecs(&lht,&risk,&ta,&ahatt,&Uprofile,&dlamt,&plamt,&lamt,&one,&xi,&zcol,&Gbeta,&VdA,&dA,&MdA,&xtilde,&zi,&U,&beta,&delta,&zav,&difzzav,&weight,&offset,&tmpv1,&tmpv2,&rowX,&rowZ,&difX,&VdB,&reszpbeta,&res1dim,NULL); for (j=0;j<*antpers;j++) { free_vec(dLamt[j]); free_mat(W3t[j]); free_mat(dW3t[j]); free_mat(W4t[j]); free_mat(W2t[j]); free_mat(Uti[j]); free_vec(W2[j]); free_vec(W3[j]); free_mat(q1t[j]); free_mat(AIxit[j]); } for (j=0;j<*Ntimes;j++) { free_mat(Ft[j]); free_mat(ZcX2AIs[j]); free_mat(gt[j]); free_mat(G1mG2t[j]); free_mat(q2t[j]); free_mat(ZcX2[j]); free_mat(S0tI[j]); free_mat(dG[j]); free_mat(C[j]); free_mat(M1M2[j]); free_mat(ZXAIs[j]); free_mat(dYIt[j]); free_vec(dAt[j]); free_vec(ZXdA[j]); free_mat(St[j]); free_vec(varUthat[j]); for(i=0;i<=j;i++) free_mat(Fst[j*(*Ntimes)+i]); } free(ipers); // }}} } timereg/src/dynadd.c0000644000176200001440000004773413520007035014116 0ustar liggesusers//#include #include #include "matrix.h" void dynadd(times,y,Ntimes,designX,nx,px,designA,na,pa,ahat,bhat,bhatny,nxval,antpers, start,stop,cu0,cuf,cuMS,vcu0,vcuf,robvcu,w,mw,rani,sim,antsim,cumBit,test, testOBS,status,Ut,simUt,b,cumly,retur,id,smoothXX,weighted,vculy,clusters,antclust) double *bhatny,*bhat,*ahat,*designX,*designA,*times,*y,*start,*stop,*cu0,*cuf,*cuMS, *vcu0,*vcuf,*w,*robvcu,*cumBit,*test,*testOBS,*Ut,*simUt,*b,*cumly,*vculy; int *sim,*antsim,*retur,*nxval,*nx,*px,*na,*pa,*antpers,*Ntimes,*mw,*rani,*status,*id,*smoothXX,*weighted,*clusters,*antclust; { matrix *ldesignX,*ldesignA,*cdesignX,*cdesignA,*Aa,*AaI,*A,*AI; matrix *XbXa,*XWX; vector *korf,*dB,*dA,*dR,*ahatt,*xt,*pdA,*diag,*xai,*sumx,*vone,*itot; vector *VdBly,*VdB,*VdB0,*VdBf,*fkor,*dkorB,*tmpv,*tmpv1,*tmpv2,*tmpv3,*tmpv4; vector *pahat,*pbhat,*bhatt,*pbahat,*pdbahat,*dBly; vector *dAt[*Ntimes]; matrix *cumBt[*antpers]; vector *cumhatB[*antpers],*cumB[*antpers],*cum; int silent=1; int pers=0,i,j,k,s,c,count,pmax,nmax,risk; int *coef=calloc(1,sizeof(int)),*imin=calloc(1,sizeof(int)), *ps=calloc(1,sizeof(int)),*degree=calloc(1,sizeof(int)); double time,zpers=0,dif,dtime,YoneN,kia; double *vcudif=calloc((*Ntimes)*(*px+1),sizeof(double)); for (i=0;i<*antpers;i++) { malloc_vec(*px,cumhatB[i]); malloc_vec(*px,cumB[i]); malloc_mat(*Ntimes,*px,cumBt[i]); } for 
(i=0;i<*Ntimes;i++) malloc_vec(*pa,dAt[i]); malloc_vec(*px,cum); malloc_mat(*px,*pa,XbXa); malloc_mat(*antpers,*px,ldesignX); malloc_mat(*antpers,*px,cdesignX); malloc_mat(*antpers,*pa,ldesignA); malloc_mat(*antpers,*pa,cdesignA); malloc_mat(*pa,*pa,Aa); malloc_mat(*pa,*pa,AaI); malloc_mats(*px,*px,&XWX,&A,&AI,NULL); malloc_vecs(*px,&dB,&diag,&sumx,&itot,&korf,&dBly,&bhatt,&fkor,&VdBly,&VdB,&VdB0,&VdBf,&dkorB,&tmpv,&tmpv1,&tmpv2,&tmpv3,&tmpv4,NULL); malloc_vecs(*antpers,&pbhat,&pbahat,&pahat,&pdbahat,&vone,&dR,&pdA,NULL); malloc_vec(*nxval,xt);vone=vec_ones(vone);malloc_vec(*pa,ahatt);malloc_vec(*pa,xai);malloc_vec(*pa,dA); if (*px>=*pa) pmax=*px; else pmax=*pa; if (*nx>=*na) nmax=*nx; else nmax=*na; R_CheckUserInterrupt(); for (s=1;s<*Ntimes;s++) { time=times[s]; risk=0; dtime=time-times[s-1]; mat_zeros(ldesignX); mat_zeros(ldesignA); vec_zeros(dR); for (c=0,count=0;((c=time)) { for(j=0;j=*pa) pmax=*px; else pmax=*pa; if (*pg>=pmax) pmax=*pg; if (*nx>=*na) nmax=*nx; else nmax=*na; /* Prelim. est. of gamma for var. est. loaded from (B(t)/t */ for(j=0;j<*pg;j++) {VE(gam,j)=gamma[j];VE(gamstart,j)=gamma[j];} R_CheckUserInterrupt(); for (s=1;s<*Ntimes;s++) { vec_zeros(dR); zpers=0; risk=0; time=times[s]; dtime=time-times[s-1]; mat_zeros(ldesignX); mat_zeros(ldesignG); mat_zeros(ldesignA); ctime=dtime+ctime; for (c=0,count=0;((c=time)) { for(j=0;j //#include #include #include "matrix.h" void robaalenC(times,Ntimes,designX,nx,p,antpers,start,stop,cu,vcu, robvcu,sim,antsim,retur,cumAit,test,rani,testOBS,status, Ut,simUt,id,weighted,robust,covariance,covs,resample, Biid,clusters,antclust,loglike,silent) double *designX,*times,*start,*stop,*cu,*vcu,*robvcu,*cumAit,*test,*testOBS,*Ut,*simUt,*covs,*Biid,*loglike; int *nx,*p,*antpers,*Ntimes,*sim,*retur,*rani,*antsim,*status,*id,*covariance, *weighted,*robust,*resample,*clusters,*antclust,*silent; { // {{{ matrix *ldesignX, *QR, *R, *A, *AI, *Vcov; matrix *cumAt[*antclust]; vector *diag,*dB,*dN,*VdB,*xi,*rowX,*rowcum,*difX,*vtmp; vector *cumhatA[*antclust],*cumA[*antclust],*cum; int ci,i,j,k,l,s,c,count,pers=0,*cluster=calloc(*antpers,sizeof(int)); double time,ahati,*vcudif=calloc((*Ntimes)*(*p+1),sizeof(double)); double fabs(),sqrt(); if (*robust==1) { for (i=0;i<*antclust;i++) { malloc_vec(*p,cumhatA[i]); malloc_vec(*p,cumA[i]); if (*sim==1) malloc_mat(*Ntimes,*p,cumAt[i]); } } /* print_clock(&debugTime, 0); */ malloc_mat(*antpers,*p,ldesignX); malloc_mat(*p,*p,QR); malloc_mat(*p,*p,Vcov); malloc_mat(*p,*p,A); malloc_mat(*p,*p,AI); malloc_mat(*antpers,*p,R); malloc_vec(*antpers,dN); malloc_vecs(*p,&cum,&diag,&dB,&VdB,&xi,&rowX,&rowcum,&difX,&vtmp,NULL); // for (j=0;j<*antpers;j++) cluster[j]=0; /* print_clock(&debugTime, 1); */ R_CheckUserInterrupt(); for (s=1;s<*Ntimes;s++){ time=times[s]; mat_zeros(ldesignX); for (c=0,count=0;((c<*nx) && (count!=*antpers));c++){ if ((start[c]=time)) { for(j=0;j<*p;j++) { ME(ldesignX,id[c],j) = designX[j*(*nx)+c]; } cluster[id[c]]=clusters[c]; if (time==stop[c] && status[c]==1) { pers=id[c]; } count=count+1; } } // readXt(antpers,nx,p,designX,start,stop,status,pers,ldesignX,time,clusters,cluster,id); MtM(ldesignX,A); invertS(A,AI,silent[0]); if (ME(AI,0,0)==0.0 && *silent==0){ Rprintf(" X'X not invertible at time %lf \n",time); } if (s < -1) { print_mat(AI); print_mat(A); } extract_row(ldesignX,pers,xi); Mv(AI,xi,dB); vec_star(dB,dB,VdB); vec_star(xi,dB,vtmp); ahati = vec_sum(vtmp); loglike[0]=loglike[0]-ahati/(time-times[s-1]); for (k=1;k<*p+1;k++) { cu[k*(*Ntimes)+s]=cu[k*(*Ntimes)+s-1]+VE(dB,k-1); 
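   /* dB = (X'X)^{-1} x_pers is the least-squares (Aalen) increment at this
      event time and VdB its element-wise square, so cu accumulates the
      cumulative regression coefficients B(t) while vcu accumulates a
      variance estimate built from the squared increments. */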
vcu[k*(*Ntimes)+s]=vcu[k*(*Ntimes)+s-1]+VE(VdB,k-1); VE(cum,k-1)=cu[k*(*Ntimes)+s]; } cu[s]=time; vcu[s]=time; robvcu[s]=time; if (*robust==1 || *retur==1) { vec_zeros(VdB); mat_zeros(Vcov); for (i=0;i<*antpers;i++) { ci=cluster[i]; extract_row(ldesignX,i,xi); ahati=vec_prod(xi,dB); Mv(AI,xi,rowX); if (*robust==1) { if (i==pers) { vec_add(rowX,cumhatA[ci],cumhatA[ci]); } scl_vec_mult(ahati,rowX,rowX); vec_add(rowX,cumA[ci],cumA[ci]); } if (*retur==1){ cumAit[i*(*Ntimes)+s]= cumAit[i*(*Ntimes)+s]+1*(i==pers)-ahati; } } if (*robust==1) { for (i=0;i<*antclust;i++) { vec_subtr(cumhatA[i],cumA[i],difX); if (*sim==1) replace_row(cumAt[i],s,difX); vec_star(difX,difX,vtmp); vec_add(vtmp,VdB,VdB); if (*resample==1) { for (k=0;k<*p;k++) {l=i*(*p)+k; Biid[l*(*Ntimes)+s]=VE(difX,k);} } if (*covariance==1) { for (k=0;k<*p;k++) for (c=0;c<*p;c++) ME(Vcov,k,c) = ME(Vcov,k,c) + VE(difX,k)*VE(difX,c); } } for (k=1;k<*p+1;k++) { robvcu[k*(*Ntimes)+s]=VE(VdB,k-1); if (*covariance==1) { for (c=0;c<*p;c++) { l=(k-1)*(*p)+c; covs[l*(*Ntimes)+s]=ME(Vcov,k-1,c); } } } } } /* if robust==1 || retur==1*/ R_CheckUserInterrupt(); } /* s = 1..Ntimes */ R_CheckUserInterrupt(); if (*sim==1) { comptest(times,Ntimes,p,cu,robvcu,vcudif,antsim,test,testOBS,Ut,simUt,cumAt,weighted,antclust); } cu[0]=times[0]; vcu[0]=times[0]; robvcu[0]=times[0]; free_vecs(&dN,&cum,&diag,&dB,&VdB,&xi,&rowX,&rowcum,&difX,&vtmp,NULL); free_mats(&ldesignX,&QR,&Vcov,&A,&AI,&R,NULL); if (*robust==1){ for (i=0;i<*antclust;i++) { free_vec(cumA[i]); free_vec(cumhatA[i]); if (*sim==1) free_mat(cumAt[i]); } } free(cluster); free(vcudif); } // }}} timereg/src/pava.c0000644000176200001440000000441413520007035013566 0ustar liggesusers/* pava.c: R extension, PAVA (Pool Adjacent Violators Algorithm) */ /* By Bahjat Qaqish */ /************************************************************/ #include typedef double DBL; static void wpool (DBL *y, DBL *w, int i, int j) /* Pool y[i:j] using weights w[i:j] */ { int k; DBL s0=0, s1=0; for (k=i; k<=j; k++) {s1 += y[k]*w[k]; s0 += w[k];} s1 /= s0; for (k=i; k<=j; k++) y[k] = s1; } /*************************************************/ static void wpava (DBL *y, DBL *w, int *np) /* Apply weighted pava to y[0:n-1] using weights w[0:n-1] */ { int npools, n = *np; if (n <= 1) return; n--; /* keep passing through the array until pooling is not needed */ do { int i = 0; npools = 0; while (i < n) { int k = i; /* starting at y[i], find longest non-increasing sequence y[i:k] */ while (k < n && y[k] >= y[k+1]) k++; if (y[i] != y[k]) {wpool(y, w, i, k); npools++;} i = k+1; } } while (npools > 0); } /*************************************************/ static void upool (DBL *y, int i, int j) /* Pool y[i:j] */ { int k; DBL s=0; for (k=i; k<=j; k++) {s += y[k];} s /= (j-i+1); for (k=i; k<=j; k++) y[k] = s; } /*************************************************/ static void upava (DBL *y, int *np) /* Apply pava to y[0:n-1] */ { int npools, n = *np; if (n <= 1) return; n--; /* keep passing through the array until pooling is not needed */ do { int i = 0; npools = 0; while (i < n) { int k = i; /* starting at y[i], find longest non-increasing sequence y[i:k] */ while (k < n && y[k] >= y[k+1]) k++; if (y[i] != y[k]) {upool(y, i, k); npools++;} i = k+1; } } while (npools > 0); } /*************************************************/ void pava (DBL *y, DBL *w, int *np) /* Apply pava to y[0:n-1] using weights w[0:n-1] Calls an unweighted version if all weights are equal and != 0 Does nothing if all weights are == 0 Calls a weighted version otherwise 
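   Illustrative example with equal weights: y = (3, 1, 2). The first pass
   finds the decreasing run (3, 1) and pools it to its mean 2, giving
   (2, 2, 2); the following pass finds no adjacent violators, so (2, 2, 2)
   is returned, the non-decreasing least-squares fit of y.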
*/ { int n = *np, i=1; DBL w0; if (n <= 1) return; w0 = w[0]; while (i < n && w[i] == w0) i++; if (i == n) { if (w0 == 0.0) return; /* all weights are == 0 */ else upava(y, np); /* unweighted */ } else wpava(y, w, np); /* weighted */ } timereg/src/smooth.c0000644000176200001440000000645313520007035014155 0ustar liggesusers//#include #include #include "matrix.h" double tukey(x,b) double x,b; { return((1/b)*((cos(3.141592 *(x/b))+ 1)/2) * (fabs(x/b) < 1)); } double dtukey(x,b) double x,b; { return((-3.141592/b*b)*(sin(3.141592 *(x/b))/2)*(fabs(x/b) < 1)); } void smoothB(designX,nx,p,bhat,nb,b,degree,coef) double *designX,*bhat,*b; int *coef,*nx,*p,*degree,*nb; { // {{{ matrix *mat1,*mat2,*II,*I; vector *XWy,*Y,*RES,*sY; int count,j,k,s,d; int silent=1; double tukey(),x,w,band; matrix *sm1,*sm2; malloc_mat(*nx,(*degree)+1,mat1); malloc_mat(*nx,(*degree)+1,mat2); malloc_mat(*nx,(*degree)+1,sm1); malloc_mat(*nx,(*degree)+1,sm2); malloc_vec(*nx,Y); malloc_vec(*nx,sY); malloc_vec((*degree)+1,XWy); malloc_vec((*degree)+1,RES); malloc_mat((*degree)+1,(*degree)+1,II); malloc_mat((*degree)+1,(*degree)+1,I); for (s=0;s<*nb;s++){ x=bhat[s]; for (k=1;k<*p;k++) { vec_zeros(Y); mat_zeros(mat1); mat_zeros(mat2); count=0; vec_zeros(RES); band=b[(k-1)*(*nb)+s]; /* Rprintf("band %lf %ld \n",band,k); */ for (j=0;j<*nx;j++) { if (fabs(designX[j]-x)=4) { MtA(mat1,mat2,II); invertS(II,I,silent); vM(mat1,Y,XWy); vM(I,XWy,RES); }; bhat[k*(*nb)+s]=VE(RES,*coef); } /* components */ } /* times */ free_mat(sm1); free_mat(sm2); free_mat(mat1); free_mat(mat2); free_mat(I); free_mat(II); free_vec(sY); free_vec(Y); free_vec(XWy); free_vec(RES); } // }}} void localTimeReg(designX,nx,p,times,response,bhat,nb,b,lin,dens) double *designX,*bhat,*b,*times,*response,*dens; int *nx,*p,*nb,*lin; { matrix *X,*AI,*A; vector *res,*Y,*XY; int c,j,k,s,silent=1; double band,tukey(),dtukey(),x,w,delta; j=(*lin+1)*(*p); malloc_mat(*nx,j,X); malloc_mat(j,j,A); malloc_mat(j,j,AI); malloc_vec(*nx,Y); malloc_vec(j,XY); malloc_vec(j,res); /* Rprintf("enters Local Time Regression \n"); */ for (s=0;s<*nb;s++){ x=bhat[s]; for (c=0;c<*nx;c++){ delta=times[c]-x; band=b[s]; w=tukey(delta,band); dens[s]=dens[s]+w; dens[(*nb)+s]=dens[(*nb)+s]+dtukey(delta,b[s]); for(j=0;j<*p;j++) { ME(X,c,j)=designX[j*(*nx)+c]*sqrt(w); if (*lin>=1) ME(X,c,*p+j)=designX[j*(*nx)+c]*delta*sqrt(w); if (*lin>=2) ME(X,c,2*(*p)+j)=delta*ME(X,c,*p+j); if (*lin==3) ME(X,c,3*(*p)+j)=delta*ME(X,c,2*(*p)+j); } VE(Y,c)=response[c]*sqrt(w); } dens[s]=dens[s]/(*nx); dens[(*nb)+s]=dens[(*nb)+s]/(*nx); MtA(X,X,A); invertS(A,AI,silent); if (ME(AI,0,0)==0.0){ Rprintf("Non-invertible design in local smoothing at time %lf \n",x); } vM(X,Y,XY); Mv(AI,XY,res); for (k=1;k<((*lin)+1)*(*p)+1;k++){ bhat[k*(*nb)+s]=VE(res,k-1); } } free_mat(A); free_mat(AI); free_mat(X); free_vec(Y); free_vec(XY); free_vec(res); } timereg/src/additive-compSs.c0000644000176200001440000004157413520007035015702 0ustar liggesusers//#include #include #include "matrix.h" void compSs(alltimes,Nalltimes,Ntimes,designX,nx,px,designG,ng,pg,antpers,start,stop,id,status,deltaweight,intZHZ,intZHdN,silent) double *designX,*alltimes,*start,*stop,*intZHZ,*intZHdN,*designG; int *nx,*px,*antpers,*Nalltimes,*Ntimes,*ng,*pg,*status,*deltaweight,*id,*silent; { // {{{ matrix *X,*A,*AI,*AIXW,*dCGam,*CGam,*Ct,*ICGam,*XWZ,*ZWZ,*XWZAI,*tmpM4,*tmpM2; vector *xi,*tmpv2,*tmpv1,*PLScomp,*Xi,*dA,*rowX,*AIXWdN,*korG,*rowZ,*gam,*ZHdN, *IZHdN,*zi; int j,k,l,c,s,count,pers=0,pmax,*ipers=calloc(*Ntimes,sizeof(int)); int 
stat,*ls=calloc(*Ntimes,sizeof(int)); double time,dtime,fabs(),sqrt(); malloc_mats(*antpers,*px,&X,NULL); malloc_mats(*px,*px,&A,&AI,NULL); malloc_mats(*px,*antpers,&AIXW,NULL); // malloc_mats(*antpers,*pg,&Z,NULL); malloc_mats(*pg,*pg,&tmpM2,&ZWZ,&ICGam,&CGam,&dCGam,NULL); malloc_mats(*px,*pg,&Ct,&XWZ,&XWZAI,NULL); malloc_mat(*px,*pg,tmpM4); malloc_vecs(*px,&dA,&xi,&tmpv1,&korG,&rowX,&AIXWdN,NULL); malloc_vecs(*pg,&zi,&tmpv2,&rowZ,&gam,&ZHdN,&IZHdN,NULL); malloc_vecs(*antpers,&PLScomp,&Xi,NULL); if (*px>=*pg) pmax=*px; else pmax=*pg; mat_zeros(Ct); mat_zeros(CGam); vec_zeros(IZHdN); // Rprintf(" test \n"); for (s=1;s<*Nalltimes;s++){ // Rprintf(" test %d %d %d \n",s,*antpers,*nx); time=alltimes[s]; dtime=time-alltimes[s-1]; mat_zeros(A); stat=0; mat_zeros(ZWZ); mat_zeros(XWZ); l=0; stat=0; for (c=0,count=0;((c<*nx) && (count!=*antpers));c++) { // Rprintf("times %lf %lf %lf \n",time,start[c],stop[c]); if ((start[c]=time)) { // Rprintf("under risk %d %d %d \n",c,id[c],count); for(j=0;j=*pg) pmax=*px; else pmax=*pg; //mat_zeros(Ct); mat_zeros(CGam); vec_zeros(IZHdN); //mat_zeros(A); mat_zeros(ZWZ); mat_zeros(XWZ); count=nx[0]-1; for (s=(*Nalltimes)-1;s>0;s=s-1){ sstop=0; // Rprintf(" test %d %d %d \n",s,*antpers,*nx); time=alltimes[s]; dtime=time-alltimes[s-1]; stat=0; l=0; stat=0; if (1==0) { for (c=0,count=0;((c<*nx) && (count!=*antpers));c++) { // {{{ // Rprintf("times %lf %lf %lf \n",time,start[c],stop[c]); if ((start[c]=time)) { // Rprintf("under risk %d %d %d \n",c,id[c],count); for(j=0;j=0;c=c-1) { // {{{ // Rprintf("times %d %lf %lf %lf %d %d %d \n",s,time,start[c],stop[c],c,sstop,count); if ((start[c]=time)) { // Rprintf("under risk %d %d %d \n",c,id[c],count); for(j=0;j=*pg) pmax=*px; else pmax=*pg; //mat_zeros(Ct); mat_zeros(CGam); vec_zeros(IZHdN); //mat_zeros(A); mat_zeros(ZWZ); mat_zeros(XWZ); count=nx[0]-1; for (s=(*Nalltimes)-1;s>0;s=s-1){ sstop=0; // Rprintf(" test %d %d %d \n",s,*antpers,*nx); time=alltimes[s]; dtime=time-alltimes[s-1]; stat=0; l=0; stat=0; if (1==0) { for (c=0,count=0;((c<*nx) && (count!=*antpers));c++) { // {{{ // Rprintf("times %lf %lf %lf \n",time,start[c],stop[c]); if ((start[c]=time)) { // Rprintf("under risk %d %d %d \n",c,id[c],count); for(j=0;j=0;c=c-1) { // {{{ // Rprintf("times %d %lf %lf %lf %d %d %d \n",s,time,start[c],stop[c],c,sstop,count); if ((start[c]=time)) { // Rprintf("under risk %d %d %d \n",c,id[c],count); for(j=0;j=*pg) pmax=*px; else pmax=*pg; //mat_zeros(Ct); mat_zeros(CGam); vec_zeros(IZHdN); //mat_zeros(A); mat_zeros(ZWZ); mat_zeros(XWZ); count=0; sstop=0; for (s=1;s<*Nalltimes;s++){ // Rprintf(" test %d %d %d \n",s,*antpers,*nx); time=alltimes[s]; dtime=time-alltimes[s-1]; stat=0; l=0; stat=0; sstop=0; if (s==1) { for (c=0;c<*nx;c++) { // {{{ if ((start[c]=time)) { for(j=0;jtime) || (stop[c]time) sstop=1; } } // Rprintf(" s er %d \n",s); //print_mat(A); print_mat(ZWZ); print_mat(XWZ); // MtA(X,X,A); invertS(A,AI,silent[0]); if (ME(AI,0,0)==0 && *silent==0) Rprintf("time %lf X'X singular \n",time); // MtA(Z,Z,ZWZ);MtA(X,Z,XWZ); MxA(AI,XWZ,XWZAI); MtA(XWZAI,XWZ,tmpM2); mat_subtr(ZWZ,tmpM2,dCGam); scl_mat_mult(dtime,dCGam,dCGam); if (*deltaweight==0) scl_mat_mult(dtime,dCGam,dCGam); mat_add(CGam,dCGam,CGam); if (stat==1) { // extract_row(X,pers,tmpv1); Mv(AI,xi,AIXWdN); // extract_row(Z,pers,zi); vM(XWZ,AIXWdN,tmpv2); vec_subtr(zi,tmpv2,ZHdN); if (*deltaweight==0) scl_vec_mult(dtime,ZHdN,ZHdN); vec_add(ZHdN,IZHdN,IZHdN); } // scl_mat_mult(dtime,XWZAI,tmpM4);mat_add(tmpM4,Ct,Ct); } /* s =1...Ntimes */ // 
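/* After the loop over event times, CGam holds the accumulated matrix
   int Z'HZ and IZHdN the accumulated vector int Z'H dN, where
   H = I - X(X'X)^{-1}X' removes the nonparametric part, so the constant
   effects are estimated below as gam = CGam^{-1} IZHdN; both pieces are
   also copied back to the caller through intZHZ and intZHdN. */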
invertS(CGam,ICGam,silent[0]); Mv(ICGam,IZHdN,gam); //if (ME(ICGam,0,0)==0 && *silent==0) Rprintf(" intZHZ singular\n"); // print_mat(CGam); print_vec(IZHdN); for(k=0;k<*pg;k++) { intZHdN[k]=VE(IZHdN,k); for(j=0;j<*pg;j++) intZHZ[k*(*pg)+j]=ME(CGam,k,j); } free_mats(&X,&A,&AI,&AIXW,&tmpM2,&ZWZ,&ICGam,&CGam,&dCGam, &Ct,&XWZ,&XWZAI, &tmpM4,NULL); free_vecs(&dA,&xi,&tmpv1,&korG,&rowX,&AIXWdN,&zi,&tmpv2,&rowZ,&gam, &ZHdN,&IZHdN,&PLScomp,&Xi,NULL); free(ipers); free(ls); } // }}} timereg/src/cox-aalen.c0000644000176200001440000011223613520007035014510 0ustar liggesusers#include #include #include "matrix.h" #include #include void score(times,Ntimes,designX,nx,px,designG,ng,pg,antpers,start,stop, betaS,Nit,cu,vcu,w,mw,loglike,Iinv,Vbeta,detail,offs,mof,sim,antsim, Rvcu,RVbeta, test,testOBS,Ut,simUt,Uit,XligZ,aalen,nb,id,status,wscore,dNit,ratesim,score,dhatMit,gammaiid,dmgiid, retur,robust,covariance,Vcovs,addresamp,addproc, resample,gamiid,biid,clusters,antclust,vscore,betafixed,weights,entry,exactderiv, timegroup,maxtimepoint,stratum,silent,caseweight) double *designX,*designG,*times,*betaS,*start,*stop,*cu,*w,*loglike,*Vbeta,*RVbeta,*vcu,*offs,*Rvcu,*Iinv,*test,*testOBS,*Ut,*simUt,*Uit,*aalen,*score,*dhatMit,*gammaiid,*dmgiid,*Vcovs,*addproc,*gamiid,*biid,*vscore,*weights,*dNit,*sim,*caseweight,*silent; int*covariance,*nx,*px,*ng,*pg,*antpers,*Ntimes,*mw,*Nit,*detail,*mof,*antsim,*XligZ,*nb,*id,*status,*wscore,*ratesim,*retur,*robust,*addresamp,*resample,*clusters,*antclust,*betafixed,*entry,*exactderiv,*timegroup,*maxtimepoint,*stratum; { int timing=0; double basesim=0,basestart=0; int ssilent=round(silent[0]); double propodds=silent[1]; int icaseweight=round(silent[2]); // printf("%d %d %lf \n",ssilent,icaseweight,propodds); clock_t c0,c1; c0=clock(); // mjump=sim[2]; // multiple jumps in clusters, relevant for ratesim=0 simulering via cholesky simulering basesim =sim[0]; // 1,0,-1, baseline is also simulated from time basesim=sim[0] and variance estimated (can be omitted for some for models) basestart=sim[1]; // baseline is also simulated from time basesim=sim[0] and variance estimated (can be omitted for some for models) // printf(" basesim %lf %d \n",basesim,*antsim); // basesim=0 no simulations but variance, basesim=1 (simul and variance), basesim=-1 (no simulations no variance) if (*detail==2) Rprintf("Memory allocation starting %d %d %d \n",*antpers,*antclust,*maxtimepoint); // {{{ setting up memory matrix *X,*Z,*WX,*WZ,*cdesX,*cdesX2,*cdesX3,*CtVUCt,*A,*AI; matrix *Vcov,*dYI,*Ct,*dM1M2,*M1M2t,*COV,*ZX,*ZP,*ZPX; matrix *tmp2,*tmp3,*dSprop,*dS,*S1,*SI,*S2,*M1,*VU,*ZXAI,*VUI; matrix *ZPZ,*RobVbeta,*Delta,*tmpM1,*Utt,*Delta2,*tmpM2; // matrix *St[*maxtimepoint],*M1M2[*Ntimes],*C[*maxtimepoint],*ZXAIs[*Ntimes],*dYIt[*Ntimes]; // matrix *St[*Ntimes], // matrix *M1M2[*Ntimes],*C[*Ntimes],*ZXAIs[*Ntimes],*AIs[*Ntimes]; matrix *Stg[*maxtimepoint],*Cg[*maxtimepoint]; matrix *ZPX1,*ZPZ1,*ZPXo,*ZPZo; vector *cumm,*dA,*VdA,*MdA,*delta,*zav,*lamt,*lamtt; vector *xi,*zi,*U,*beta,*xtilde,*Gbeta,*zcol,*one,*difzzav; vector *offset,*weight,*varUthat[*maxtimepoint],*Uprofile; // vector *ZXdA[*Ntimes]; vector *ta,*ahatt,*vrisk,*tmpv1,*tmpv2,*rowX,*rowZ,*difX,*VdB; vector *W2[*antclust],*W3[*antclust]; matrix *W3t[*antclust],*W4t[*antclust],*W2t[*antclust],*Uti[*antclust]; vector *Ui[*antclust]; vector *reszpbeta,*res1dim; matrix *dAt; int cin=0,ci=0,c,pers=0,i=0,j,k,l,s,s1,it,count,pmax, *imin=calloc(1,sizeof(int)), *cluster=calloc(*antpers,sizeof(int)), *strata=calloc(*antpers,sizeof(int)), 
*ipers=calloc(*Ntimes,sizeof(int)); double S0,RR=1,time=0,ll,lle,llo; double tau,hati,random,scale,sumscore; double *cug=calloc((*maxtimepoint)*(*px+1),sizeof(double)), *timesg=calloc((*maxtimepoint),sizeof(double)), *powi=calloc(*Ntimes,sizeof(double)) ; // *caseweight=calloc(*Ntimes,sizeof(double)); double norm_rand(); void GetRNGstate(),PutRNGstate(); int stratpers=0,antstrat=stratum[1]; double *S0strata=calloc(antstrat,sizeof(double)); matrix *ZPZs[antstrat],*ZPXs[antstrat]; // ,*As[antstrat],*ZXs[antstrat]; // for (j=0;j<*nx;j++) printf(" %d ",stratum[j+2]); if (*detail==1) Rprintf("antstrat %d \n",antstrat); for (j=0;j=0) { malloc_mat(*maxtimepoint,*px,W3t[j]); malloc_mat(*maxtimepoint,*px,W4t[j]); malloc_vec(*px,W3[j]); } malloc_mat(*maxtimepoint,*pg,W2t[j]); malloc_mat(*maxtimepoint,*pg,Uti[j]); malloc_vec(*pg,Ui[j]); } for(j=0;j<*maxtimepoint;j++) malloc_vec(*pg,varUthat[j]); } // }}} for (c=0;c<*nx;c++) cluster[id[c]]=clusters[c]; if (*antsim>0) { malloc_mat(*maxtimepoint,*pg,Delta2); malloc_mat(*maxtimepoint,*pg,tmpM2); } if (basesim>0) { malloc_mat(*maxtimepoint,*px,Delta); malloc_mat(*maxtimepoint,*px,tmpM1); } malloc_mat(*maxtimepoint,*pg,Utt); malloc_mats(*antpers,*px,&WX,&X,&cdesX,&cdesX2,&cdesX3,NULL); malloc_mats(*antpers,*pg,&WZ,&ZP,&Z,NULL); malloc_mats(*px,*px,&Vcov,&COV,&A,&AI,&M1,&CtVUCt,NULL); malloc_mats(*pg,*pg,&RobVbeta,&ZPZ,&tmp2,&dSprop,&dS,&S1,&S2,&SI,&VU,&VUI,NULL); malloc_mats(*pg,*px,&ZXAI,&ZX,&dM1M2,&M1M2t,NULL); malloc_mats(*px,*pg,&tmp3,&ZPX,&dYI,&Ct,NULL); malloc_mats(*px,*pg,&ZPX1,NULL); malloc_mats(*pg,*pg,&ZPZ1,NULL); malloc_mats(*px,*pg,&ZPXo,NULL); malloc_mats(*pg,*pg,&ZPZo,NULL); malloc_mat(*Ntimes,*px,dAt); malloc_vec(1,reszpbeta); malloc_vec(1,res1dim); malloc_vecs(*antpers,&weight,&lamtt,&lamt,&zcol,&Gbeta,&one,&offset,NULL); malloc_vecs(*px,&ahatt,&tmpv1,&difX,&VdB,&rowX,&xi,&dA,&VdA,&MdA,&cumm,NULL); malloc_vecs(*px,&xtilde,NULL); malloc_vecs(*pg,&tmpv2,&rowZ,&zi,&U,&beta,&delta,&zav,&difzzav,&Uprofile,NULL); malloc_vec(*nb,ta); malloc_vec(*antpers,vrisk); if (*detail==1) Rprintf("Memory allocation starting \n"); for(j=0;j<*maxtimepoint;j++) { malloc_mat(*px,*pg,Cg[j]); malloc_mat(*pg,*pg,Stg[j]);} matrix *Cn,*M1M2n,*ZXAIn,*AIn; if (basesim>=0) { malloc_mat((*px)*(*Ntimes),*pg,Cn); malloc_mat(*pg,(*px)*(*Ntimes),M1M2n); malloc_mat((*px)*(*Ntimes),*px,AIn); } malloc_mat(*pg,(*px)*(*Ntimes),ZXAIn); // matrix *Uiclustert[*antclust]; // matrix *Uicluster[*antclust]; //if (*ratesim==0 && mjump==1) { // for(j=0;j<*antclust;j++) { // malloc_mat((*pg)*(*maxtimepoint),*pg,Uiclustert[j]); // malloc_mat((*pg),(*pg),Uicluster[j]); //} //} vector *ranvec,*vectmp; malloc_vec(*pg,ranvec); malloc_vec((*pg)*(*maxtimepoint),vectmp); // for(j=0;j<*Ntimes;j++) { // malloc_mat(*px,*pg,C[j]); malloc_mat(*pg,*px,M1M2[j]); // malloc_mat(*pg,*px,ZXAIs[j]); malloc_vec(*px,dAt[j]); malloc_mat(*px,*pg,dYIt[j]); //// malloc_vec(*pg,ZXdA[j]); malloc_mat(*pg,*pg,St[j]); // } pmax=max(*px,*pg); ll=0; for(j=0;j<*pg;j++) VE(beta,j)=betaS[j]; for(j=0;j<*antpers;j++) {VE(weight,j)=1; VE(offset,j)=1;} // }}} if (*detail==1) Rprintf("Memory allocation done \n"); if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: setting up allocation %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} R_CheckUserInterrupt(); if (*detail==1) Rprintf("Iterations start \n"); cu[0]=times[0]; double pweight=1,xdA=0; for (it=0;it<*Nit || (*Nit==0 && it==0);it++) // {{{ iterations start for cox-aalen model { if (it>0) { vec_zeros(cumm); vec_zeros(U); mat_zeros(S1); 
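  /* A new Newton-Raphson pass: the score vector U, the derivative matrix S1
     and the working design matrices are re-zeroed before the event times are
     traversed again with the updated beta (the update delta = S1^{-1} U is
     applied further below). */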
mat_zeros(A); mat_zeros(ZPZ); mat_zeros(ZPX); mat_zeros(ZX); mat_zeros(X); mat_zeros(Z); mat_zeros(WX); mat_zeros(WZ); for (j=0;j=time)) ) { for(j=0;j=0) ) ci=ci-1; } // }}} vec_zeros(rowX); vec_zeros(rowZ); if (s>1) // {{{ modifying design for next time points while ((stop[ci]=0) ) { VE(Gbeta,id[ci])=0; // vec_prod(zi,beta); for(j=0;j<*px;j++) VE(xi,j)=designX[j*(*nx)+ci]; for(j=0;j<*pg;j++) { VE(zi,j)=designG[j*(*nx)+ci]; VE(Gbeta,id[ci])+=VE(zi,j)*VE(beta,j); } RR=exp(VE(Gbeta,id[ci])+offs[ci]); if (entry[ci]==1) { replace_row(X,id[ci],xi); replace_row(Z,id[ci],zi); scl_vec_mult(weights[ci]*RR,xi,tmpv1); replace_row(WX,id[ci],tmpv1); scl_vec_mult(weights[ci],zi,tmpv2); replace_row(WZ,id[ci],tmpv2); VE(weight,id[ci])=weights[ci]; if (*mof==1) VE(offset,id[ci])=offs[ci]; } else { replace_row(X,id[ci],rowX); replace_row(Z,id[ci],rowZ); replace_row(WX,id[ci],rowX); replace_row(WZ,id[ci],rowZ); VE(Gbeta,id[ci])=0; VE(weight,id[ci])=0; if (*mof==1) VE(offset,id[ci])=0; } S0+=entry[ci]*RR*weights[ci]; S0strata[stratum[ci+2]]+=entry[ci]*RR*weights[ci]; for(j=0;j1) { scl_mat_mult(1/S0strata[stratpers],ZPZs[stratpers],ZPZo); scl_mat_mult(1/S0strata[stratpers],ZPXs[stratpers],ZPXo); } // }}} if (s<0) { Rprintf("======================================================= %d \n",s); print_mat(A); print_mat(ZPX); print_mat(ZX); print_mat(A); print_mat(ZPZ); } if (stratum[0]==0) invertS(A,AI,ssilent); if (ME(AI,0,0)==0 && stratum[0]==0 && ssilent==0) { Rprintf("additive design X'X not invertible at time (number, value): %d %lf \n",s,time); print_mat(A); } if (ME(AI,0,0)==0 && stratum[0]==0 && ssilent==2) { Rprintf("additive design X'X not invertible at time (number, value) : %d %lf \n",s,time); print_mat(A); Rprintf("print only first time with non-invertible design X'X\n"); ssilent=0; } if (stratum[0]==1) { for (k=0;k<*px;k++) if (fabs(ME(A,k,k))<0.000001) ME(AI,k,k)=0; else ME(AI,k,k)=1/ME(A,k,k); } // computation of dA scale=VE(weight,pers); extract_row(X,pers,xi); scl_vec_mult(scale,xi,xi); Mv(AI,xi,dA); MxA(ZX,AI,ZXAI); // if (*detail==3) {print_vec(xi); print_mat(A); print_mat(AI); } if (propodds>0) { // intensity 1/(1+theta exp(Z^T beta) A(t-1)) xdA=vec_prod(xi,cumm); pweight=(1+propodds*exp(+VE(Gbeta,pers))*xdA); powi[s]=pweight; scl_vec_mult(pweight,dA,dA); } if (icaseweight==1) { pweight=caseweight[s]; // printf(" %lf \n",caseweight[s]); powi[s]=pweight; scl_vec_mult(pweight,dA,dA); } if (it==(*Nit-1)) { replace_row(dAt,s,dA); for (j=0;j<*pg;j++) for (i=0;i<*px;i++) ME(ZXAIn,j,(s-1)*(*px)+i)=ME(ZXAI,j,i); } if (s<0) {print_mat(A); print_mat(AI); print_mat(ZX); } /* First derivative U and Second derivative S */ extract_row(Z,pers,zi); scl_vec_mult(scale,zi,zi); Mv(ZX,dA,zav); // pweight multiplied onto dA and therefore already on zav if (propodds>0 || icaseweight==1) scl_vec_mult(pweight,zi,zi); vec_subtr(zi,zav,difzzav); // scl_vec_mult(scale,difzzav,difzzav); // if (propodds>0 || icaseweight==1) scl_vec_mult(pweight,difzzav,difzzav); vec_add(difzzav,U,U); if (it==((*Nit)-1)) if (*detail==3) {Rprintf(" time %d %lf %lf Dl contribution \n",s,scale,times[s]); print_vec(difzzav); } if (s<0) { // {{{ Rprintf(" %d %d %lf %lf \n",pers,s,time,scale); print_vec(xi); print_vec(dA); print_vec(zi); print_vec(zav); print_vec(difzzav); print_vec(U); print_mat(A); print_mat(AI); } // }}} if (*betafixed==0) // {{{ if (stratum[0]==0) if ( (((*exactderiv==1) && (it==(*Nit-1) ||(*Nit==0 && it==0)) && (*px>1))) || ((*exactderiv==2) && (*px>1)) ) { if (*detail==3) Rprintf("Computation of second derivative \n"); 
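   /* Exact second derivative: for every subject at risk the predicted
      increment lamt_i = w_i RR_i x_i' dA weights the covariate row z_i
      (stored in ZP), and the cross-products built from these weighted rows
      in ZPZ1 and ZPX1, zeroed just below, enter the correction to dS; this
      branch is only taken when there is more than one additive term (px > 1). */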
mat_zeros(ZPZ1); mat_zeros(ZPX1); for (i=0;i<*antpers;i++) { extract_row(WX,i,xi); // er det weight her, nej da !! VE(lamt,i)=vec_prod(xi,dA); extract_row(Z,i,zi); scl_vec_mult(VE(lamt,i),zi,rowZ); replace_row(ZP,i,rowZ); extract_row(X,i,xi); for(j=0;j0) scl_mat_mult(pweight,dS,dS); if (icaseweight==1) scl_mat_mult(pweight,dS,dS); // extra term for second derivative wrt beta if (propodds>0) { // (Z-E) Z exp(Z beta) x^T A(t-1) mat_add(dS,dSprop,dS); } if (*mw==1) {scale=VE(weight,pers); scl_mat_mult(scale,dS,dS); } mat_add(dS,S1,S1); if (it==((*Nit)-1)) if (*detail==4) { Rprintf(" time %d %d %lf D2l contribution \n",s,stratpers,times[s]); print_mat(ZPZo); print_mat(ZPXo); print_mat(ZXAI); Rprintf("============================================ \n"); print_mat(tmp2); print_mat(dS); print_mat(S1); // for (j=0;j=0) for (j=0;j<*pg;j++) for (i=0;i<*px;i++) { ME(M1M2n,j,(s-1)*(*px)+i)=ME(M1M2t,j,i); ME(Cn,(s-1)*(*px)+i,j)=ME(Ct,i,j); } for (k=1;k<=*px;k++) { cu[k*(*Ntimes)+s]=cu[k*(*Ntimes)+s-1]+VE(dA,k-1); cug[k*(*maxtimepoint)+timegroup[s]]=cu[k*(*Ntimes)+s]; vcu[k*(*Ntimes)+s]=VE(VdA,k-1)+vcu[k*(*Ntimes)+s-1]; } if (*robust==1 && basesim>=0) { for (j=0;j<*px;j++) for (i=0;i<*px;i++) ME(AIn,(s-1)*(*px)+j,i)=ME(AI,j,i); } } // }}} if (propodds>0) { // cumulative hazard (to use for prop odds model vec_add(dA,cumm,cumm); } } // }}} /* Ntimes */ if (timing==1) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: going through times %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} /* for (k=0;k<*pg;k++) ME(S1,k,k)=ME(S1,k,k)+*ridge; */ invertS(S1,SI,ssilent); if (*betafixed==0 ) { Mv(SI,U,delta); MxA(SI,VU,S2); MxA(S2,SI,VU); } if (*detail==1) { Rprintf("=============Iteration %d =============== \n",it); Rprintf("Estimate beta \n"); print_vec(beta); Rprintf("delta beta \n"); print_vec(delta); Rprintf("Score D l\n"); print_vec(U); Rprintf("Information -D^2 l\n"); print_mat(SI); }; // updates beta for all but final and fixed situation // double step=0.5; if (*betafixed==0 && (*Nit>0) && (it<*Nit-1)) { // scl_vec_mult(step,delta,delta); vec_add(beta,delta,beta); } for (k=0;k<*pg;k++) sumscore=sumscore+fabs(VE(U,k)); if ((sumscore<0.0000001) & (it<(*Nit)-2)) { it=*Nit-2; } } /* it */ // }}} if (*detail==2) Rprintf("Fitting done \n"); if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: fitting done %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} R_CheckUserInterrupt(); lle=0; llo=0; ci=0; for (k=0;k<*pg;k++) score[k]=VE(U,k); mat_zeros(A); mat_zeros(ZPZ); mat_zeros(ZPX); mat_zeros(ZX); mat_zeros(X); mat_zeros(Z); mat_zeros(WX); mat_zeros(WZ); vec_zeros(zav); if (*detail==2) Rprintf("robust==%d \n",*robust); if (*robust==1) // {{{ { for (s=1;s<*Ntimes;s++) // {{{ terms for robust variances { time=times[s]; cu[s]=times[s]; vcu[s]=times[s]; if (*robust==1) { Rvcu[timegroup[s]]=times[s]; cug[timegroup[s]]=times[s]; timesg[timegroup[s]]=times[s]; Ut[timegroup[s]]=times[s]; } R_CheckUserInterrupt(); sumscore=0; S0=0; for (j=0;j=time)) ) { for(j=0;j=0) ) ci=ci-1; } // }}} vec_zeros(rowX); vec_zeros(rowZ); if (s>1) // {{{ modifying design for next time points while ((stop[ci]=0) ) { VE(Gbeta,id[ci])=0; // vec_prod(zi,beta); for(j=0;j<*px;j++) VE(xi,j)=designX[j*(*nx)+ci]; for(j=0;j<*pg;j++) { VE(zi,j)=designG[j*(*nx)+ci]; VE(Gbeta,id[ci])+=VE(zi,j)*VE(beta,j); } RR=exp(VE(Gbeta,id[ci])+offs[ci]); if (entry[ci]==1) { replace_row(X,id[ci],xi); replace_row(Z,id[ci],zi); scl_vec_mult(RR*weights[ci],xi,tmpv1); // scl_vec_mult(RR,xi,tmpv1); replace_row(WX,id[ci],tmpv1); 
scl_vec_mult(weights[ci],zi,tmpv2); replace_row(WZ,id[ci],tmpv2); if (*mw==1) VE(weight,id[ci])=weights[ci]; if (*mof==1) VE(offset,id[ci])=offs[ci]; } else { replace_row(X,id[ci],rowX); replace_row(WX,id[ci],rowX); replace_row(Z,id[ci],rowZ); replace_row(WZ,id[ci],rowZ); VE(Gbeta,id[ci])=0; if (*mw==1) VE(weight,id[ci])=0; if (*mof==1) VE(offset,id[ci])=0; } S0+=entry[ci]*RR*weights[ci]; S0strata[stratum[ci+2]]+=entry[ci]*RR*weights[ci]; ci=ci-1; pers=id[ci]; stratpers=stratum[ci+2]; } // }}} ipers[s]=pers; // }}} // if (s<3) { head_matrix(X); head_matrix(WX); head_matrix(Z); head_matrix(WZ); } // extract_row(WX,pers,xi); extract_row(dAt,s,dA); // hati=vec_prod(xi,dA); lle=lle+log(hati); for (j=0;j<*pg;j++) for (i=0;i<*px;i++) ME(ZXAI,j,i)=ME(ZXAIn,j,(s-1)*(*px)+i); if (basesim>=0) { for (j=0;j<*px;j++) for (i=0;i<*px;i++) ME(AI,j,i)=ME(AIn,(s-1)*(*px)+j,i); } // print_mat(ZXAI); print_vec(dA); if (*ratesim==1 || *retur>=1) for (i=0;i<*antpers;i++) // {{{ { cin=cluster[i]; extract_row(WX,i,rowX); // RR*xi extract_row(Z,i,zi); extract_row(X,i,xi); hati=vec_prod(rowX,dA); if (*ratesim==1) { Mv(ZXAI,xi,tmpv2); vec_subtr(zi,tmpv2,tmpv2); if (*mw==1) { scl_vec_mult(VE(weight,i),tmpv2,tmpv2);} // weight adjustment if (i==pers) vec_add(tmpv2,W2[cin],W2[cin]); if (*ratesim==1) {scl_vec_mult(hati,tmpv2,rowZ); vec_subtr(W2[cin],rowZ,W2[cin]); } if (basesim>=0) { Mv(AI,xi,rowX); if (*mw==1) scl_vec_mult(VE(weight,i),rowX,rowX); if (i==pers) {vec_add(rowX,W3[cin],W3[cin]); } llo=llo+hati; if (*ratesim==1) {scl_vec_mult(hati,rowX,rowX); vec_subtr(W3[cin],rowX,W3[cin]);} } } if (*retur==1) dhatMit[i*(*Ntimes)+s]=1*(i==pers)-hati; if (*retur==2) dhatMit[i]=dhatMit[i]+1*(i==pers)-hati; } // }}} if (*ratesim==1) for (j=0;j<*antclust;j++) { replace_row(W2t[j],timegroup[s],W2[j]); if (basesim>=0) replace_row(W3t[j],timegroup[s],W3[j]); } if ((*ratesim==0)) // {{{ compute resampling counting process LWY style version { cin=cluster[pers]; extract_row(WX,pers,rowX); // RR*xi extract_row(Z,pers,zi); extract_row(X,pers,xi); Mv(ZXAI,xi,tmpv2); vec_subtr(zi,tmpv2,tmpv2); if (*mw==1) scl_vec_mult(VE(weight,pers),tmpv2,tmpv2); if (propodds>0 || icaseweight==1) scl_vec_mult(powi[s],tmpv2,tmpv2); vec_add(tmpv2,W2[cin],W2[cin]); // if (mjump==1) // for (j=0;j<*pg;j++) for (i=0;i<*pg;i++) // ME(Uicluster[cin],j,i)+=VE(tmpv2,j)*VE(tmpv2,i); if (basesim>=0) { Mv(AI,xi,rowX); if (*mw==1) scl_vec_mult(VE(weight,pers),rowX,rowX); if (propodds>0 || icaseweight==1) scl_vec_mult(powi[s],rowX,rowX); vec_add(rowX,W3[cin],W3[cin]); } // distrubes the increments to the end for each process with jumps for (s1=timegroup[s];s1<*maxtimepoint;s1++) // {{{ { // printf("W2t %d %d %d \n",cin,s1,*maxtimepoint); // print_mat(W2t[cin]); replace_row(W2t[cin],s1,W2[cin]); // printf("2 %d %d \n",cin,s1); if (basesim>=0) replace_row(W3t[cin],s1,W3[cin]); // if (mjump==1) { // cholesky(Uicluster[cin],tmp2); //// if (s1==timegroup[s]) { //// printf(" tmp2 %d \n",cin); //// print_vec(tmpv2); //// print_mat(Uicluster[cin]); //// print_mat(tmp2); //// MtM(tmp2,dS); //// print_mat(dS); //// } // for (j=0;j<*pg;j++) for (i=0;i<*pg;i++) ME(Uiclustert[cin],s1*(*pg)+j,i)=ME(tmp2,i,j); // } } // }}} } // }}} /* MG baseret varians beregning */ if (basesim>=0) // {{{ { for (j=0;j<*pg;j++) for (i=0;i<*px;i++) { ME(M1M2t,j,i)=ME(M1M2n,j,(s-1)*(*px)+i); ME(Ct,i,j)= ME(Cn,(s-1)*(*px)+i,j); } // printf(" s %d \n",s); // print_mat(Ct); MxA(Ct,VU,tmp3); MAt(tmp3,Ct,CtVUCt); // print_mat(CtVUCt); MxA(Ct,SI,tmp3); MxA(tmp3,M1M2t,COV); // print_mat(COV); for 
(k=1;k<=*px;k++) { if (*betafixed==0) vcu[k*(*Ntimes)+s]+=ME(CtVUCt,k-1,k-1)+2*ME(COV,k-1,k-1); // vcu[k*(*Ntimes)+s]+=ME(CtVUCt,k-1,k-1); } } // }}} for (k=1;k<=*pg;k++) Ut[k*(*maxtimepoint)+timegroup[s]]=ME(Utt,timegroup[s],k-1); } // }}} } // }}} if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: robust variance terms 1 %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} if (*detail==2) Rprintf("Robust variances 1 \n"); R_CheckUserInterrupt(); ll=lle-llo; /* likelihood beregnes */ if (*detail==2) Rprintf("loglike is %lf \n",ll); // check af score process er ok // int itest=1; // if (itest==1) // for (s=0;s<*maxtimepoint;s++) { // mat_zeros(S2); mat_zeros(dS); mat_zeros(SI); // mat_zeros(VUI); mat_zeros(ZPZ); // for (j=0;j<*antclust;j++) // { // extract_row(W2t[j],s,tmpv2); // for (k=0;k<*pg;k++) for (i=0;i<*pg;i++) ME(S2,k,i)+=VE(tmpv2,k)*VE(tmpv2,i); // if (mjump==1) { // for (k=0;k<*pg;k++) for (i=0;i<*pg;i++) ME(dS,k,i)=ME(Uiclustert[j],s*(*pg)+k,i); // mat_transp(dS,dS); // MtM(dS,VUI); // mat_add(VUI,ZPZ,ZPZ); // } //} //printf("score process variance %d \n",s); //print_mat(S2); //print_mat(ZPZ); //} if ((*robust==1)) // {{{ robust variances { for (s=1;s<*maxtimepoint;s++) { vec_zeros(VdB); mat_zeros(Vcov); for (j=0;j<*antclust;j++) // {{{ { if (s==1 && *detail==4) { Rprintf("========================= %d \n",j); print_mat(W2t[j]); print_vec(W2[j]); print_mat(Stg[s]); print_mat(S1); print_mat(SI); } Mv(SI,W2[j],tmpv2); if (basesim>=0) { Mv(Cg[s],tmpv2,rowX); extract_row(W3t[j],s,tmpv1); vec_add(tmpv1,rowX,difX); if (*betafixed==1) scl_vec_mult(1,tmpv1,difX); replace_row(W4t[j],s,difX); vec_star(difX,difX,tmpv1); vec_add(tmpv1,VdB,VdB); } if (s==1) if (*betafixed==0) { for (c=0;c<*pg;c++) gamiid[c*(*antclust)+j]=gamiid[c*(*antclust)+j]+VE(tmpv2,c); } if (*resample==1 && basesim>=0) { for (c=0;c<*px;c++) {l=j*(*px)+c; biid[l*(*maxtimepoint)+s]=biid[l*(*maxtimepoint)+s]+VE(difX,c); } } if (*covariance==1 && basesim>=0) { for (k=0;k<*px;k++) for (c=0;c<*px;c++) ME(Vcov,k,c)=ME(Vcov,k,c)+VE(difX,k)*VE(difX,c); } Mv(Stg[s],tmpv2,rowZ); extract_row(W2t[j],s,tmpv2); if (*detail==4) Rprintf("j,s is %d %d \n",j,s); if (*betafixed==0) { vec_subtr(tmpv2,rowZ,zi); replace_row(Uti[j],s,zi); // if (mjump==1 && *ratesim==0) // { // cholesky(Uicluster[j],tmp2); // mat_transp(tmp2,tmp2); // MxA(SI,tmp2,tmp2); // MxA(Stg[s],tmp2,dS); // for (c=0;c<*pg;c++) for (i=0;i<*pg;i++) ME(Uiclustert[j],s*(*pg)+c,i)-=ME(dS,c,i); // } } else replace_row(Uti[j],s,tmpv2); vec_star(zi,zi,tmpv2); vec_add(tmpv2,varUthat[s],varUthat[s]); } // }}} /* j in clusters */ if (*betafixed==0) for (i=0;i<*pg;i++) vscore[(i+1)*(*maxtimepoint)+s]=VE(varUthat[s],i); if (basesim>=0) for (k=1;k<*px+1;k++) { Rvcu[k*(*maxtimepoint)+s]=VE(VdB,k-1); if (*covariance==1) { for (j=0;j<*px;j++) { l=(k-1)*(*px)+j; Vcovs[l*(*maxtimepoint)+s]=ME(Vcov,k-1,j); } } } } /* s=1 ..maxtimepoints */ } // }}} robust variance //if (mjump==1 && *ratesim==0) //for (j=0;j<*antclust;j++) { // printf("observed score ========================== %d \n",j); // print_vec(W2[j]); // print_mat(Uicluster[j]); //} //if (mjump==2 && *ratesim==0) //for (j=0;j<*antclust;j++) { // printf("observed score ========================== %d \n",j); // print_mat(Uti[j]); // print_mat(Uiclustert[j]); //} if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: variance terms 2 %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} if (*detail==2) Rprintf("Robust variances 2 \n"); if ((*betafixed==0) && (*robust==1)) { for 
(j=0;j<*antclust;j++) { Mv(SI,W2[j],tmpv2); for (c=0;c<*pg;c++) for (k=0;k<*pg;k++) ME(RobVbeta,c,k)=ME(RobVbeta,c,k)+VE(W2[j],c)*VE(W2[j],k); for (k=0;k<*pg;k++) gammaiid[j*(*pg)+k]=VE(tmpv2,k); } MxA(RobVbeta,SI,ZPZ); MxA(SI,ZPZ,RobVbeta); } if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: variance terms 3 %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} R_CheckUserInterrupt(); for(j=0;j<*pg;j++) { betaS[j]= VE(beta,j); loglike[0]=lle; loglike[1]=ll; for (k=0;k<*pg;k++){ Iinv[k*(*pg)+j]=ME(SI,j,k); Vbeta[k*(*pg)+j]=-ME(VU,j,k); RVbeta[k*(*pg)+j]=-ME(RobVbeta,j,k); } } // printf("cholesky"); cholesky(SI,VU); print_mat(SI); print_mat(VU); // MtM(VU,SI); print_mat(SI); if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: variance terms 4 %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} // check af observed score process er ok , sammenligning af variancer til // int itest1=1; // if (itest1==1 ) // for (s=0;s<*maxtimepoint;s++) { // mat_zeros(S2); mat_zeros(dS); mat_zeros(VUI); mat_zeros(ZPZ); // for (j=0;j<*antclust;j++) // { // extract_row(Uti[j],s,tmpv2); // for (k=0;k<*pg;k++) for (i=0;i<*pg;i++) ME(S2,k,i)+=VE(tmpv2,k)*VE(tmpv2,i); // if (mjump==1) { // for (k=0;k<*pg;k++) for (i=0;i<*pg;i++) ME(dS,k,i)=ME(Uiclustert[j],s*(*pg)+k,i); // print_mat(dS); //// mat_transp(dS,dS); // MtM(dS,ZPZ); // printf("obs Score %d %d \n",s,j); // print_mat(ZPZ); // mat_add(ZPZ,VUI,VUI); // } //} //printf(" %d \n",s); //print_mat(S2); //print_mat(VUI); //} // for(j=0;j<*antclust;j++) print_mat(Uti[j]); if (*detail==2) Rprintf("Ready for simulations antsim =%d\n",*antsim); if (*antsim>0) // {{{ score process simulations { // Rprintf("Simulations start N= %ld \n",(long int) *antsim); tau=times[*Ntimes-1]-times[0]; for (i=1;i<=*px;i++) VE(rowX,i-1)=cug[i*(*maxtimepoint)+(*maxtimepoint-1)]; for (s=1;s<*maxtimepoint;s++) // {{{ /* Beregning af OBS teststrrelser */ { time=timesg[s]-times[0]; // FIX if (basesim>0) // {{{ { if ((timesg[s]>basestart)) { for (i=1;i<=*px;i++) { VE(xi,i-1)=fabs(cug[i*(*maxtimepoint)+s])/sqrt(Rvcu[i*(*maxtimepoint)+s]); if (VE(xi,i-1)>testOBS[i-1]) testOBS[i-1]=VE(xi,i-1); } } scl_vec_mult(time/tau,rowX,difX); for (i=1;i<=*px;i++) VE(xi,i-1)=cug[i*(*maxtimepoint)+s]; vec_subtr(xi,difX,difX); if ((s>*wscore) && (s<*maxtimepoint-*wscore)) { for (i=0;i<*px;i++) { VE(difX,i)=fabs(VE(difX,i)); l=(*px+i); if (VE(difX,i)>testOBS[l]) testOBS[l]=VE(difX,i); } } } // }}} if (*wscore>=1) { /* sup beregnes i R */ if ((s>*wscore) && (s<*maxtimepoint-*wscore)) {extract_row(Utt,s,rowZ); for (i=0;i<*pg;i++) VE(rowZ,i) = VE(rowZ,i)/sqrt(VE(varUthat[s],i)); replace_row(Utt,s,rowZ); /* scaled score process */ } else {vec_zeros(rowZ); replace_row(Utt,s,rowZ);} } for (k=1;k<=*pg;k++) Ut[k*(*maxtimepoint)+s]=ME(Utt,s,k-1); } // }}} *s=1..maxtimepoint Beregning af obs teststrrelser if (*detail==2) Rprintf("Simulations start N= %ld \n",(long int) *antsim); for (k=1;k<=*antsim;k++) // {{{ k=1,...,antsim { R_CheckUserInterrupt(); if (basesim>0) mat_zeros(Delta); mat_zeros(Delta2); vec_zeros(tmpv1); for (i=0;i<*antclust;i++) // {{{ { random=norm_rand(); if (basesim>0) { scl_mat_mult(random,W4t[i],tmpM1); mat_add(tmpM1,Delta,Delta); } // if ((mjump==0 && *ratesim==0) || (*ratesim==1)) { // random=norm_rand(); scl_mat_mult(random,Uti[i],tmpM2); mat_add(tmpM2,Delta2,Delta2); // } else { // for (c=0;c<*pg;c++) VE(ranvec,c)=norm_rand(); // Mv(Uiclustert[i],ranvec,vectmp); // for (c=0;c<*maxtimepoint;c++) // for (l=0;l<*pg;l++) 
ME(tmpM2,c,l)=VE(vectmp,c*(*pg)+l); // mat_add(tmpM2,Delta2,Delta2); // } } // }}} if (basesim>0) extract_row(Delta,*maxtimepoint-1,tmpv1); for (s=1;s<*maxtimepoint;s++) { time=timesg[s]-times[0]; if (basesim>0) // {{{ { scl_vec_mult(time/tau,tmpv1,xi); extract_row(Delta,s,rowX); vec_subtr(rowX,xi,difX); if (*addresamp==1) { if (k<51) for (i=0;i<*px;i++) {l=(k-1)*(*px)+i; addproc[l*(*maxtimepoint)+s]=ME(Delta,s,i);} } for (i=0;i<*px;i++) { VE(difX,i)=fabs(VE(difX,i)); l=(*px+i); if (VE(difX,i)>test[l*(*antsim)+k-1]) test[l*(*antsim)+k-1]=VE(difX,i); if ((timesg[s]>basestart)) { VE(xi,i)=fabs(ME(Delta,s,i))/sqrt(Rvcu[(i+1)*(*maxtimepoint)+s]); if (VE(xi,i)>test[i*((*antsim))+k-1]) test[i*((*antsim))+k-1]=VE(xi,i); } } } // }}} if (*wscore>=1) {/*{{{*/ extract_row(Delta2,s,zi); if ((s>*wscore) && (s<*maxtimepoint-*wscore)) { for (i=0;i<*pg;i++) {VE(zi,i)=fabs(ME(Delta2,s,i))/sqrt(VE(varUthat[s],i)); if (VE(zi,i)>simUt[i*(*antsim)+k-1]) simUt[i*(*antsim)+k-1]=VE(zi,i); } if (k<50) { for (i=0;i<*pg;i++) { l=(k-1)*(*pg)+i; Uit[l*(*maxtimepoint)+s]=ME(Delta2,s,i)/sqrt(VE(varUthat[s],i));}} } } /* weighted score */ else { extract_row(Delta2,s,zi); for (i=0;i<*pg;i++) { if (fabs(VE(zi,i))>simUt[i*(*antsim)+k-1]) simUt[i*(*antsim)+k-1]=fabs(VE(zi,i)); } if (k<50) { for (i=0;i<*pg;i++) { l=(k-1)*(*pg)+i; Uit[l*(*maxtimepoint)+s]=ME(Delta2,s,i);} } } /* else wscore=0 */ /*}}}*/ } /* s=1..Ntims */ } // }}} /* k=1..antsim */ } /* sim==1 */ // }}} if (timing==2) { // {{{ c1=clock(); Rprintf ("\telapsed CPU time: before freeing %f\n", (float) (c1 - c0)/CLOCKS_PER_SEC); c0=clock(); } // }}} PutRNGstate(); /* to use R random normals */ if (*detail==2) Rprintf("Freeing "); // {{{ freeing if (antsim[0]>0) free_mats(&Delta2,&tmpM2,NULL); if (basesim>0) free_mats(&Delta,&tmpM1,NULL); if (basesim>=0) free_mats(&Cn,&M1M2n,&AIn,NULL); free_mats(&ZXAIn,NULL); free_mats(&dAt,&Utt,&WX,&X,&cdesX,&cdesX2,&cdesX3, &WZ,&ZP,&Z, &Vcov,&COV,&A,&AI,&M1,&CtVUCt, &RobVbeta,&ZPZ,&tmp2,&dSprop,&dS,&S1,&S2,&SI,&VU,&VUI, &ZXAI,&ZX,&dM1M2,&M1M2t, &tmp3,&ZPX,&dYI,&Ct, &ZPX1,&ZPZ1, &ZPXo,&ZPZo,NULL); free_vecs(&vectmp,&ranvec,&reszpbeta,&res1dim,&weight,&lamtt,&lamt,&zcol,&Gbeta,&one,&offset, &ahatt,&tmpv1,&difX,&VdB,&rowX,&xi,&dA,&VdA,&MdA,&cumm, &xtilde, &tmpv2,&rowZ,&zi,&U,&beta,&delta,&zav,&difzzav,&Uprofile, &ta,&vrisk,NULL); if (*robust==1) { for (j=0;j<*antclust;j++) { free_vec(W2[j]); if (basesim>=0) { free_mat(W3t[j]); free_mat(W4t[j]); free_vec(W3[j]); } free_mat(W2t[j]); free_mat(Uti[j]); free_vec(Ui[j]); } for (j=0;j<*maxtimepoint;j++) free_vec(varUthat[j]); } // if (*ratesim==0 && mjump==1) { // for(j=0;j<*antclust;j++) { free_mat(Uiclustert[j]); free_mat(Uicluster[j]); } // } for(j=0;j<*maxtimepoint;j++) { free_mat(Cg[j]); free_mat(Stg[j]);} free(cluster); free(ipers); free(imin); free(cug); free(timesg); free(S0strata); free(strata); free(powi); // free(caseweight); // }}} for (j=0;j #include #include "matrix.h" void comptestfunc(times,Ntimes,px,cu,vcu,vcudif,antsim,test,testOBS,Ut,simUt,W4t,weighted,antpers,gamma,line,timepow) double *times,*cu,*vcu,*vcudif,*test,*testOBS,*Ut,*simUt,*gamma,*timepow; int *px,*Ntimes,*antsim,*weighted,*antpers,*line; matrix **W4t; { matrix *Delta,*tmpM1; vector *gammavt,*tmpv1t,*tmpv1,*rowX,*xi,*difX,*ssrow,*VdB, *gammai[*antpers],*gammav; /*float gasdev(),expdev(),ran1(); */ double norm_rand(); void GetRNGstate(),PutRNGstate(); int i,k,l,s,c; double xij,vardif,tau,time,dtime,random,fabs(),sqrt(),stime,mtime;// unused var:x double *cumweight=calloc(*px,sizeof(double)); 
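 /* comptestfunc: given the cumulative estimates cu, their variances vcu and
    the i.i.d. decomposition processes W4t[i], this routine (i) estimates a
    constant or time-power effect gamma for each component, weighting by
    1/vcu when timepow is numerically zero and otherwise reading it off the
    last time point as cu(tau)/tau^timepow, (ii) records the observed
    supremum and integrated test statistics in testOBS, and (iii)
    approximates their null distribution with antsim multiplier
    replications, scaling each W4t[i] by an independent standard normal
    draw. */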
malloc_vecs(*px,&tmpv1t,&tmpv1,&rowX,&xi,&difX,&ssrow,&VdB,&gammavt,&gammav,NULL); malloc_mat(*Ntimes,*px,Delta); malloc_mat(*Ntimes,*px,tmpM1); for (i=0;i<*antpers;i++) malloc_vec(*px,gammai[i]); /* Rprintf("Simulations start N= %ld \n",*antsim); */ GetRNGstate(); /* to use R random normals */ stime=times[0]; stime=0; mtime=times[(*Ntimes-1)]-stime; tau=times[(*Ntimes-1)]-stime; Ut[0]=times[0]; if (*weighted==3) { if (*line==0) for (i=0;i<*px;i++) cumweight[i]=tau; else for (i=0;i<*px;i++) cumweight[i]=mtime*mtime*0.5; } /* computation of constant effects */ for (i=0;i<*px;i++) { if (fabs(timepow[i])<0.000001) { // timepow ca 0 for (s=0;s<*Ntimes;s++) if (vcu[i*(*Ntimes)+s]>0) { // time=times[s];dtime=times[s]-times[s-1]; if (vcu[(i+1)*(*Ntimes)+s]>0) { cumweight[i]=cumweight[i]+(1/vcu[(i+1)*(*Ntimes)+s]); gamma[i]=gamma[i]+cu[(i+1)*(*Ntimes)+s]/vcu[(i+1)*(*Ntimes)+s]; for (c=0;c<*antpers;c++) VE(gammai[c],i)= VE(gammai[c],i)+ME(W4t[c],s,i)/vcu[(i+1)*(*Ntimes)+s]; } } gamma[i]=gamma[i]/cumweight[i]; VE(gammav,i)=gamma[i]; for (c=0;c<*antpers;c++) VE(gammai[c],i)=VE(gammai[c],i)/cumweight[i]; } else { gamma[i]=cu[(i+1)*(*Ntimes)+(*Ntimes-1)]/pow(mtime,timepow[i]);; VE(gammav,i)=gamma[i]; for (c=0;c<*antpers;c++) VE(gammai[c],i)=ME(W4t[c],*Ntimes-1,i)/pow(mtime,timepow[i]);; } } /* i=1..px */ /* if (*weighted>=1) { for (s=1;s<*Ntimes;s++) { vec_zeros(VdB); for (i=0;i<*antpers;i++) { extract_row(W4t[i],s,tmpv1); vec_subtr(tmpv1,gammai[i],difX); vec_star(difX,difX,rowX); vec_add(rowX,VdB,VdB); } for (k=1;k<=*px;k++) vcudif[k*(*Ntimes)+s]=VE(VdB,k-1); } } */ /* weighted==1 */ /* Computation of observed teststatistics */ for (s=1;s<*Ntimes;s++) if (vcu[0*(*Ntimes)+s]>0) { time=times[s]-stime; dtime=times[s]-times[s-1]; for (i=1;i<=*px;i++) { xij=fabs(cu[i*(*Ntimes)+s])/sqrt(vcu[i*(*Ntimes)+s]); if (xij>testOBS[i-1]) testOBS[i-1]=xij; } for (i=1;i<=*px;i++) VE(xi,i-1)=cu[i*(*Ntimes)+s]; // if (*line==1) scl_vec_mult(time,gammav,gammavt); for (i=0;i<*px;i++) VE(gammavt,i)=VE(gammav,i)*pow(time,timepow[i]); vec_subtr(xi,gammavt,difX); vec_star(difX,difX,ssrow); Ut[s]=times[s]; for (i=0;i<*px;i++) { // if (*weighted>=2) vardif=vcudif[(i+1)*(*Ntimes)+s]; else vardif=1; // if (*weighted>=2) { // if ((s>*weighted) && (s<*Ntimes-*weighted)) // VE(difX,i)=VE(difX,i)/sqrt(vardif); else VE(difX,i)=0.0; // } else VE(difX,i)=VE(difX,i); Ut[(i+1)*(*Ntimes)+s]=VE(difX,i); c=(*px)+i; if (fabs(VE(difX,i))>testOBS[c]) testOBS[c]=fabs(VE(difX,i)); c=2*(*px)+i; if ((s>*weighted) && (s<*Ntimes-*weighted)) testOBS[c]=testOBS[c]+VE(ssrow,i)*dtime; } } /* for (i=0;i<3*(*px);i++) Rprintf(" %lf \n",testOBS[i]); */ /* simulation of testprocesses and teststatistics */ for (k=1;k<*antsim;k++) { mat_zeros(Delta); vec_zeros(tmpv1); for (i=0;i<*antpers;i++) { /* random=gasdev(&idum); */ random=norm_rand(); scl_mat_mult(random,W4t[i],tmpM1);mat_add(tmpM1,Delta,Delta); scl_vec_mult(random,gammai[i],xi); vec_add(xi,tmpv1,tmpv1); } scl_vec_mult(1,tmpv1,tmpv1t); for (s=1;s<*Ntimes;s++) if (vcu[0*(*Ntimes)+s]>0) { time=times[s]-stime; dtime=times[s]-times[s-1]; extract_row(Delta,s,rowX); // if (*line==1) scl_vec_mult(times[s],tmpv1,tmpv1t); for (i=0;i<*px;i++) VE(tmpv1t,i)=VE(tmpv1,i)*pow(time,timepow[i]);; vec_subtr(rowX,tmpv1t,difX); vec_star(difX,difX,ssrow); for (i=0;i<*px;i++) { VE(xi,i)=fabs(ME(Delta,s,i))/sqrt(vcu[(i+1)*(*Ntimes)+s]); if (VE(xi,i)>test[i*(*antsim)+k]) test[i*(*antsim)+k]=VE(xi,i); if (*weighted>=1) vardif=vcudif[(i+1)*(*Ntimes)+s]; else vardif=1; if (*weighted>=2) { if ((s>*weighted) && 
(s<*Ntimes-*weighted)) VE(difX,i)=VE(difX,i)/sqrt(vardif); else VE(difX,i)=0; } else VE(difX,i)=VE(difX,i); if (k<51) {l=(k-1)*(*px)+i; simUt[l*(*Ntimes)+s]=VE(difX,i);} c=(*px+i); VE(difX,i)=fabs(VE(difX,i)); if (VE(difX,i)>test[c*(*antsim)+k]) test[c*(*antsim)+k]=VE(difX,i); c=2*(*px)+i; if ((s>*weighted) && (s<*Ntimes-*weighted)) test[c*(*antsim)+k]=test[c*(*antsim)+k]+VE(ssrow,i)*dtime/vardif; } } /* s=1..Ntimes */ } /* k=1..antsim */ PutRNGstate(); /* to use R random normals */ free_mats(&Delta,&tmpM1,NULL); free_vecs(&gammavt,&tmpv1t,&VdB,&rowX,&difX,&xi,&tmpv1,&ssrow,&gammav,NULL); for (i=0;i<*antpers;i++) free_vec(gammai[i]); free(cumweight); } timereg/src/prop-odds-subdist2.c0000644000176200001440000005502113520007035016303 0ustar liggesusers#include #include #include "matrix.h" #include #include void posubdist2(times,Ntimes,designX,nx,px,antpers,start,stop,betaS,Nit,cu,vcu,Iinv, Vbeta,detail,sim,antsim,rani,Rvcu,RVbeta,test,testOBS,Ut,simUt,Uit,id,status, weighted,ratesim,score,dhatMit,dhatMitiid,retur,loglike,profile,sym, KMtimes,KMti,etime,causeS,ipers,baselinevar,clusters,antclust,ccode,biid,gamiid,wweights) double *designX,*times,*betaS,*start,*stop,*cu,*Vbeta,*RVbeta,*vcu,*Rvcu,*Iinv,*test,*testOBS,*Ut,*simUt,*Uit,*score,*dhatMit,*dhatMitiid,*loglike, *KMtimes,*KMti,*etime,*biid,*gamiid,*wweights; int *nx,*px,*antpers,*Ntimes,*Nit,*detail,*sim,*antsim,*rani,*id,*status,*weighted,*ratesim,*retur,*profile,*sym,*causeS,*ipers,*baselinevar,*clusters,*antclust,*ccode; { // {{{ setting up matrix *ldesignX,*WX,*ldesignG,*CtVUCt,*A,*AI; matrix *dYI,*Ct,*dM1M2,*M1M2t,*COV,*ZX,*ZP,*ZPX; matrix *tmp1,*tmp2,*tmp3,*dS1,*SI,*dS2,*S2,*S2pl,*dS2pl,*M1,*VU,*ZXAI,*VUI; matrix *d2S0,*RobVbeta,*tmpM1,*Utt,*dS0t,*S1start,*tmpM2,*et,*gt,*qt; matrix *St[*Ntimes],*M1M2[*Ntimes],*C[*Ntimes],*ZXAIs[*Ntimes],*dYIt[*Ntimes]; matrix *dotwitowit[*antpers], // *W3tmg[*antclust], *W3t[*antclust],*W4t[*antclust],*W2t[*antclust],*AIxit[*antpers],*Uti[*antclust],*d2G[*Ntimes],*Delta,*Delta2; vector *Ctt,*lht,*S1,*dS0,*incS0t,*S0t,*S0start,*dA,*VdA,*dN,*MdA,*delta,*zav,*dlamt,*plamt,*dG[*Ntimes], *S1star; vector *xav,*difxxav,*xi,*zi,*U,*Upl,*beta,*xtilde; vector *Gbeta,*zcol,*one,*difzzav,*difZ,*neta2[*antclust]; vector *offset,*weight,*ZXdA[*Ntimes],*varUthat[*Ntimes],*Uprofile; vector *ahatt,*risk,*tmpv1,*tmpv2,*rowX,*rowZ,*difX,*VdB,*VdBmg; vector *W2[*antclust],*W3[*antclust],*reszpbeta,*res1dim,*dAt[*Ntimes],*eta2; // vector *W2[*antclust],*W3[*antclust],*W3mg[*antclust],*reszpbeta,*res1dim,*dAt[*Ntimes],*eta2; vector *dLamt[*antpers]; int *pg=calloc(1,sizeof(int)),c,robust=1,pers=0,ci,i,j,k,l,s,it; double weights,risks,RR,S0star,time,alpha,ll; double S0,tau,random,scale,sumscore; double norm_rand(); void GetRNGstate(),PutRNGstate(); pg[0]=1; for (j=0;j<*antpers;j++) { malloc_vec(*Ntimes,dLamt[j]); malloc_mat(*Ntimes,*px,dotwitowit[j]); malloc_mat(*Ntimes,*px,AIxit[j]); } for (j=0;j<*antclust;j++) { malloc_mat(*Ntimes,*pg,W3t[j]); // malloc_mat(*Ntimes,*pg,W3tmg[j]); malloc_mat(*Ntimes,*pg,W4t[j]); malloc_mat(*Ntimes,*px,W2t[j]); malloc_mat(*Ntimes,*px,Uti[j]); malloc_vec(*px,W2[j]); malloc_vec(*pg,W3[j]); // malloc_vec(*pg,W3mg[j]); malloc_vec(*Ntimes,neta2[j]) } malloc_mat(*Ntimes,*pg,tmpM1); malloc_mat(*Ntimes,*px,dS0t); malloc_mat(*Ntimes,*px,tmpM2); malloc_mat(*Ntimes,*px,S1start); malloc_mat(*Ntimes,*px,et); malloc_mat(*Ntimes,*px,gt); malloc_mat(*Ntimes,*px,qt); malloc_mat(*Ntimes,*px,Utt); malloc_mat(*Ntimes,*pg,Delta); malloc_mat(*Ntimes,*px,Delta2); malloc_mats(*antpers,*px,&WX,&ldesignX,NULL); 
malloc_mats(*antpers,*pg,&ZP,&ldesignG,NULL); malloc_mats(*px,*px,&COV,&A,&AI,&M1,&CtVUCt,NULL); malloc_mats(*px,*px,&d2S0,&RobVbeta,&tmp1,&tmp2,&dS1,&S2,&dS2,&S2pl,&dS2pl,&SI,&VU,&VUI,NULL); malloc_mats(*pg,*px,&ZXAI,&ZX,&dM1M2,&M1M2t,NULL); malloc_mats(*px,*pg,&tmp3,&ZPX,&dYI,&Ct,NULL); malloc_vec(*Ntimes,S0t); malloc_vec(*Ntimes,incS0t); malloc_vec(*Ntimes,eta2); malloc_vec(*Ntimes,S0start); malloc_vec(*Ntimes,lht); malloc_vec(1,reszpbeta); malloc_vec(1,res1dim); malloc_vecs(*antpers,&risk,&weight,&plamt,&dlamt,&dN,&zcol,&Gbeta,&one,&offset,NULL); malloc_vecs(*px,&Ctt,&ahatt,&tmpv1,&difX,&rowX,&xi,&dA,&VdA,&MdA,NULL); malloc_vecs(*px,&S1,&dS0,&S1star,&xtilde,&xav,&difxxav,NULL); malloc_vecs(*px,&U,&Upl,&beta,&delta,&difzzav,&Uprofile,NULL); malloc_vecs(*pg,&tmpv2,&rowZ,&zi,&difZ,&zav,&VdB,&VdBmg,NULL); for(j=0;j<*Ntimes;j++) { malloc_mat(*px,*pg,C[j]); malloc_mat(*pg,*px,M1M2[j]); malloc_mat(*pg,*px,ZXAIs[j]); malloc_mat(*px,*pg,dYIt[j]); malloc_vec(*px,dAt[j]); malloc_vec(*pg,ZXdA[j]); malloc_mat(*px,*px,St[j]); malloc_mat(*px,*px,d2G[j]); malloc_vec(*px,dG[j]); malloc_vec(*px,varUthat[j]); } ll=0; for(j=0;j<*px;j++) VE(beta,j)=betaS[j]; // }}} int timing=0; clock_t c0,c1; c0=clock(); double dummy,plamtj,dlamtj,weightp=0; // reading design once and for all for (c=0;c<*nx;c++) for(j=0;j<*px;j++) ME(WX,id[c],j)=designX[j*(*nx)+c]; cu[0]=times[0]; for (it=0;it<*Nit;it++) { // {{{ vec_zeros(U); vec_zeros(Upl); mat_zeros(S2pl); mat_zeros(S2); mat_zeros(COV); ll=0; sumscore=0; R_CheckUserInterrupt(); Mv(WX,beta,Gbeta); for (s=1;s<*Ntimes;s++) {// {{{ time=times[s]; pers=ipers[s]; // person with type 1 jump // printf(" pers=%d weight=%lf cause=%d \n",pers,wweights[pers],status[pers]); vec_zeros(dS0); mat_zeros(d2S0); mat_zeros(dS1); vec_zeros(S1star); vec_zeros(S1); S0star=0; S0=0; // S0p=0; S0cox=0; weightp=1; for (j=0;j<*antpers;j++) { // {{{ int other=((status[j]!=*causeS) && (status[j]!=*ccode))*1; weights=1; if (etime[j]